// Copyright (c) 1994-2006 Sun Microsystems Inc.
// All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// - Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// - Redistribution in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// - Neither the name of Sun Microsystems or the names of contributors may
// be used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// The original source code covered by the above license above has been
// modified significantly by Google Inc.
// Copyright 2011 the V8 project authors. All rights reserved.


#include "v8.h"

#if defined(V8_TARGET_ARCH_MIPS)

#include "mips/assembler-mips-inl.h"
#include "serialize.h"

namespace v8 {
namespace internal {

#ifdef DEBUG
bool CpuFeatures::initialized_ = false;
#endif
unsigned CpuFeatures::supported_ = 0;
unsigned CpuFeatures::found_by_runtime_probing_ = 0;

void CpuFeatures::Probe() {
  ASSERT(!initialized_);
#ifdef DEBUG
  initialized_ = true;
#endif
  // If the compiler is allowed to use fpu then we can use fpu too in our
  // code generation.
#if !defined(__mips__)
  // For the simulator=mips build, use FPU when FLAG_enable_fpu is enabled.
  if (FLAG_enable_fpu) {
    supported_ |= 1u << FPU;
  }
#else
  if (Serializer::enabled()) {
    supported_ |= OS::CpuFeaturesImpliedByPlatform();
    return;  // No features if we might serialize.
  }

  if (OS::MipsCpuHasFeature(FPU)) {
    // This implementation also sets the FPU flags if
    // runtime detection of FPU returns true.
    supported_ |= 1u << FPU;
    found_by_runtime_probing_ |= 1u << FPU;
  }
#endif
}


int ToNumber(Register reg) {
  ASSERT(reg.is_valid());
  const int kNumbers[] = {
    0,   // zero_reg
    1,   // at
    2,   // v0
    3,   // v1
    4,   // a0
    5,   // a1
    6,   // a2
    7,   // a3
    8,   // t0
    9,   // t1
    10,  // t2
    11,  // t3
    12,  // t4
    13,  // t5
    14,  // t6
    15,  // t7
    16,  // s0
    17,  // s1
    18,  // s2
    19,  // s3
    20,  // s4
    21,  // s5
    22,  // s6
    23,  // s7
    24,  // t8
    25,  // t9
    26,  // k0
    27,  // k1
    28,  // gp
    29,  // sp
    30,  // s8_fp
    31,  // ra
  };
  return kNumbers[reg.code()];
}


Register ToRegister(int num) {
  ASSERT(num >= 0 && num < kNumRegisters);
  const Register kRegisters[] = {
    zero_reg,
    at,
    v0, v1,
    a0, a1, a2, a3,
    t0, t1, t2, t3, t4, t5, t6, t7,
    s0, s1, s2, s3, s4, s5, s6, s7,
    t8, t9,
    k0, k1,
    gp,
    sp,
    s8_fp,
    ra
  };
  return kRegisters[num];
}


// -----------------------------------------------------------------------------
// Implementation of RelocInfo.

const int RelocInfo::kApplyMask = 0;


bool RelocInfo::IsCodedSpecially() {
  // The deserializer needs to know whether a pointer is specially coded. Being
  // specially coded on MIPS means that it is a lui/ori instruction, and that is
  // always the case inside code objects.
  return true;
}


// Patch the code at the current address with the supplied instructions.
void RelocInfo::PatchCode(byte* instructions, int instruction_count) {
  Instr* pc = reinterpret_cast<Instr*>(pc_);
  Instr* instr = reinterpret_cast<Instr*>(instructions);
  for (int i = 0; i < instruction_count; i++) {
    *(pc + i) = *(instr + i);
  }

  // Indicate that code has changed.
  CPU::FlushICache(pc_, instruction_count * Assembler::kInstrSize);
}


// Patch the code at the current PC with a call to the target address.
// Additional guard instructions can be added if required.
void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
  // Patch the code at the current address with a call to the target.
  UNIMPLEMENTED_MIPS();
}


// -----------------------------------------------------------------------------
// Implementation of Operand and MemOperand.
// See assembler-mips-inl.h for inlined constructors.

Operand::Operand(Handle<Object> handle) {
  rm_ = no_reg;
  // Verify all Objects referred by code are NOT in new space.
  Object* obj = *handle;
  ASSERT(!HEAP->InNewSpace(obj));
  if (obj->IsHeapObject()) {
    imm32_ = reinterpret_cast<intptr_t>(handle.location());
    rmode_ = RelocInfo::EMBEDDED_OBJECT;
  } else {
    // No relocation needed.
    imm32_ = reinterpret_cast<intptr_t>(obj);
    rmode_ = RelocInfo::NONE;
  }
}


MemOperand::MemOperand(Register rm, int32_t offset) : Operand(rm) {
  offset_ = offset;
}


// -----------------------------------------------------------------------------
// Specific instructions, constants, and masks.

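// kNegOffset is the 16-bit offset field with only its sign bit set; it is
// folded into the lw/sw patterns below to mark (and test for) negative
// frame offsets.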
static const int kNegOffset = 0x00008000;
// addiu(sp, sp, 4) aka Pop() operation or part of Pop(r)
// operations as post-increment of sp.
const Instr kPopInstruction = ADDIU | (sp.code() << kRsShift)
      | (sp.code() << kRtShift) | (kPointerSize & kImm16Mask);
// addiu(sp, sp, -4) part of Push(r) operation as pre-decrement of sp.
const Instr kPushInstruction = ADDIU | (sp.code() << kRsShift)
      | (sp.code() << kRtShift) | (-kPointerSize & kImm16Mask);
// sw(r, MemOperand(sp, 0))
const Instr kPushRegPattern = SW | (sp.code() << kRsShift)
      | (0 & kImm16Mask);
// lw(r, MemOperand(sp, 0))
const Instr kPopRegPattern = LW | (sp.code() << kRsShift)
      | (0 & kImm16Mask);

const Instr kLwRegFpOffsetPattern = LW | (s8_fp.code() << kRsShift)
      | (0 & kImm16Mask);

const Instr kSwRegFpOffsetPattern = SW | (s8_fp.code() << kRsShift)
      | (0 & kImm16Mask);

const Instr kLwRegFpNegOffsetPattern = LW | (s8_fp.code() << kRsShift)
      | (kNegOffset & kImm16Mask);

const Instr kSwRegFpNegOffsetPattern = SW | (s8_fp.code() << kRsShift)
      | (kNegOffset & kImm16Mask);
// A mask for the Rt register for push, pop, lw, sw instructions.
const Instr kRtMask = kRtFieldMask;
const Instr kLwSwInstrTypeMask = 0xffe00000;
const Instr kLwSwInstrArgumentMask = ~kLwSwInstrTypeMask;
const Instr kLwSwOffsetMask = kImm16Mask;


// Spare buffer.
static const int kMinimalBufferSize = 4 * KB;


Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
    : AssemblerBase(arg_isolate),
      positions_recorder_(this),
      emit_debug_code_(FLAG_debug_code) {
  if (buffer == NULL) {
    // Do our own buffer management.
    if (buffer_size <= kMinimalBufferSize) {
      buffer_size = kMinimalBufferSize;

      if (isolate()->assembler_spare_buffer() != NULL) {
        buffer = isolate()->assembler_spare_buffer();
        isolate()->set_assembler_spare_buffer(NULL);
      }
    }
    if (buffer == NULL) {
      buffer_ = NewArray<byte>(buffer_size);
    } else {
      buffer_ = static_cast<byte*>(buffer);
    }
    buffer_size_ = buffer_size;
    own_buffer_ = true;

  } else {
    // Use externally provided buffer instead.
    ASSERT(buffer_size > 0);
    buffer_ = static_cast<byte*>(buffer);
    buffer_size_ = buffer_size;
    own_buffer_ = false;
  }

  // Set up buffer pointers.
  ASSERT(buffer_ != NULL);
  pc_ = buffer_;
  reloc_info_writer.Reposition(buffer_ + buffer_size, pc_);

  last_trampoline_pool_end_ = 0;
  no_trampoline_pool_before_ = 0;
  trampoline_pool_blocked_nesting_ = 0;
  next_buffer_check_ = kMaxBranchOffset - kTrampolineSize;
  internal_trampoline_exception_ = false;
  last_bound_pos_ = 0;

  ast_id_for_reloc_info_ = kNoASTId;
}


Assembler::~Assembler() {
  if (own_buffer_) {
    if (isolate()->assembler_spare_buffer() == NULL &&
        buffer_size_ == kMinimalBufferSize) {
      isolate()->set_assembler_spare_buffer(buffer_);
    } else {
      DeleteArray(buffer_);
    }
  }
}


void Assembler::GetCode(CodeDesc* desc) {
  ASSERT(pc_ <= reloc_info_writer.pos());  // No overlap.
  // Set up the code descriptor.
  desc->buffer = buffer_;
  desc->buffer_size = buffer_size_;
  desc->instr_size = pc_offset();
  desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
}


void Assembler::Align(int m) {
  ASSERT(m >= 4 && IsPowerOf2(m));
  while ((pc_offset() & (m - 1)) != 0) {
    nop();
  }
}


void Assembler::CodeTargetAlign() {
  // There is no advantage to aligning branch/call targets to more than a
  // single instruction that I am aware of.
  Align(4);
}


Register Assembler::GetRtReg(Instr instr) {
  Register rt;
  rt.code_ = (instr & kRtFieldMask) >> kRtShift;
  return rt;
}


Register Assembler::GetRsReg(Instr instr) {
  Register rs;
  rs.code_ = (instr & kRsFieldMask) >> kRsShift;
  return rs;
}


Register Assembler::GetRdReg(Instr instr) {
  Register rd;
  rd.code_ = (instr & kRdFieldMask) >> kRdShift;
  return rd;
}


uint32_t Assembler::GetRt(Instr instr) {
  return (instr & kRtFieldMask) >> kRtShift;
}


uint32_t Assembler::GetRtField(Instr instr) {
  return instr & kRtFieldMask;
}


uint32_t Assembler::GetRs(Instr instr) {
  return (instr & kRsFieldMask) >> kRsShift;
}


uint32_t Assembler::GetRsField(Instr instr) {
  return instr & kRsFieldMask;
}


uint32_t Assembler::GetRd(Instr instr) {
  return (instr & kRdFieldMask) >> kRdShift;
}


uint32_t Assembler::GetRdField(Instr instr) {
  return instr & kRdFieldMask;
}


uint32_t Assembler::GetSa(Instr instr) {
  return (instr & kSaFieldMask) >> kSaShift;
}


uint32_t Assembler::GetSaField(Instr instr) {
  return instr & kSaFieldMask;
}


uint32_t Assembler::GetOpcodeField(Instr instr) {
  return instr & kOpcodeMask;
}


uint32_t Assembler::GetImmediate16(Instr instr) {
  return instr & kImm16Mask;
}


uint32_t Assembler::GetLabelConst(Instr instr) {
  return instr & ~kImm16Mask;
}


bool Assembler::IsPop(Instr instr) {
  return (instr & ~kRtMask) == kPopRegPattern;
}


bool Assembler::IsPush(Instr instr) {
  return (instr & ~kRtMask) == kPushRegPattern;
}


bool Assembler::IsSwRegFpOffset(Instr instr) {
  return ((instr & kLwSwInstrTypeMask) == kSwRegFpOffsetPattern);
}


bool Assembler::IsLwRegFpOffset(Instr instr) {
  return ((instr & kLwSwInstrTypeMask) == kLwRegFpOffsetPattern);
}


bool Assembler::IsSwRegFpNegOffset(Instr instr) {
  return ((instr & (kLwSwInstrTypeMask | kNegOffset)) ==
          kSwRegFpNegOffsetPattern);
}


bool Assembler::IsLwRegFpNegOffset(Instr instr) {
  return ((instr & (kLwSwInstrTypeMask | kNegOffset)) ==
          kLwRegFpNegOffsetPattern);
}


// Labels refer to positions in the (to be) generated code.
// There are bound, linked, and unused labels.
//
// Bound labels refer to known positions in the already
// generated code. pos() is the position the label refers to.
//
// Linked labels refer to unknown positions in the code
// to be generated; pos() is the position of the last
// instruction using the label.

// The link chain is terminated by an offset of -1 stored in the instruction,
// which is an otherwise illegal value (a branch to -1 is an infinite loop).
// The instruction's 16-bit offset field addresses 32-bit words, but in the
// code it is converted to an 18-bit value addressing bytes, hence the -4 value.

const int kEndOfChain = -4;


bool Assembler::IsBranch(Instr instr) {
  uint32_t opcode = GetOpcodeField(instr);
  uint32_t rt_field = GetRtField(instr);
  uint32_t rs_field = GetRsField(instr);
  uint32_t label_constant = GetLabelConst(instr);
  // Checks if the instruction is a branch.
  return opcode == BEQ ||
      opcode == BNE ||
      opcode == BLEZ ||
      opcode == BGTZ ||
      opcode == BEQL ||
      opcode == BNEL ||
      opcode == BLEZL ||
      opcode == BGTZL ||
      (opcode == REGIMM && (rt_field == BLTZ || rt_field == BGEZ ||
                            rt_field == BLTZAL || rt_field == BGEZAL)) ||
      (opcode == COP1 && rs_field == BC1) ||  // Coprocessor branch.
      label_constant == 0;  // Emitted label const in reg-exp engine.
}


bool Assembler::IsBeq(Instr instr) {
  return GetOpcodeField(instr) == BEQ;
}


bool Assembler::IsBne(Instr instr) {
  return GetOpcodeField(instr) == BNE;
}


bool Assembler::IsNop(Instr instr, unsigned int type) {
  // See Assembler::nop(type).
  ASSERT(type < 32);
  uint32_t opcode = GetOpcodeField(instr);
  uint32_t rt = GetRt(instr);
  uint32_t rs = GetRs(instr);
  uint32_t sa = GetSa(instr);

  // nop(type) == sll(zero_reg, zero_reg, type);
  // Technically all these values will be 0 but
  // this makes more sense to the reader.

  bool ret = (opcode == SLL &&
              rt == static_cast<uint32_t>(ToNumber(zero_reg)) &&
              rs == static_cast<uint32_t>(ToNumber(zero_reg)) &&
              sa == type);

  return ret;
}


int32_t Assembler::GetBranchOffset(Instr instr) {
  ASSERT(IsBranch(instr));
  return ((int16_t)(instr & kImm16Mask)) << 2;
}


bool Assembler::IsLw(Instr instr) {
  return ((instr & kOpcodeMask) == LW);
}


int16_t Assembler::GetLwOffset(Instr instr) {
  ASSERT(IsLw(instr));
  return ((instr & kImm16Mask));
}


Instr Assembler::SetLwOffset(Instr instr, int16_t offset) {
  ASSERT(IsLw(instr));

  // We actually create a new lw instruction based on the original one.
  Instr temp_instr = LW | (instr & kRsFieldMask) | (instr & kRtFieldMask)
      | (offset & kImm16Mask);

  return temp_instr;
}


bool Assembler::IsSw(Instr instr) {
  return ((instr & kOpcodeMask) == SW);
}


Instr Assembler::SetSwOffset(Instr instr, int16_t offset) {
  ASSERT(IsSw(instr));
  return ((instr & ~kImm16Mask) | (offset & kImm16Mask));
}


bool Assembler::IsAddImmediate(Instr instr) {
  return ((instr & kOpcodeMask) == ADDIU);
}


Instr Assembler::SetAddImmediateOffset(Instr instr, int16_t offset) {
  ASSERT(IsAddImmediate(instr));
  return ((instr & ~kImm16Mask) | (offset & kImm16Mask));
}


bool Assembler::IsAndImmediate(Instr instr) {
  return GetOpcodeField(instr) == ANDI;
}


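// Returns the position linked to by the instruction at pos: either the
// target encoded in a branch or emitted label constant, or the kEndOfChain
// sentinel when pos is the end of the label's link chain.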
int Assembler::target_at(int32_t pos) {
  Instr instr = instr_at(pos);
  if ((instr & ~kImm16Mask) == 0) {
    // Emitted label constant, not part of a branch.
    if (instr == 0) {
      return kEndOfChain;
    } else {
      int32_t imm18 = ((instr & static_cast<int32_t>(kImm16Mask)) << 16) >> 14;
      return (imm18 + pos);
    }
  }
  // Check we have a branch instruction.
  ASSERT(IsBranch(instr));
  // Do NOT change this to <<2. We rely on arithmetic shifts here, assuming
  // the compiler uses arithmetic shifts for signed integers.
  int32_t imm18 = ((instr & static_cast<int32_t>(kImm16Mask)) << 16) >> 14;

  if (imm18 == kEndOfChain) {
    // EndOfChain sentinel is returned directly, not relative to pc or pos.
    return kEndOfChain;
  } else {
    return pos + kBranchPCOffset + imm18;
  }
}


void Assembler::target_at_put(int32_t pos, int32_t target_pos) {
  Instr instr = instr_at(pos);
  if ((instr & ~kImm16Mask) == 0) {
    ASSERT(target_pos == kEndOfChain || target_pos >= 0);
    // Emitted label constant, not part of a branch.
    // Make label relative to Code* of generated Code object.
    instr_at_put(pos, target_pos + (Code::kHeaderSize - kHeapObjectTag));
    return;
  }

  ASSERT(IsBranch(instr));
  int32_t imm18 = target_pos - (pos + kBranchPCOffset);
  ASSERT((imm18 & 3) == 0);

  instr &= ~kImm16Mask;
  int32_t imm16 = imm18 >> 2;
  ASSERT(is_int16(imm16));

  instr_at_put(pos, instr | (imm16 & kImm16Mask));
}


void Assembler::print(Label* L) {
  if (L->is_unused()) {
    PrintF("unused label\n");
  } else if (L->is_bound()) {
    PrintF("bound label to %d\n", L->pos());
  } else if (L->is_linked()) {
    Label l = *L;
    PrintF("unbound label");
    while (l.is_linked()) {
      PrintF("@ %d ", l.pos());
      Instr instr = instr_at(l.pos());
      if ((instr & ~kImm16Mask) == 0) {
        PrintF("value\n");
      } else {
        PrintF("%d\n", instr);
      }
      next(&l);
    }
  } else {
    PrintF("label in inconsistent state (pos = %d)\n", L->pos_);
  }
}


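// Binds the label to pos and retargets every instruction on its link chain.
// Fixups that are out of 16-bit branch range are routed through trampoline
// slots obtained from get_trampoline_entry().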
void Assembler::bind_to(Label* L, int pos) {
  ASSERT(0 <= pos && pos <= pc_offset());  // Must have valid binding position.
  while (L->is_linked()) {
    int32_t fixup_pos = L->pos();
    int32_t dist = pos - fixup_pos;
    next(L);  // Call next before overwriting link with target at fixup_pos.
    if (dist > kMaxBranchOffset) {
      do {
        int32_t trampoline_pos = get_trampoline_entry(fixup_pos);
        if (kInvalidSlotPos == trampoline_pos) {
          // Internal error.
          return;
        }
        ASSERT((trampoline_pos - fixup_pos) <= kMaxBranchOffset);
        target_at_put(fixup_pos, trampoline_pos);
        fixup_pos = trampoline_pos;
        dist = pos - fixup_pos;
      } while (dist > kMaxBranchOffset);
    } else if (dist < -kMaxBranchOffset) {
      do {
        int32_t trampoline_pos = get_trampoline_entry(fixup_pos, false);
        if (kInvalidSlotPos == trampoline_pos) {
          // Internal error.
          return;
        }
        ASSERT((trampoline_pos - fixup_pos) >= -kMaxBranchOffset);
        target_at_put(fixup_pos, trampoline_pos);
        fixup_pos = trampoline_pos;
        dist = pos - fixup_pos;
      } while (dist < -kMaxBranchOffset);
    }
    target_at_put(fixup_pos, pos);
  }
  L->bind_to(pos);

  // Keep track of the last bound label so we don't eliminate any instructions
  // before a bound label.
  if (pos > last_bound_pos_)
    last_bound_pos_ = pos;
}


void Assembler::link_to(Label* L, Label* appendix) {
  if (appendix->is_linked()) {
    if (L->is_linked()) {
      // Append appendix to L's list.
      int fixup_pos;
      int link = L->pos();
      do {
        fixup_pos = link;
        link = target_at(fixup_pos);
      } while (link > 0);
      ASSERT(link == kEndOfChain);
      target_at_put(fixup_pos, appendix->pos());
    } else {
      // L is empty, simply use appendix.
      *L = *appendix;
    }
  }
  appendix->Unuse();  // Appendix should not be used anymore.
}


void Assembler::bind(Label* L) {
  ASSERT(!L->is_bound());  // Label can only be bound once.
  bind_to(L, pc_offset());
}


void Assembler::next(Label* L) {
  ASSERT(L->is_linked());
  int link = target_at(L->pos());
  ASSERT(link > 0 || link == kEndOfChain);
  if (link == kEndOfChain) {
    L->Unuse();
  } else if (link > 0) {
    L->link_to(link);
  }
}


// We have to use a temporary register for things that can be relocated even
// if they can be encoded in MIPS's 16-bit immediate-offset instruction field.
// There is no guarantee that the relocated location can be similarly encoded.
bool Assembler::MustUseReg(RelocInfo::Mode rmode) {
  return rmode != RelocInfo::NONE;
}


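// Instructions with register operands (R-type encoding):
//   opcode | rs | rt | rd | sa | funct.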
void Assembler::GenInstrRegister(Opcode opcode,
                                 Register rs,
                                 Register rt,
                                 Register rd,
                                 uint16_t sa,
                                 SecondaryField func) {
  ASSERT(rd.is_valid() && rs.is_valid() && rt.is_valid() && is_uint5(sa));
  Instr instr = opcode | (rs.code() << kRsShift) | (rt.code() << kRtShift)
      | (rd.code() << kRdShift) | (sa << kSaShift) | func;
  emit(instr);
}


void Assembler::GenInstrRegister(Opcode opcode,
                                 Register rs,
                                 Register rt,
                                 uint16_t msb,
                                 uint16_t lsb,
                                 SecondaryField func) {
  ASSERT(rs.is_valid() && rt.is_valid() && is_uint5(msb) && is_uint5(lsb));
  Instr instr = opcode | (rs.code() << kRsShift) | (rt.code() << kRtShift)
      | (msb << kRdShift) | (lsb << kSaShift) | func;
  emit(instr);
}


void Assembler::GenInstrRegister(Opcode opcode,
                                 SecondaryField fmt,
                                 FPURegister ft,
                                 FPURegister fs,
                                 FPURegister fd,
                                 SecondaryField func) {
  ASSERT(fd.is_valid() && fs.is_valid() && ft.is_valid());
  ASSERT(CpuFeatures::IsEnabled(FPU));
  Instr instr = opcode | fmt | (ft.code() << kFtShift) | (fs.code() << kFsShift)
      | (fd.code() << kFdShift) | func;
  emit(instr);
}


void Assembler::GenInstrRegister(Opcode opcode,
                                 SecondaryField fmt,
                                 Register rt,
                                 FPURegister fs,
                                 FPURegister fd,
                                 SecondaryField func) {
  ASSERT(fd.is_valid() && fs.is_valid() && rt.is_valid());
  ASSERT(CpuFeatures::IsEnabled(FPU));
  Instr instr = opcode | fmt | (rt.code() << kRtShift)
      | (fs.code() << kFsShift) | (fd.code() << kFdShift) | func;
  emit(instr);
}


void Assembler::GenInstrRegister(Opcode opcode,
                                 SecondaryField fmt,
                                 Register rt,
                                 FPUControlRegister fs,
                                 SecondaryField func) {
  ASSERT(fs.is_valid() && rt.is_valid());
  ASSERT(CpuFeatures::IsEnabled(FPU));
  Instr instr =
      opcode | fmt | (rt.code() << kRtShift) | (fs.code() << kFsShift) | func;
  emit(instr);
}


// Instructions with immediate value.
// Registers are in the order of the instruction encoding, from left to right.
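// (I-type encoding: opcode | rs | rt | 16-bit immediate.)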
void Assembler::GenInstrImmediate(Opcode opcode,
                                  Register rs,
                                  Register rt,
                                  int32_t j) {
  ASSERT(rs.is_valid() && rt.is_valid() && (is_int16(j) || is_uint16(j)));
  Instr instr = opcode | (rs.code() << kRsShift) | (rt.code() << kRtShift)
      | (j & kImm16Mask);
  emit(instr);
}


void Assembler::GenInstrImmediate(Opcode opcode,
                                  Register rs,
                                  SecondaryField SF,
                                  int32_t j) {
  ASSERT(rs.is_valid() && (is_int16(j) || is_uint16(j)));
  Instr instr = opcode | (rs.code() << kRsShift) | SF | (j & kImm16Mask);
  emit(instr);
}


void Assembler::GenInstrImmediate(Opcode opcode,
                                  Register rs,
                                  FPURegister ft,
                                  int32_t j) {
  ASSERT(rs.is_valid() && ft.is_valid() && (is_int16(j) || is_uint16(j)));
  ASSERT(CpuFeatures::IsEnabled(FPU));
  Instr instr = opcode | (rs.code() << kRsShift) | (ft.code() << kFtShift)
      | (j & kImm16Mask);
  emit(instr);
}


// Registers are in the order of the instruction encoding, from left to right.
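// Jump instructions use the J-type encoding: opcode followed by a 26-bit
// instruction index (the word-aligned target address shifted right by 2).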
void Assembler::GenInstrJump(Opcode opcode,
                             uint32_t address) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  ASSERT(is_uint26(address));
  Instr instr = opcode | address;
  emit(instr);
  BlockTrampolinePoolFor(1);  // For associated delay slot.
}


// Returns the next free label entry from the next trampoline pool.
int32_t Assembler::get_label_entry(int32_t pos, bool next_pool) {
  int trampoline_count = trampolines_.length();
  int32_t label_entry = 0;
  ASSERT(trampoline_count > 0);

  if (next_pool) {
    for (int i = 0; i < trampoline_count; i++) {
      if (trampolines_[i].start() > pos) {
        label_entry = trampolines_[i].take_label();
        break;
      }
    }
  } else {  // Caller needs a label entry from the previous pool.
    for (int i = trampoline_count-1; i >= 0; i--) {
      if (trampolines_[i].end() < pos) {
        label_entry = trampolines_[i].take_label();
        break;
      }
    }
  }
  return label_entry;
}


// Returns the next free trampoline entry from the next trampoline pool.
int32_t Assembler::get_trampoline_entry(int32_t pos, bool next_pool) {
  int trampoline_count = trampolines_.length();
  int32_t trampoline_entry = kInvalidSlotPos;
  ASSERT(trampoline_count > 0);

  if (!internal_trampoline_exception_) {
    if (next_pool) {
      for (int i = 0; i < trampoline_count; i++) {
        if (trampolines_[i].start() > pos) {
          trampoline_entry = trampolines_[i].take_slot();
          break;
        }
      }
    } else {  // Caller needs a trampoline entry from the previous pool.
      for (int i = trampoline_count-1; i >= 0; i--) {
        if (trampolines_[i].end() < pos) {
          trampoline_entry = trampolines_[i].take_slot();
          break;
        }
      }
    }
    if (kInvalidSlotPos == trampoline_entry) {
      internal_trampoline_exception_ = true;
    }
  }
  return trampoline_entry;
}


int32_t Assembler::branch_offset(Label* L, bool jump_elimination_allowed) {
  int32_t target_pos;
  int32_t pc_offset_v = pc_offset();

  if (L->is_bound()) {
    target_pos = L->pos();
    int32_t dist = pc_offset_v - target_pos;
    if (dist > kMaxBranchOffset) {
      do {
        int32_t trampoline_pos = get_trampoline_entry(target_pos);
        if (kInvalidSlotPos == trampoline_pos) {
          // Internal error.
          return 0;
        }
        ASSERT((trampoline_pos - target_pos) > 0);
        ASSERT((trampoline_pos - target_pos) <= kMaxBranchOffset);
        target_at_put(trampoline_pos, target_pos);
        target_pos = trampoline_pos;
        dist = pc_offset_v - target_pos;
      } while (dist > kMaxBranchOffset);
    } else if (dist < -kMaxBranchOffset) {
      do {
        int32_t trampoline_pos = get_trampoline_entry(target_pos, false);
        if (kInvalidSlotPos == trampoline_pos) {
          // Internal error.
          return 0;
        }
        ASSERT((target_pos - trampoline_pos) > 0);
        ASSERT((target_pos - trampoline_pos) <= kMaxBranchOffset);
        target_at_put(trampoline_pos, target_pos);
        target_pos = trampoline_pos;
        dist = pc_offset_v - target_pos;
      } while (dist < -kMaxBranchOffset);
    }
  } else {
    if (L->is_linked()) {
      target_pos = L->pos();  // L's link.
      int32_t dist = pc_offset_v - target_pos;
      if (dist > kMaxBranchOffset) {
        do {
          int32_t label_pos = get_label_entry(target_pos);
          ASSERT((label_pos - target_pos) < kMaxBranchOffset);
          label_at_put(L, label_pos);
          target_pos = label_pos;
          dist = pc_offset_v - target_pos;
        } while (dist > kMaxBranchOffset);
      } else if (dist < -kMaxBranchOffset) {
        do {
          int32_t label_pos = get_label_entry(target_pos, false);
          ASSERT((label_pos - target_pos) > -kMaxBranchOffset);
          label_at_put(L, label_pos);
          target_pos = label_pos;
          dist = pc_offset_v - target_pos;
        } while (dist < -kMaxBranchOffset);
      }
      L->link_to(pc_offset());
    } else {
      L->link_to(pc_offset());
      return kEndOfChain;
    }
  }

  int32_t offset = target_pos - (pc_offset() + kBranchPCOffset);
  ASSERT((offset & 3) == 0);
  ASSERT(is_int16(offset >> 2));

  return offset;
}


void Assembler::label_at_put(Label* L, int at_offset) {
  int target_pos;
  if (L->is_bound()) {
    target_pos = L->pos();
    instr_at_put(at_offset, target_pos + (Code::kHeaderSize - kHeapObjectTag));
  } else {
    if (L->is_linked()) {
      target_pos = L->pos();  // L's link.
      int32_t imm18 = target_pos - at_offset;
      ASSERT((imm18 & 3) == 0);
      int32_t imm16 = imm18 >> 2;
      ASSERT(is_int16(imm16));
      instr_at_put(at_offset, (imm16 & kImm16Mask));
    } else {
      target_pos = kEndOfChain;
      instr_at_put(at_offset, 0);
    }
    L->link_to(at_offset);
  }
}


//------- Branch and jump instructions --------

void Assembler::b(int16_t offset) {
  beq(zero_reg, zero_reg, offset);
}


void Assembler::bal(int16_t offset) {
  positions_recorder()->WriteRecordedPositions();
  bgezal(zero_reg, offset);
}


void Assembler::beq(Register rs, Register rt, int16_t offset) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  GenInstrImmediate(BEQ, rs, rt, offset);
  BlockTrampolinePoolFor(1);  // For associated delay slot.
}


void Assembler::bgez(Register rs, int16_t offset) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  GenInstrImmediate(REGIMM, rs, BGEZ, offset);
  BlockTrampolinePoolFor(1);  // For associated delay slot.
}


void Assembler::bgezal(Register rs, int16_t offset) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  positions_recorder()->WriteRecordedPositions();
  GenInstrImmediate(REGIMM, rs, BGEZAL, offset);
  BlockTrampolinePoolFor(1);  // For associated delay slot.
}


void Assembler::bgtz(Register rs, int16_t offset) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  GenInstrImmediate(BGTZ, rs, zero_reg, offset);
  BlockTrampolinePoolFor(1);  // For associated delay slot.
}


void Assembler::blez(Register rs, int16_t offset) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  GenInstrImmediate(BLEZ, rs, zero_reg, offset);
  BlockTrampolinePoolFor(1);  // For associated delay slot.
}


void Assembler::bltz(Register rs, int16_t offset) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  GenInstrImmediate(REGIMM, rs, BLTZ, offset);
  BlockTrampolinePoolFor(1);  // For associated delay slot.
}


void Assembler::bltzal(Register rs, int16_t offset) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  positions_recorder()->WriteRecordedPositions();
  GenInstrImmediate(REGIMM, rs, BLTZAL, offset);
  BlockTrampolinePoolFor(1);  // For associated delay slot.
}


void Assembler::bne(Register rs, Register rt, int16_t offset) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  GenInstrImmediate(BNE, rs, rt, offset);
  BlockTrampolinePoolFor(1);  // For associated delay slot.
}


void Assembler::j(int32_t target) {
  ASSERT(is_uint28(target) && ((target & 3) == 0));
  GenInstrJump(J, target >> 2);
}


void Assembler::jr(Register rs) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  if (rs.is(ra)) {
    positions_recorder()->WriteRecordedPositions();
  }
  GenInstrRegister(SPECIAL, rs, zero_reg, zero_reg, 0, JR);
  BlockTrampolinePoolFor(1);  // For associated delay slot.
}


void Assembler::jal(int32_t target) {
  positions_recorder()->WriteRecordedPositions();
  ASSERT(is_uint28(target) && ((target & 3) == 0));
  GenInstrJump(JAL, target >> 2);
}


void Assembler::jalr(Register rs, Register rd) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  positions_recorder()->WriteRecordedPositions();
  GenInstrRegister(SPECIAL, rs, zero_reg, rd, 0, JALR);
  BlockTrampolinePoolFor(1);  // For associated delay slot.
}


//-------Data-processing-instructions---------

// Arithmetic.

void Assembler::addu(Register rd, Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, ADDU);
}


void Assembler::addiu(Register rd, Register rs, int32_t j) {
  GenInstrImmediate(ADDIU, rs, rd, j);
}


void Assembler::subu(Register rd, Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, SUBU);
}


void Assembler::mul(Register rd, Register rs, Register rt) {
  GenInstrRegister(SPECIAL2, rs, rt, rd, 0, MUL);
}


void Assembler::mult(Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, MULT);
}


void Assembler::multu(Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, MULTU);
}


void Assembler::div(Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, DIV);
}


void Assembler::divu(Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, DIVU);
}


// Logical.

void Assembler::and_(Register rd, Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, AND);
}


void Assembler::andi(Register rt, Register rs, int32_t j) {
  GenInstrImmediate(ANDI, rs, rt, j);
}


void Assembler::or_(Register rd, Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, OR);
}


void Assembler::ori(Register rt, Register rs, int32_t j) {
  GenInstrImmediate(ORI, rs, rt, j);
}


void Assembler::xor_(Register rd, Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, XOR);
}


void Assembler::xori(Register rt, Register rs, int32_t j) {
  GenInstrImmediate(XORI, rs, rt, j);
}


void Assembler::nor(Register rd, Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, NOR);
}


// Shifts.
void Assembler::sll(Register rd,
                    Register rt,
                    uint16_t sa,
                    bool coming_from_nop) {
  // Don't allow nop instructions in the form sll zero_reg, zero_reg to be
  // generated using the sll instruction. They must be generated using
  // nop(int/NopMarkerTypes) or MarkCode(int/NopMarkerTypes) pseudo
  // instructions.
  ASSERT(coming_from_nop || !(rd.is(zero_reg) && rt.is(zero_reg)));
  GenInstrRegister(SPECIAL, zero_reg, rt, rd, sa, SLL);
}


void Assembler::sllv(Register rd, Register rt, Register rs) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, SLLV);
}


void Assembler::srl(Register rd, Register rt, uint16_t sa) {
  GenInstrRegister(SPECIAL, zero_reg, rt, rd, sa, SRL);
}


void Assembler::srlv(Register rd, Register rt, Register rs) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, SRLV);
}


void Assembler::sra(Register rd, Register rt, uint16_t sa) {
  GenInstrRegister(SPECIAL, zero_reg, rt, rd, sa, SRA);
}


void Assembler::srav(Register rd, Register rt, Register rs) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, SRAV);
}


void Assembler::rotr(Register rd, Register rt, uint16_t sa) {
  // Should be called via MacroAssembler::Ror.
  ASSERT(rd.is_valid() && rt.is_valid() && is_uint5(sa));
  ASSERT(mips32r2);
  Instr instr = SPECIAL | (1 << kRsShift) | (rt.code() << kRtShift)
      | (rd.code() << kRdShift) | (sa << kSaShift) | SRL;
  emit(instr);
}


void Assembler::rotrv(Register rd, Register rt, Register rs) {
  // Should be called via MacroAssembler::Ror.
  ASSERT(rd.is_valid() && rt.is_valid() && rs.is_valid());
  ASSERT(mips32r2);
  Instr instr = SPECIAL | (rs.code() << kRsShift) | (rt.code() << kRtShift)
      | (rd.code() << kRdShift) | (1 << kSaShift) | SRLV;
  emit(instr);
}


//------------Memory-instructions-------------

// Helper for base-reg + offset, when offset is larger than int16.
void Assembler::LoadRegPlusOffsetToAt(const MemOperand& src) {
  ASSERT(!src.rm().is(at));
  lui(at, src.offset_ >> kLuiShift);
  ori(at, at, src.offset_ & kImm16Mask);  // Load 32-bit offset.
  addu(at, at, src.rm());  // Add base register.
}


void Assembler::lb(Register rd, const MemOperand& rs) {
  if (is_int16(rs.offset_)) {
    GenInstrImmediate(LB, rs.rm(), rd, rs.offset_);
  } else {  // Offset > 16 bits, use multiple instructions to load.
    LoadRegPlusOffsetToAt(rs);
    GenInstrImmediate(LB, at, rd, 0);  // Equiv to lb(rd, MemOperand(at, 0));
  }
}


void Assembler::lbu(Register rd, const MemOperand& rs) {
  if (is_int16(rs.offset_)) {
    GenInstrImmediate(LBU, rs.rm(), rd, rs.offset_);
  } else {  // Offset > 16 bits, use multiple instructions to load.
    LoadRegPlusOffsetToAt(rs);
    GenInstrImmediate(LBU, at, rd, 0);  // Equiv to lbu(rd, MemOperand(at, 0));
  }
}


void Assembler::lh(Register rd, const MemOperand& rs) {
  if (is_int16(rs.offset_)) {
    GenInstrImmediate(LH, rs.rm(), rd, rs.offset_);
  } else {  // Offset > 16 bits, use multiple instructions to load.
    LoadRegPlusOffsetToAt(rs);
    GenInstrImmediate(LH, at, rd, 0);  // Equiv to lh(rd, MemOperand(at, 0));
  }
}


void Assembler::lhu(Register rd, const MemOperand& rs) {
  if (is_int16(rs.offset_)) {
    GenInstrImmediate(LHU, rs.rm(), rd, rs.offset_);
  } else {  // Offset > 16 bits, use multiple instructions to load.
    LoadRegPlusOffsetToAt(rs);
    GenInstrImmediate(LHU, at, rd, 0);  // Equiv to lhu(rd, MemOperand(at, 0));
  }
}


void Assembler::lw(Register rd, const MemOperand& rs) {
  if (is_int16(rs.offset_)) {
    GenInstrImmediate(LW, rs.rm(), rd, rs.offset_);
  } else {  // Offset > 16 bits, use multiple instructions to load.
    LoadRegPlusOffsetToAt(rs);
    GenInstrImmediate(LW, at, rd, 0);  // Equiv to lw(rd, MemOperand(at, 0));
  }
}


void Assembler::lwl(Register rd, const MemOperand& rs) {
  GenInstrImmediate(LWL, rs.rm(), rd, rs.offset_);
}


void Assembler::lwr(Register rd, const MemOperand& rs) {
  GenInstrImmediate(LWR, rs.rm(), rd, rs.offset_);
}


void Assembler::sb(Register rd, const MemOperand& rs) {
  if (is_int16(rs.offset_)) {
    GenInstrImmediate(SB, rs.rm(), rd, rs.offset_);
  } else {  // Offset > 16 bits, use multiple instructions to store.
    LoadRegPlusOffsetToAt(rs);
    GenInstrImmediate(SB, at, rd, 0);  // Equiv to sb(rd, MemOperand(at, 0));
  }
}


void Assembler::sh(Register rd, const MemOperand& rs) {
  if (is_int16(rs.offset_)) {
    GenInstrImmediate(SH, rs.rm(), rd, rs.offset_);
  } else {  // Offset > 16 bits, use multiple instructions to store.
    LoadRegPlusOffsetToAt(rs);
    GenInstrImmediate(SH, at, rd, 0);  // Equiv to sh(rd, MemOperand(at, 0));
  }
}


void Assembler::sw(Register rd, const MemOperand& rs) {
  if (is_int16(rs.offset_)) {
    GenInstrImmediate(SW, rs.rm(), rd, rs.offset_);
  } else {  // Offset > 16 bits, use multiple instructions to store.
    LoadRegPlusOffsetToAt(rs);
    GenInstrImmediate(SW, at, rd, 0);  // Equiv to sw(rd, MemOperand(at, 0));
  }
}


void Assembler::swl(Register rd, const MemOperand& rs) {
  GenInstrImmediate(SWL, rs.rm(), rd, rs.offset_);
}


void Assembler::swr(Register rd, const MemOperand& rs) {
  GenInstrImmediate(SWR, rs.rm(), rd, rs.offset_);
}


void Assembler::lui(Register rd, int32_t j) {
  GenInstrImmediate(LUI, zero_reg, rd, j);
}


//-------------Misc-instructions--------------

// Break / Trap instructions.
void Assembler::break_(uint32_t code) {
  ASSERT((code & ~0xfffff) == 0);
  Instr break_instr = SPECIAL | BREAK | (code << 6);
  emit(break_instr);
}


void Assembler::tge(Register rs, Register rt, uint16_t code) {
  ASSERT(is_uint10(code));
  Instr instr = SPECIAL | TGE | rs.code() << kRsShift
      | rt.code() << kRtShift | code << 6;
  emit(instr);
}


void Assembler::tgeu(Register rs, Register rt, uint16_t code) {
  ASSERT(is_uint10(code));
  Instr instr = SPECIAL | TGEU | rs.code() << kRsShift
      | rt.code() << kRtShift | code << 6;
  emit(instr);
}


void Assembler::tlt(Register rs, Register rt, uint16_t code) {
  ASSERT(is_uint10(code));
  Instr instr =
      SPECIAL | TLT | rs.code() << kRsShift | rt.code() << kRtShift | code << 6;
  emit(instr);
}


void Assembler::tltu(Register rs, Register rt, uint16_t code) {
  ASSERT(is_uint10(code));
  Instr instr = SPECIAL | TLTU | rs.code() << kRsShift
      | rt.code() << kRtShift | code << 6;
  emit(instr);
}


void Assembler::teq(Register rs, Register rt, uint16_t code) {
  ASSERT(is_uint10(code));
  Instr instr =
      SPECIAL | TEQ | rs.code() << kRsShift | rt.code() << kRtShift | code << 6;
  emit(instr);
}


void Assembler::tne(Register rs, Register rt, uint16_t code) {
  ASSERT(is_uint10(code));
  Instr instr =
      SPECIAL | TNE | rs.code() << kRsShift | rt.code() << kRtShift | code << 6;
  emit(instr);
}


// Move from HI/LO register.

void Assembler::mfhi(Register rd) {
  GenInstrRegister(SPECIAL, zero_reg, zero_reg, rd, 0, MFHI);
}


void Assembler::mflo(Register rd) {
  GenInstrRegister(SPECIAL, zero_reg, zero_reg, rd, 0, MFLO);
}


// Set on less than instructions.
void Assembler::slt(Register rd, Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, SLT);
}


void Assembler::sltu(Register rd, Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, SLTU);
}


void Assembler::slti(Register rt, Register rs, int32_t j) {
  GenInstrImmediate(SLTI, rs, rt, j);
}


void Assembler::sltiu(Register rt, Register rs, int32_t j) {
  GenInstrImmediate(SLTIU, rs, rt, j);
}


// Conditional move.
void Assembler::movz(Register rd, Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, MOVZ);
}


void Assembler::movn(Register rd, Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, MOVN);
}


void Assembler::movt(Register rd, Register rs, uint16_t cc) {
  Register rt;
  rt.code_ = (cc & 0x0007) << 2 | 1;
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, MOVCI);
}


void Assembler::movf(Register rd, Register rs, uint16_t cc) {
  Register rt;
  rt.code_ = (cc & 0x0007) << 2 | 0;
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, MOVCI);
}


// Bit twiddling.
void Assembler::clz(Register rd, Register rs) {
  // Clz instr requires same GPR number in 'rd' and 'rt' fields.
  GenInstrRegister(SPECIAL2, rs, rd, rd, 0, CLZ);
}


void Assembler::ins_(Register rt, Register rs, uint16_t pos, uint16_t size) {
  // Should be called via MacroAssembler::Ins.
  // Ins instr has 'rt' field as dest, and two uint5: msb, lsb.
  ASSERT(mips32r2);
  GenInstrRegister(SPECIAL3, rs, rt, pos + size - 1, pos, INS);
}


void Assembler::ext_(Register rt, Register rs, uint16_t pos, uint16_t size) {
  // Should be called via MacroAssembler::Ext.
  // Ext instr has 'rt' field as dest, and two uint5: msb, lsb.
  ASSERT(mips32r2);
  GenInstrRegister(SPECIAL3, rs, rt, size - 1, pos, EXT);
}


//--------Coprocessor-instructions----------------

// Load, store, move.
void Assembler::lwc1(FPURegister fd, const MemOperand& src) {
  GenInstrImmediate(LWC1, src.rm(), fd, src.offset_);
}


void Assembler::ldc1(FPURegister fd, const MemOperand& src) {
  // Workaround for non-8-byte alignment of HeapNumber, convert 64-bit
  // load to two 32-bit loads.
  GenInstrImmediate(LWC1, src.rm(), fd, src.offset_);
  FPURegister nextfpreg;
  nextfpreg.setcode(fd.code() + 1);
  GenInstrImmediate(LWC1, src.rm(), nextfpreg, src.offset_ + 4);
}


void Assembler::swc1(FPURegister fd, const MemOperand& src) {
  GenInstrImmediate(SWC1, src.rm(), fd, src.offset_);
}


void Assembler::sdc1(FPURegister fd, const MemOperand& src) {
  // Workaround for non-8-byte alignment of HeapNumber, convert 64-bit
  // store to two 32-bit stores.
  GenInstrImmediate(SWC1, src.rm(), fd, src.offset_);
  FPURegister nextfpreg;
  nextfpreg.setcode(fd.code() + 1);
  GenInstrImmediate(SWC1, src.rm(), nextfpreg, src.offset_ + 4);
}


void Assembler::mtc1(Register rt, FPURegister fs) {
  GenInstrRegister(COP1, MTC1, rt, fs, f0);
}


void Assembler::mfc1(Register rt, FPURegister fs) {
  GenInstrRegister(COP1, MFC1, rt, fs, f0);
}


void Assembler::ctc1(Register rt, FPUControlRegister fs) {
  GenInstrRegister(COP1, CTC1, rt, fs);
}


void Assembler::cfc1(Register rt, FPUControlRegister fs) {
  GenInstrRegister(COP1, CFC1, rt, fs);
}


// Arithmetic.

void Assembler::add_d(FPURegister fd, FPURegister fs, FPURegister ft) {
  GenInstrRegister(COP1, D, ft, fs, fd, ADD_D);
}


void Assembler::sub_d(FPURegister fd, FPURegister fs, FPURegister ft) {
  GenInstrRegister(COP1, D, ft, fs, fd, SUB_D);
}


void Assembler::mul_d(FPURegister fd, FPURegister fs, FPURegister ft) {
  GenInstrRegister(COP1, D, ft, fs, fd, MUL_D);
}


void Assembler::div_d(FPURegister fd, FPURegister fs, FPURegister ft) {
  GenInstrRegister(COP1, D, ft, fs, fd, DIV_D);
}


void Assembler::abs_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, ABS_D);
}


void Assembler::mov_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, MOV_D);
}


void Assembler::neg_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, NEG_D);
}


void Assembler::sqrt_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, SQRT_D);
}


// Conversions.

void Assembler::cvt_w_s(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, S, f0, fs, fd, CVT_W_S);
}


void Assembler::cvt_w_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, CVT_W_D);
}


void Assembler::trunc_w_s(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, S, f0, fs, fd, TRUNC_W_S);
}


void Assembler::trunc_w_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, TRUNC_W_D);
}


void Assembler::round_w_s(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, S, f0, fs, fd, ROUND_W_S);
}


void Assembler::round_w_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, ROUND_W_D);
}


void Assembler::floor_w_s(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, S, f0, fs, fd, FLOOR_W_S);
}


void Assembler::floor_w_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, FLOOR_W_D);
}


void Assembler::ceil_w_s(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, S, f0, fs, fd, CEIL_W_S);
}


void Assembler::ceil_w_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, CEIL_W_D);
}

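// Note: several of the 64-bit (L-format) conversions below are guarded by
// ASSERT(mips32r2), since they rely on MIPS32R2 features.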
void Assembler::cvt_l_s(FPURegister fd, FPURegister fs) {
  ASSERT(mips32r2);
  GenInstrRegister(COP1, S, f0, fs, fd, CVT_L_S);
}


void Assembler::cvt_l_d(FPURegister fd, FPURegister fs) {
  ASSERT(mips32r2);
  GenInstrRegister(COP1, D, f0, fs, fd, CVT_L_D);
}


void Assembler::trunc_l_s(FPURegister fd, FPURegister fs) {
  ASSERT(mips32r2);
  GenInstrRegister(COP1, S, f0, fs, fd, TRUNC_L_S);
}


void Assembler::trunc_l_d(FPURegister fd, FPURegister fs) {
  ASSERT(mips32r2);
  GenInstrRegister(COP1, D, f0, fs, fd, TRUNC_L_D);
}


void Assembler::round_l_s(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, S, f0, fs, fd, ROUND_L_S);
}


void Assembler::round_l_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, ROUND_L_D);
}


void Assembler::floor_l_s(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, S, f0, fs, fd, FLOOR_L_S);
}


void Assembler::floor_l_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, FLOOR_L_D);
}


void Assembler::ceil_l_s(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, S, f0, fs, fd, CEIL_L_S);
}


void Assembler::ceil_l_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, CEIL_L_D);
}


void Assembler::cvt_s_w(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, W, f0, fs, fd, CVT_S_W);
}


void Assembler::cvt_s_l(FPURegister fd, FPURegister fs) {
  ASSERT(mips32r2);
  GenInstrRegister(COP1, L, f0, fs, fd, CVT_S_L);
}


void Assembler::cvt_s_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, CVT_S_D);
}


void Assembler::cvt_d_w(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, W, f0, fs, fd, CVT_D_W);
}


void Assembler::cvt_d_l(FPURegister fd, FPURegister fs) {
  ASSERT(mips32r2);
  GenInstrRegister(COP1, L, f0, fs, fd, CVT_D_L);
}


void Assembler::cvt_d_s(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, S, f0, fs, fd, CVT_D_S);
}


// Conditions.
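// c() emits a c.cond.fmt compare that records its result in FPU condition
// code 'cc'; the bc1t/bc1f branches defined below then test that bit.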
void Assembler::c(FPUCondition cond, SecondaryField fmt,
                  FPURegister fs, FPURegister ft, uint16_t cc) {
  ASSERT(CpuFeatures::IsEnabled(FPU));
  ASSERT(is_uint3(cc));
  ASSERT((fmt & ~(31 << kRsShift)) == 0);
  Instr instr = COP1 | fmt | ft.code() << 16 | fs.code() << kFsShift
      | cc << 8 | 3 << 4 | cond;
  emit(instr);
}

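// Helper that compares src1 against the constant 0.0: it materializes 0.0 in
// f14 (clobbering that register) and emits a compare on condition code 0, so
// a following bc1t/bc1f on cc 0 branches on the result.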
void Assembler::fcmp(FPURegister src1, const double src2,
                     FPUCondition cond) {
  ASSERT(CpuFeatures::IsEnabled(FPU));
  ASSERT(src2 == 0.0);
  mtc1(zero_reg, f14);
  cvt_d_w(f14, f14);
  c(cond, D, src1, f14, 0);
}


void Assembler::bc1f(int16_t offset, uint16_t cc) {
  ASSERT(CpuFeatures::IsEnabled(FPU));
  ASSERT(is_uint3(cc));
  Instr instr = COP1 | BC1 | cc << 18 | 0 << 16 | (offset & kImm16Mask);
  emit(instr);
}


void Assembler::bc1t(int16_t offset, uint16_t cc) {
  ASSERT(CpuFeatures::IsEnabled(FPU));
  ASSERT(is_uint3(cc));
  Instr instr = COP1 | BC1 | cc << 18 | 1 << 16 | (offset & kImm16Mask);
  emit(instr);
}


// Debugging.
void Assembler::RecordJSReturn() {
  positions_recorder()->WriteRecordedPositions();
  CheckBuffer();
  RecordRelocInfo(RelocInfo::JS_RETURN);
}


void Assembler::RecordDebugBreakSlot() {
  positions_recorder()->WriteRecordedPositions();
  CheckBuffer();
  RecordRelocInfo(RelocInfo::DEBUG_BREAK_SLOT);
}


void Assembler::RecordComment(const char* msg) {
  if (FLAG_code_comments) {
    CheckBuffer();
    RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg));
  }
}


void Assembler::GrowBuffer() {
  if (!own_buffer_) FATAL("external code buffer is too small");

  // Compute new buffer size.
  CodeDesc desc;  // The new buffer.
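  // Growth policy: start at 4 KB, double the buffer while it is smaller than
  // 1 MB, then grow linearly in 1 MB steps.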
  if (buffer_size_ < 4*KB) {
    desc.buffer_size = 4*KB;
  } else if (buffer_size_ < 1*MB) {
    desc.buffer_size = 2*buffer_size_;
  } else {
    desc.buffer_size = buffer_size_ + 1*MB;
  }
  CHECK_GT(desc.buffer_size, 0);  // No overflow.

  // Set up the new buffer.
  desc.buffer = NewArray<byte>(desc.buffer_size);

  desc.instr_size = pc_offset();
  desc.reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();

  // Copy the data.
  int pc_delta = desc.buffer - buffer_;
  int rc_delta = (desc.buffer + desc.buffer_size) - (buffer_ + buffer_size_);
  memmove(desc.buffer, buffer_, desc.instr_size);
  memmove(reloc_info_writer.pos() + rc_delta,
          reloc_info_writer.pos(), desc.reloc_size);

  // Switch buffers.
  DeleteArray(buffer_);
  buffer_ = desc.buffer;
  buffer_size_ = desc.buffer_size;
  pc_ += pc_delta;
  reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
                               reloc_info_writer.last_pc() + pc_delta);

  // On ia32 and ARM pc-relative addressing is used, and we would thus need to
  // apply a shift by pc_delta. But on MIPS the target address is loaded
  // directly, so we do not need to relocate here.

  ASSERT(!overflow());
}

void Assembler::db(uint8_t data) {
  CheckBuffer();
  *reinterpret_cast<uint8_t*>(pc_) = data;
  pc_ += sizeof(uint8_t);
}


void Assembler::dd(uint32_t data) {
  CheckBuffer();
  *reinterpret_cast<uint32_t*>(pc_) = data;
  pc_ += sizeof(uint32_t);
}


void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
  RelocInfo rinfo(pc_, rmode, data);  // We do not try to reuse pool constants.
  if (rmode >= RelocInfo::JS_RETURN && rmode <= RelocInfo::DEBUG_BREAK_SLOT) {
    // Adjust code for new modes.
    ASSERT(RelocInfo::IsDebugBreakSlot(rmode)
           || RelocInfo::IsJSReturn(rmode)
           || RelocInfo::IsComment(rmode)
           || RelocInfo::IsPosition(rmode));
    // These modes do not need an entry in the constant pool.
  }
  if (rinfo.rmode() != RelocInfo::NONE) {
    // Don't record external references unless the heap will be serialized.
    if (rmode == RelocInfo::EXTERNAL_REFERENCE &&
        !Serializer::enabled() &&
        !FLAG_debug_code) {
      return;
    }
    ASSERT(buffer_space() >= kMaxRelocSize);  // Too late to grow buffer here.
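    // For CODE_TARGET_WITH_ID relocations the recorded data is the AST id
    // that the caller stored in ast_id_for_reloc_info_; it is consumed
    // (reset to kNoASTId) here.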
    if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
      ASSERT(ast_id_for_reloc_info_ != kNoASTId);
      RelocInfo reloc_info_with_ast_id(pc_, rmode, ast_id_for_reloc_info_);
      ast_id_for_reloc_info_ = kNoASTId;
      reloc_info_writer.Write(&reloc_info_with_ast_id);
    } else {
      reloc_info_writer.Write(&rinfo);
    }
  }
}


void Assembler::BlockTrampolinePoolFor(int instructions) {
  BlockTrampolinePoolBefore(pc_offset() + instructions * kInstrSize);
}


void Assembler::CheckTrampolinePool(bool force_emit) {
  // Calculate the offset of the next check.
  next_buffer_check_ = pc_offset() + kCheckConstInterval;

  int dist = pc_offset() - last_trampoline_pool_end_;

  if (dist <= kMaxDistBetweenPools && !force_emit) {
    return;
  }

  // Some small sequences of instructions must not be broken up by the
  // insertion of a trampoline pool; such sequences are protected by setting
  // either trampoline_pool_blocked_nesting_ or no_trampoline_pool_before_,
  // which are both checked here. Also, recursive calls to CheckTrampolinePool
  // are blocked by trampoline_pool_blocked_nesting_.
  if ((trampoline_pool_blocked_nesting_ > 0) ||
      (pc_offset() < no_trampoline_pool_before_)) {
    // Emission is currently blocked; make sure we try again as soon as
    // possible.
    if (trampoline_pool_blocked_nesting_ > 0) {
      next_buffer_check_ = pc_offset() + kInstrSize;
    } else {
      next_buffer_check_ = no_trampoline_pool_before_;
    }
    return;
  }

  // First we emit the jump (2 instructions), then the trampoline pool.
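  // The pool layout is: one branch over the pool, kSlotsPerTrampoline branch
  // slots (each a b + nop pair), and kLabelsPerTrampoline 32-bit label slots
  // emitted as zeros.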
  { BlockTrampolinePoolScope block_trampoline_pool(this);
    Label after_pool;
    b(&after_pool);
    nop();

    int pool_start = pc_offset();
    for (int i = 0; i < kSlotsPerTrampoline; i++) {
      b(&after_pool);
      nop();
    }
    for (int i = 0; i < kLabelsPerTrampoline; i++) {
      emit(0);
    }
    last_trampoline_pool_end_ = pc_offset() - kInstrSize;
    bind(&after_pool);
    trampolines_.Add(Trampoline(pool_start,
                                kSlotsPerTrampoline,
                                kLabelsPerTrampoline));

    // Since a trampoline pool was just emitted,
    // move the check offset forward by the standard interval.
    next_buffer_check_ = last_trampoline_pool_end_ + kMaxDistBetweenPools;
  }
  return;
}


Address Assembler::target_address_at(Address pc) {
  Instr instr1 = instr_at(pc);
  Instr instr2 = instr_at(pc + kInstrSize);
  // Interpret the two instructions generated by li: a lui/ori pair.
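  // For example, 'lui rt, 0x1234' followed by 'ori rt, rt, 0x5678' encodes
  // the 32-bit address 0x12345678.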
  if ((GetOpcodeField(instr1) == LUI) && (GetOpcodeField(instr2) == ORI)) {
    // Assemble the 32 bit value.
    return reinterpret_cast<Address>(
        (GetImmediate16(instr1) << 16) | GetImmediate16(instr2));
  }

  // We should never get here; force a bad address if we do.
  UNREACHABLE();
  return (Address)0x0;
}


void Assembler::set_target_address_at(Address pc, Address target) {
  // On MIPS we patch the address into a lui/ori instruction pair.

  // First, check that we have an li (lui/ori pair).
  Instr instr2 = instr_at(pc + kInstrSize);
#ifdef DEBUG
  Instr instr1 = instr_at(pc);

  // Check that we indeed have the result of a li with MustUseReg true.
  CHECK((GetOpcodeField(instr1) == LUI && GetOpcodeField(instr2) == ORI));
#endif

  uint32_t rt_code = GetRtField(instr2);
  uint32_t* p = reinterpret_cast<uint32_t*>(pc);
  uint32_t itarget = reinterpret_cast<uint32_t>(target);

  // lui rt, high-16.
  // ori rt, rt, low-16.
  *p = LUI | rt_code | ((itarget & kHiMask) >> kLuiShift);
  *(p+1) = ORI | rt_code | (rt_code << 5) | (itarget & kImm16Mask);

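  // Flush the two patched instructions from the instruction cache so that
  // the next execution of this code picks up the new target.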
  CPU::FlushICache(pc, 2 * sizeof(int32_t));
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS