blob: 51642e05c3dc52d6ed09eda34ac4c4b404cd6117 [file] [log] [blame]
Andrei Popescu31002712010-02-23 13:46:05 +00001// Copyright (c) 1994-2006 Sun Microsystems Inc.
2// All Rights Reserved.
3//
4// Redistribution and use in source and binary forms, with or without
5// modification, are permitted provided that the following conditions are
6// met:
7//
8// - Redistributions of source code must retain the above copyright notice,
9// this list of conditions and the following disclaimer.
10//
11// - Redistribution in binary form must reproduce the above copyright
12// notice, this list of conditions and the following disclaimer in the
13// documentation and/or other materials provided with the distribution.
14//
15// - Neither the name of Sun Microsystems or the names of contributors may
16// be used to endorse or promote products derived from this software without
17// specific prior written permission.
18//
19// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
20// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
21// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
23// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
24// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
25// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
26// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
27// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
28// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
29// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30
31// The original source code covered by the above license above has been
32// modified significantly by Google Inc.
Ben Murdoch257744e2011-11-30 15:57:28 +000033// Copyright 2011 the V8 project authors. All rights reserved.
Andrei Popescu31002712010-02-23 13:46:05 +000034
35
36#include "v8.h"
Leon Clarkef7060e22010-06-03 12:02:55 +010037
38#if defined(V8_TARGET_ARCH_MIPS)
39
Andrei Popescu31002712010-02-23 13:46:05 +000040#include "mips/assembler-mips-inl.h"
41#include "serialize.h"
42
Andrei Popescu31002712010-02-23 13:46:05 +000043namespace v8 {
44namespace internal {
45
#ifdef DEBUG
// Guards against Probe() being run more than once (debug builds only).
bool CpuFeatures::initialized_ = false;
#endif
// Bit set of features the generated code may use (bit index == feature enum).
unsigned CpuFeatures::supported_ = 0;
// Subset of supported_ that was discovered by runtime probing rather than
// implied by the platform; see the Serializer check in Probe().
unsigned CpuFeatures::found_by_runtime_probing_ = 0;
Andrei Popescu31002712010-02-23 13:46:05 +000051
// Determine which CPU features (currently only FPU) code generation may
// rely on. Must run exactly once, before any feature-dependent codegen.
void CpuFeatures::Probe() {
  ASSERT(!initialized_);
#ifdef DEBUG
  initialized_ = true;
#endif
  // If the compiler is allowed to use fpu then we can use fpu too in our
  // code generation.
#if !defined(__mips__)
  // For the simulator=mips build, use FPU when FLAG_enable_fpu is enabled.
  if (FLAG_enable_fpu) {
    supported_ |= 1u << FPU;
  }
#else
  // When building a snapshot, only platform-implied features may be used:
  // the snapshot must run on any host, not just the probing machine.
  if (Serializer::enabled()) {
    supported_ |= OS::CpuFeaturesImpliedByPlatform();
    return;  // No features if we might serialize.
  }

  if (OS::MipsCpuHasFeature(FPU)) {
    // This implementation also sets the FPU flags if
    // runtime detection of FPU returns true.
    supported_ |= 1u << FPU;
    found_by_runtime_probing_ |= 1u << FPU;
  }
#endif
}
Andrei Popescu31002712010-02-23 13:46:05 +000078
79
Andrei Popescu31002712010-02-23 13:46:05 +000080int ToNumber(Register reg) {
81 ASSERT(reg.is_valid());
82 const int kNumbers[] = {
83 0, // zero_reg
84 1, // at
85 2, // v0
86 3, // v1
87 4, // a0
88 5, // a1
89 6, // a2
90 7, // a3
91 8, // t0
92 9, // t1
93 10, // t2
94 11, // t3
95 12, // t4
96 13, // t5
97 14, // t6
98 15, // t7
99 16, // s0
100 17, // s1
101 18, // s2
102 19, // s3
103 20, // s4
104 21, // s5
105 22, // s6
106 23, // s7
107 24, // t8
108 25, // t9
109 26, // k0
110 27, // k1
111 28, // gp
112 29, // sp
113 30, // s8_fp
114 31, // ra
115 };
116 return kNumbers[reg.code()];
117}
118
Steve Block44f0eee2011-05-26 01:26:41 +0100119
Andrei Popescu31002712010-02-23 13:46:05 +0000120Register ToRegister(int num) {
121 ASSERT(num >= 0 && num < kNumRegisters);
122 const Register kRegisters[] = {
123 zero_reg,
124 at,
125 v0, v1,
126 a0, a1, a2, a3,
127 t0, t1, t2, t3, t4, t5, t6, t7,
128 s0, s1, s2, s3, s4, s5, s6, s7,
129 t8, t9,
130 k0, k1,
131 gp,
132 sp,
133 s8_fp,
134 ra
135 };
136 return kRegisters[num];
137}
138
139
140// -----------------------------------------------------------------------------
141// Implementation of RelocInfo.
142
// Only internal references must be re-targeted when code moves within the
// heap; all other reloc modes on MIPS are handled via the embedded pointers.
const int RelocInfo::kApplyMask = 1 << RelocInfo::INTERNAL_REFERENCE;
Andrei Popescu31002712010-02-23 13:46:05 +0000144
Steve Block44f0eee2011-05-26 01:26:41 +0100145
146bool RelocInfo::IsCodedSpecially() {
147 // The deserializer needs to know whether a pointer is specially coded. Being
148 // specially coded on MIPS means that it is a lui/ori instruction, and that is
149 // always the case inside code objects.
150 return true;
151}
152
153
Andrei Popescu31002712010-02-23 13:46:05 +0000154// Patch the code at the current address with the supplied instructions.
155void RelocInfo::PatchCode(byte* instructions, int instruction_count) {
156 Instr* pc = reinterpret_cast<Instr*>(pc_);
157 Instr* instr = reinterpret_cast<Instr*>(instructions);
158 for (int i = 0; i < instruction_count; i++) {
159 *(pc + i) = *(instr + i);
160 }
161
162 // Indicate that code has changed.
163 CPU::FlushICache(pc_, instruction_count * Assembler::kInstrSize);
164}
165
166
// Patch the code at the current PC with a call to the target address.
// Additional guard instructions can be added if required.
void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) {
  // Patch the code at the current address with a call to the target.
  // Not yet needed on the MIPS port; aborts if ever reached.
  UNIMPLEMENTED_MIPS();
}
173
174
175// -----------------------------------------------------------------------------
176// Implementation of Operand and MemOperand.
177// See assembler-mips-inl.h for inlined constructors.
178
// Build an immediate Operand from a handle. Heap objects are recorded via
// the handle location with EMBEDDED_OBJECT relocation so the GC can update
// the pointer; non-heap values (smis) are encoded directly.
Operand::Operand(Handle<Object> handle) {
  rm_ = no_reg;
  // Verify all Objects referred by code are NOT in new space.
  Object* obj = *handle;
  ASSERT(!HEAP->InNewSpace(obj));
  if (obj->IsHeapObject()) {
    // Store the handle's location, not the object itself, so relocation
    // can rewrite the pointer if the object moves.
    imm32_ = reinterpret_cast<intptr_t>(handle.location());
    rmode_ = RelocInfo::EMBEDDED_OBJECT;
  } else {
    // No relocation needed.
    imm32_ = reinterpret_cast<intptr_t>(obj);
    rmode_ = RelocInfo::NONE;
  }
}
193
Steve Block44f0eee2011-05-26 01:26:41 +0100194
195MemOperand::MemOperand(Register rm, int32_t offset) : Operand(rm) {
Andrei Popescu31002712010-02-23 13:46:05 +0000196 offset_ = offset;
197}
198
199
200// -----------------------------------------------------------------------------
Steve Block44f0eee2011-05-26 01:26:41 +0100201// Specific instructions, constants, and masks.
Andrei Popescu31002712010-02-23 13:46:05 +0000202
// Canonical bit patterns and masks used to recognize and rewrite the
// push/pop and fp-relative lw/sw instructions emitted by the assembler.
static const int kNegOffset = 0x00008000;
// addiu(sp, sp, 4) aka Pop() operation or part of Pop(r)
// operations as post-increment of sp.
const Instr kPopInstruction = ADDIU | (sp.code() << kRsShift)
      | (sp.code() << kRtShift) | (kPointerSize & kImm16Mask);
// addiu(sp, sp, -4) part of Push(r) operation as pre-decrement of sp.
const Instr kPushInstruction = ADDIU | (sp.code() << kRsShift)
      | (sp.code() << kRtShift) | (-kPointerSize & kImm16Mask);
// sw(r, MemOperand(sp, 0))
const Instr kPushRegPattern = SW | (sp.code() << kRsShift)
      | (0 & kImm16Mask);
// lw(r, MemOperand(sp, 0))
const Instr kPopRegPattern = LW | (sp.code() << kRsShift)
      | (0 & kImm16Mask);

// lw(r, MemOperand(s8_fp, 0)) — frame-pointer-relative load.
const Instr kLwRegFpOffsetPattern = LW | (s8_fp.code() << kRsShift)
      | (0 & kImm16Mask);

// sw(r, MemOperand(s8_fp, 0)) — frame-pointer-relative store.
const Instr kSwRegFpOffsetPattern = SW | (s8_fp.code() << kRsShift)
      | (0 & kImm16Mask);

// As above, but with the sign bit of the 16-bit offset set.
const Instr kLwRegFpNegOffsetPattern = LW | (s8_fp.code() << kRsShift)
      | (kNegOffset & kImm16Mask);

const Instr kSwRegFpNegOffsetPattern = SW | (s8_fp.code() << kRsShift)
      | (kNegOffset & kImm16Mask);
// A mask for the Rt register for push, pop, lw, sw instructions.
const Instr kRtMask = kRtFieldMask;
const Instr kLwSwInstrTypeMask = 0xffe00000;
const Instr kLwSwInstrArgumentMask = ~kLwSwInstrTypeMask;
const Instr kLwSwOffsetMask = kImm16Mask;


// Spare buffer.
static const int kMinimalBufferSize = 4 * KB;
238
239
// Construct an assembler writing into `buffer`. When buffer == NULL the
// assembler manages its own buffer, reusing the isolate's cached spare
// buffer when possible; otherwise the caller retains ownership.
Assembler::Assembler(Isolate* arg_isolate, void* buffer, int buffer_size)
    : AssemblerBase(arg_isolate),
      positions_recorder_(this),
      emit_debug_code_(FLAG_debug_code) {
  if (buffer == NULL) {
    // Do our own buffer management.
    if (buffer_size <= kMinimalBufferSize) {
      buffer_size = kMinimalBufferSize;

      // Reuse the isolate's spare buffer to avoid an allocation.
      if (isolate()->assembler_spare_buffer() != NULL) {
        buffer = isolate()->assembler_spare_buffer();
        isolate()->set_assembler_spare_buffer(NULL);
      }
    }
    if (buffer == NULL) {
      buffer_ = NewArray<byte>(buffer_size);
    } else {
      buffer_ = static_cast<byte*>(buffer);
    }
    buffer_size_ = buffer_size;
    own_buffer_ = true;

  } else {
    // Use externally provided buffer instead.
    ASSERT(buffer_size > 0);
    buffer_ = static_cast<byte*>(buffer);
    buffer_size_ = buffer_size;
    own_buffer_ = false;
  }

  // Setup buffer pointers: instructions grow up from the start while
  // relocation info grows down from the end.
  ASSERT(buffer_ != NULL);
  pc_ = buffer_;
  reloc_info_writer.Reposition(buffer_ + buffer_size, pc_);

  last_trampoline_pool_end_ = 0;
  no_trampoline_pool_before_ = 0;
  trampoline_pool_blocked_nesting_ = 0;
  // We leave space (16 * kTrampolineSlotsSize)
  // for BlockTrampolinePoolScope buffer.
  next_buffer_check_ = kMaxBranchOffset - kTrampolineSlotsSize * 16;
  internal_trampoline_exception_ = false;
  last_bound_pos_ = 0;

  trampoline_emitted_ = false;
  unbound_labels_count_ = 0;
  block_buffer_growth_ = false;

  ClearRecordedAstId();
}
290
291
// Release an owned buffer: a minimal-sized buffer is cached in the
// isolate's spare-buffer slot (if empty) for reuse; otherwise it is freed.
Assembler::~Assembler() {
  if (own_buffer_) {
    if (isolate()->assembler_spare_buffer() == NULL &&
        buffer_size_ == kMinimalBufferSize) {
      isolate()->set_assembler_spare_buffer(buffer_);
    } else {
      DeleteArray(buffer_);
    }
  }
}
302
303
304void Assembler::GetCode(CodeDesc* desc) {
Steve Block44f0eee2011-05-26 01:26:41 +0100305 ASSERT(pc_ <= reloc_info_writer.pos()); // No overlap.
Andrei Popescu31002712010-02-23 13:46:05 +0000306 // Setup code descriptor.
307 desc->buffer = buffer_;
308 desc->buffer_size = buffer_size_;
309 desc->instr_size = pc_offset();
310 desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
311}
312
313
Steve Block44f0eee2011-05-26 01:26:41 +0100314void Assembler::Align(int m) {
315 ASSERT(m >= 4 && IsPowerOf2(m));
316 while ((pc_offset() & (m - 1)) != 0) {
317 nop();
318 }
319}
320
321
322void Assembler::CodeTargetAlign() {
323 // No advantage to aligning branch/call targets to more than
324 // single instruction, that I am aware of.
325 Align(4);
326}
327
328
Ben Murdoch257744e2011-11-30 15:57:28 +0000329Register Assembler::GetRtReg(Instr instr) {
Steve Block44f0eee2011-05-26 01:26:41 +0100330 Register rt;
Ben Murdoch257744e2011-11-30 15:57:28 +0000331 rt.code_ = (instr & kRtFieldMask) >> kRtShift;
Steve Block44f0eee2011-05-26 01:26:41 +0100332 return rt;
333}
334
335
Ben Murdoch257744e2011-11-30 15:57:28 +0000336Register Assembler::GetRsReg(Instr instr) {
337 Register rs;
338 rs.code_ = (instr & kRsFieldMask) >> kRsShift;
339 return rs;
340}
341
342
343Register Assembler::GetRdReg(Instr instr) {
344 Register rd;
345 rd.code_ = (instr & kRdFieldMask) >> kRdShift;
346 return rd;
347}
348
349
350uint32_t Assembler::GetRt(Instr instr) {
351 return (instr & kRtFieldMask) >> kRtShift;
352}
353
354
355uint32_t Assembler::GetRtField(Instr instr) {
356 return instr & kRtFieldMask;
357}
358
359
360uint32_t Assembler::GetRs(Instr instr) {
361 return (instr & kRsFieldMask) >> kRsShift;
362}
363
364
365uint32_t Assembler::GetRsField(Instr instr) {
366 return instr & kRsFieldMask;
367}
368
369
370uint32_t Assembler::GetRd(Instr instr) {
371 return (instr & kRdFieldMask) >> kRdShift;
372}
373
374
375uint32_t Assembler::GetRdField(Instr instr) {
376 return instr & kRdFieldMask;
377}
378
379
380uint32_t Assembler::GetSa(Instr instr) {
381 return (instr & kSaFieldMask) >> kSaShift;
382}
383
384
385uint32_t Assembler::GetSaField(Instr instr) {
386 return instr & kSaFieldMask;
387}
388
389
390uint32_t Assembler::GetOpcodeField(Instr instr) {
391 return instr & kOpcodeMask;
392}
393
394
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000395uint32_t Assembler::GetFunction(Instr instr) {
396 return (instr & kFunctionFieldMask) >> kFunctionShift;
397}
398
399
400uint32_t Assembler::GetFunctionField(Instr instr) {
401 return instr & kFunctionFieldMask;
402}
403
404
Ben Murdoch257744e2011-11-30 15:57:28 +0000405uint32_t Assembler::GetImmediate16(Instr instr) {
406 return instr & kImm16Mask;
407}
408
409
410uint32_t Assembler::GetLabelConst(Instr instr) {
411 return instr & ~kImm16Mask;
412}
413
414
Steve Block44f0eee2011-05-26 01:26:41 +0100415bool Assembler::IsPop(Instr instr) {
416 return (instr & ~kRtMask) == kPopRegPattern;
417}
418
419
420bool Assembler::IsPush(Instr instr) {
421 return (instr & ~kRtMask) == kPushRegPattern;
422}
423
424
425bool Assembler::IsSwRegFpOffset(Instr instr) {
426 return ((instr & kLwSwInstrTypeMask) == kSwRegFpOffsetPattern);
427}
428
429
430bool Assembler::IsLwRegFpOffset(Instr instr) {
431 return ((instr & kLwSwInstrTypeMask) == kLwRegFpOffsetPattern);
432}
433
434
435bool Assembler::IsSwRegFpNegOffset(Instr instr) {
436 return ((instr & (kLwSwInstrTypeMask | kNegOffset)) ==
437 kSwRegFpNegOffsetPattern);
438}
439
440
441bool Assembler::IsLwRegFpNegOffset(Instr instr) {
442 return ((instr & (kLwSwInstrTypeMask | kNegOffset)) ==
443 kLwRegFpNegOffsetPattern);
444}
445
446
Andrei Popescu31002712010-02-23 13:46:05 +0000447// Labels refer to positions in the (to be) generated code.
448// There are bound, linked, and unused labels.
449//
450// Bound labels refer to known positions in the already
451// generated code. pos() is the position the label refers to.
452//
453// Linked labels refer to unknown positions in the code
454// to be generated; pos() is the position of the last
455// instruction using the label.
456
Steve Block44f0eee2011-05-26 01:26:41 +0100457// The link chain is terminated by a value in the instruction of -1,
458// which is an otherwise illegal value (branch -1 is inf loop).
459// The instruction 16-bit offset field addresses 32-bit words, but in
460// code is conv to an 18-bit value addressing bytes, hence the -4 value.
Andrei Popescu31002712010-02-23 13:46:05 +0000461
// Sentinel terminating a branch link chain: decodes as branch offset -1
// (an infinite loop to self), which generated code never emits.
const int kEndOfChain = -4;
// Determines the end of the Jump chain (a subset of the label link chain).
const int kEndOfJumpChain = 0;
Andrei Popescu31002712010-02-23 13:46:05 +0000465
Steve Block44f0eee2011-05-26 01:26:41 +0100466
// True for every instruction whose 16-bit immediate holds a (possibly
// linked) branch offset — including emitted label constants, which the
// label-chain code must also traverse.
bool Assembler::IsBranch(Instr instr) {
  uint32_t opcode = GetOpcodeField(instr);
  uint32_t rt_field = GetRtField(instr);
  uint32_t rs_field = GetRsField(instr);
  uint32_t label_constant = GetLabelConst(instr);
  // Checks if the instruction is a branch.
  return opcode == BEQ ||
      opcode == BNE ||
      opcode == BLEZ ||
      opcode == BGTZ ||
      opcode == BEQL ||
      opcode == BNEL ||
      opcode == BLEZL ||
      opcode == BGTZL ||
      (opcode == REGIMM && (rt_field == BLTZ || rt_field == BGEZ ||
                            rt_field == BLTZAL || rt_field == BGEZAL)) ||
      (opcode == COP1 && rs_field == BC1) ||  // Coprocessor branch.
      label_constant == 0;  // Emitted label const in reg-exp engine.
}
486
487
Ben Murdoch257744e2011-11-30 15:57:28 +0000488bool Assembler::IsBeq(Instr instr) {
489 return GetOpcodeField(instr) == BEQ;
490}
491
492
493bool Assembler::IsBne(Instr instr) {
494 return GetOpcodeField(instr) == BNE;
495}
496
497
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000498bool Assembler::IsJump(Instr instr) {
499 uint32_t opcode = GetOpcodeField(instr);
500 uint32_t rt_field = GetRtField(instr);
501 uint32_t rd_field = GetRdField(instr);
502 uint32_t function_field = GetFunctionField(instr);
503 // Checks if the instruction is a jump.
504 return opcode == J || opcode == JAL ||
505 (opcode == SPECIAL && rt_field == 0 &&
506 ((function_field == JALR) || (rd_field == 0 && (function_field == JR))));
507}
508
509
510bool Assembler::IsJ(Instr instr) {
511 uint32_t opcode = GetOpcodeField(instr);
512 // Checks if the instruction is a jump.
513 return opcode == J;
514}
515
516
517bool Assembler::IsLui(Instr instr) {
518 uint32_t opcode = GetOpcodeField(instr);
519 // Checks if the instruction is a load upper immediate.
520 return opcode == LUI;
521}
522
523
524bool Assembler::IsOri(Instr instr) {
525 uint32_t opcode = GetOpcodeField(instr);
526 // Checks if the instruction is a load upper immediate.
527 return opcode == ORI;
528}
529
530
// Recognize the marker nop "sll(zero_reg, zero_reg, type)"; type encodes
// the flavor of nop. See Assembler::nop(type).
bool Assembler::IsNop(Instr instr, unsigned int type) {
  // See Assembler::nop(type).
  ASSERT(type < 32);  // sa field holds 5 bits.
  uint32_t opcode = GetOpcodeField(instr);
  uint32_t rt = GetRt(instr);
  uint32_t rs = GetRs(instr);
  uint32_t sa = GetSa(instr);

  // nop(type) == sll(zero_reg, zero_reg, type);
  // Technically all these values will be 0 but
  // this makes more sense to the reader.
  // NOTE(review): comparing the *opcode* field against SLL (a function-field
  // constant) appears to rely on both SPECIAL and SLL encoding as 0 —
  // confirm against constants-mips.h.

  bool ret = (opcode == SLL &&
              rt == static_cast<uint32_t>(ToNumber(zero_reg)) &&
              rs == static_cast<uint32_t>(ToNumber(zero_reg)) &&
              sa == type);

  return ret;
}
550
551
552int32_t Assembler::GetBranchOffset(Instr instr) {
553 ASSERT(IsBranch(instr));
554 return ((int16_t)(instr & kImm16Mask)) << 2;
555}
556
557
558bool Assembler::IsLw(Instr instr) {
559 return ((instr & kOpcodeMask) == LW);
560}
561
562
563int16_t Assembler::GetLwOffset(Instr instr) {
564 ASSERT(IsLw(instr));
565 return ((instr & kImm16Mask));
566}
567
568
569Instr Assembler::SetLwOffset(Instr instr, int16_t offset) {
570 ASSERT(IsLw(instr));
571
572 // We actually create a new lw instruction based on the original one.
573 Instr temp_instr = LW | (instr & kRsFieldMask) | (instr & kRtFieldMask)
574 | (offset & kImm16Mask);
575
576 return temp_instr;
577}
578
579
580bool Assembler::IsSw(Instr instr) {
581 return ((instr & kOpcodeMask) == SW);
582}
583
584
585Instr Assembler::SetSwOffset(Instr instr, int16_t offset) {
586 ASSERT(IsSw(instr));
587 return ((instr & ~kImm16Mask) | (offset & kImm16Mask));
588}
589
590
591bool Assembler::IsAddImmediate(Instr instr) {
592 return ((instr & kOpcodeMask) == ADDIU);
593}
594
595
596Instr Assembler::SetAddImmediateOffset(Instr instr, int16_t offset) {
597 ASSERT(IsAddImmediate(instr));
598 return ((instr & ~kImm16Mask) | (offset & kImm16Mask));
Andrei Popescu31002712010-02-23 13:46:05 +0000599}
600
601
Ben Murdoch257744e2011-11-30 15:57:28 +0000602bool Assembler::IsAndImmediate(Instr instr) {
603 return GetOpcodeField(instr) == ANDI;
604}
605
606
// Decode the label-chain link stored at buffer position pos and return the
// absolute position it points to, or kEndOfChain when pos terminates the
// chain. Three encodings are handled: emitted label constants, branches
// (pc-relative 16-bit word offsets), and lui/ori pairs or j/jal (which hold
// absolute addresses within the code buffer).
int Assembler::target_at(int32_t pos) {
  Instr instr = instr_at(pos);
  if ((instr & ~kImm16Mask) == 0) {
    // Emitted label constant, not part of a branch.
    if (instr == 0) {
       return kEndOfChain;
     } else {
       // Sign-extend the 16-bit word offset and convert it to bytes.
       int32_t imm18 =((instr & static_cast<int32_t>(kImm16Mask)) << 16) >> 14;
       return (imm18 + pos);
     }
  }
  // Check we have a branch or jump instruction.
  ASSERT(IsBranch(instr) || IsJ(instr) || IsLui(instr));
  // Do NOT change this to <<2. We rely on arithmetic shifts here, assuming
  // the compiler uses arithmectic shifts for signed integers.
  if (IsBranch(instr)) {
    int32_t imm18 = ((instr & static_cast<int32_t>(kImm16Mask)) << 16) >> 14;

    if (imm18 == kEndOfChain) {
      // EndOfChain sentinel is returned directly, not relative to pc or pos.
      return kEndOfChain;
    } else {
      // Branch offsets are relative to the instruction after the branch.
      return pos + kBranchPCOffset + imm18;
    }
  } else if (IsLui(instr)) {
    // A lui/ori pair holds the 32-bit absolute target address.
    Instr instr_lui = instr_at(pos + 0 * Assembler::kInstrSize);
    Instr instr_ori = instr_at(pos + 1 * Assembler::kInstrSize);
    ASSERT(IsOri(instr_ori));
    int32_t imm = (instr_lui & static_cast<int32_t>(kImm16Mask)) << kLuiShift;
    imm |= (instr_ori & static_cast<int32_t>(kImm16Mask));

    if (imm == kEndOfJumpChain) {
      // EndOfChain sentinel is returned directly, not relative to pc or pos.
      return kEndOfChain;
    } else {
      // Convert the absolute address back to a buffer-relative position.
      uint32_t instr_address = reinterpret_cast<int32_t>(buffer_ + pos);
      int32_t delta = instr_address - imm;
      ASSERT(pos > delta);
      return pos - delta;
    }
  } else {
    // j/jal: 26-bit word address within the current 256 MB (kImm28Mask)
    // region.
    int32_t imm28 = (instr & static_cast<int32_t>(kImm26Mask)) << 2;
    if (imm28 == kEndOfJumpChain) {
      // EndOfChain sentinel is returned directly, not relative to pc or pos.
      return kEndOfChain;
    } else {
      uint32_t instr_address = reinterpret_cast<int32_t>(buffer_ + pos);
      instr_address &= kImm28Mask;
      int32_t delta = instr_address - imm28;
      ASSERT(pos > delta);
      return pos - delta;
    }
  }
}
661
662
// Store target_pos into the link at buffer position pos, using whichever
// encoding the instruction there requires (label constant, branch offset,
// lui/ori absolute address, or j/jal 26-bit word address).
void Assembler::target_at_put(int32_t pos, int32_t target_pos) {
  Instr instr = instr_at(pos);
  if ((instr & ~kImm16Mask) == 0) {
    ASSERT(target_pos == kEndOfChain || target_pos >= 0);
    // Emitted label constant, not part of a branch.
    // Make label relative to Code* of generated Code object.
    instr_at_put(pos, target_pos + (Code::kHeaderSize - kHeapObjectTag));
    return;
  }

  ASSERT(IsBranch(instr) || IsJ(instr) || IsLui(instr));
  if (IsBranch(instr)) {
    // Offset is relative to the instruction following the branch.
    int32_t imm18 = target_pos - (pos + kBranchPCOffset);
    ASSERT((imm18 & 3) == 0);  // Targets are word-aligned.

    instr &= ~kImm16Mask;
    int32_t imm16 = imm18 >> 2;
    ASSERT(is_int16(imm16));

    instr_at_put(pos, instr | (imm16 & kImm16Mask));
  } else if (IsLui(instr)) {
    // Rewrite both halves of the lui/ori pair with the absolute address.
    Instr instr_lui = instr_at(pos + 0 * Assembler::kInstrSize);
    Instr instr_ori = instr_at(pos + 1 * Assembler::kInstrSize);
    ASSERT(IsOri(instr_ori));
    uint32_t imm = (uint32_t)buffer_ + target_pos;
    ASSERT((imm & 3) == 0);

    instr_lui &= ~kImm16Mask;
    instr_ori &= ~kImm16Mask;

    instr_at_put(pos + 0 * Assembler::kInstrSize,
                 instr_lui | ((imm & kHiMask) >> kLuiShift));
    instr_at_put(pos + 1 * Assembler::kInstrSize,
                 instr_ori | (imm & kImm16Mask));
  } else {
    // j/jal: store the 26-bit word address of the absolute target.
    uint32_t imm28 = (uint32_t)buffer_ + target_pos;
    imm28 &= kImm28Mask;
    ASSERT((imm28 & 3) == 0);

    instr &= ~kImm26Mask;
    uint32_t imm26 = imm28 >> 2;
    ASSERT(is_uint26(imm26));

    instr_at_put(pos, instr | (imm26 & kImm26Mask));
  }
}
709
710
711void Assembler::print(Label* L) {
712 if (L->is_unused()) {
713 PrintF("unused label\n");
714 } else if (L->is_bound()) {
715 PrintF("bound label to %d\n", L->pos());
716 } else if (L->is_linked()) {
717 Label l = *L;
718 PrintF("unbound label");
719 while (l.is_linked()) {
720 PrintF("@ %d ", l.pos());
721 Instr instr = instr_at(l.pos());
722 if ((instr & ~kImm16Mask) == 0) {
723 PrintF("value\n");
724 } else {
725 PrintF("%d\n", instr);
726 }
727 next(&l);
728 }
729 } else {
730 PrintF("label in inconsistent state (pos = %d)\n", L->pos_);
731 }
732}
733
734
// Bind label L to buffer position pos: rewrite every instruction in L's
// link chain to target pos, routing any branch whose target is out of
// 16-bit range through a trampoline slot.
void Assembler::bind_to(Label* L, int pos) {
  ASSERT(0 <= pos && pos <= pc_offset());  // Must have valid binding position.
  int32_t trampoline_pos = kInvalidSlotPos;
  if (L->is_linked() && !trampoline_emitted_) {
    // One fewer unbound label; the next trampoline check can be deferred.
    unbound_labels_count_--;
    next_buffer_check_ += kTrampolineSlotsSize;
  }

  while (L->is_linked()) {
    int32_t fixup_pos = L->pos();
    int32_t dist = pos - fixup_pos;
    next(L);  // Call next before overwriting link with target at fixup_pos.
    Instr instr = instr_at(fixup_pos);
    if (IsBranch(instr)) {
      if (dist > kMaxBranchOffset) {
        // Out of direct branch range: branch to a trampoline slot instead,
        // and make the trampoline the thing that reaches pos.
        if (trampoline_pos == kInvalidSlotPos) {
          trampoline_pos = get_trampoline_entry(fixup_pos);
          CHECK(trampoline_pos != kInvalidSlotPos);
        }
        ASSERT((trampoline_pos - fixup_pos) <= kMaxBranchOffset);
        target_at_put(fixup_pos, trampoline_pos);
        fixup_pos = trampoline_pos;
        dist = pos - fixup_pos;
      }
      target_at_put(fixup_pos, pos);
    } else {
      // Jumps and lui/ori pairs hold absolute addresses; no range limit.
      ASSERT(IsJ(instr) || IsLui(instr));
      target_at_put(fixup_pos, pos);
    }
  }
  L->bind_to(pos);

  // Keep track of the last bound label so we don't eliminate any instructions
  // before a bound label.
  if (pos > last_bound_pos_)
    last_bound_pos_ = pos;
}
772
773
// Bind L to the current end of the generated code.
void Assembler::bind(Label* L) {
  ASSERT(!L->is_bound());  // Label can only be bound once.
  bind_to(L, pc_offset());
}


// Advance L one step along its link chain; Unuse() it when the
// end-of-chain sentinel is reached.
void Assembler::next(Label* L) {
  ASSERT(L->is_linked());
  int link = target_at(L->pos());
  ASSERT(link > 0 || link == kEndOfChain);
  if (link == kEndOfChain) {
    L->Unuse();
  } else if (link > 0) {
    L->link_to(link);
  }
}
790
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000791bool Assembler::is_near(Label* L) {
792 if (L->is_bound()) {
793 return ((pc_offset() - L->pos()) < kMaxBranchOffset - 4 * kInstrSize);
794 }
795 return false;
796}
Andrei Popescu31002712010-02-23 13:46:05 +0000797
798// We have to use a temporary register for things that can be relocated even
799// if they can be encoded in the MIPS's 16 bits of immediate-offset instruction
800// space. There is no guarantee that the relocated location can be similarly
801// encoded.
Steve Block44f0eee2011-05-26 01:26:41 +0100802bool Assembler::MustUseReg(RelocInfo::Mode rmode) {
803 return rmode != RelocInfo::NONE;
Andrei Popescu31002712010-02-23 13:46:05 +0000804}
805
806
807void Assembler::GenInstrRegister(Opcode opcode,
808 Register rs,
809 Register rt,
810 Register rd,
811 uint16_t sa,
812 SecondaryField func) {
813 ASSERT(rd.is_valid() && rs.is_valid() && rt.is_valid() && is_uint5(sa));
814 Instr instr = opcode | (rs.code() << kRsShift) | (rt.code() << kRtShift)
815 | (rd.code() << kRdShift) | (sa << kSaShift) | func;
816 emit(instr);
817}
818
819
820void Assembler::GenInstrRegister(Opcode opcode,
Steve Block44f0eee2011-05-26 01:26:41 +0100821 Register rs,
822 Register rt,
823 uint16_t msb,
824 uint16_t lsb,
825 SecondaryField func) {
826 ASSERT(rs.is_valid() && rt.is_valid() && is_uint5(msb) && is_uint5(lsb));
827 Instr instr = opcode | (rs.code() << kRsShift) | (rt.code() << kRtShift)
828 | (msb << kRdShift) | (lsb << kSaShift) | func;
829 emit(instr);
830}
831
832
833void Assembler::GenInstrRegister(Opcode opcode,
Andrei Popescu31002712010-02-23 13:46:05 +0000834 SecondaryField fmt,
835 FPURegister ft,
836 FPURegister fs,
837 FPURegister fd,
838 SecondaryField func) {
839 ASSERT(fd.is_valid() && fs.is_valid() && ft.is_valid());
Ben Murdoch257744e2011-11-30 15:57:28 +0000840 ASSERT(CpuFeatures::IsEnabled(FPU));
Steve Block44f0eee2011-05-26 01:26:41 +0100841 Instr instr = opcode | fmt | (ft.code() << kFtShift) | (fs.code() << kFsShift)
842 | (fd.code() << kFdShift) | func;
Andrei Popescu31002712010-02-23 13:46:05 +0000843 emit(instr);
844}
845
846
847void Assembler::GenInstrRegister(Opcode opcode,
848 SecondaryField fmt,
849 Register rt,
850 FPURegister fs,
851 FPURegister fd,
852 SecondaryField func) {
853 ASSERT(fd.is_valid() && fs.is_valid() && rt.is_valid());
Ben Murdoch257744e2011-11-30 15:57:28 +0000854 ASSERT(CpuFeatures::IsEnabled(FPU));
Andrei Popescu31002712010-02-23 13:46:05 +0000855 Instr instr = opcode | fmt | (rt.code() << kRtShift)
Steve Block44f0eee2011-05-26 01:26:41 +0100856 | (fs.code() << kFsShift) | (fd.code() << kFdShift) | func;
857 emit(instr);
858}
859
860
861void Assembler::GenInstrRegister(Opcode opcode,
862 SecondaryField fmt,
863 Register rt,
864 FPUControlRegister fs,
865 SecondaryField func) {
866 ASSERT(fs.is_valid() && rt.is_valid());
Ben Murdoch257744e2011-11-30 15:57:28 +0000867 ASSERT(CpuFeatures::IsEnabled(FPU));
Steve Block44f0eee2011-05-26 01:26:41 +0100868 Instr instr =
869 opcode | fmt | (rt.code() << kRtShift) | (fs.code() << kFsShift) | func;
Andrei Popescu31002712010-02-23 13:46:05 +0000870 emit(instr);
871}
872
873
// Instructions with immediate value.
// Registers are in the order of the instruction encoding, from left to right.

// I-type: rs and rt are GPRs; the low 16 bits carry the immediate.
void Assembler::GenInstrImmediate(Opcode opcode,
                                  Register rs,
                                  Register rt,
                                  int32_t j) {
  // The immediate must fit in 16 bits, signed or unsigned.
  ASSERT(rs.is_valid() && rt.is_valid() && (is_int16(j) || is_uint16(j)));
  Instr instr = opcode | (rs.code() << kRsShift) | (rt.code() << kRtShift)
      | (j & kImm16Mask);
  emit(instr);
}


// I-type variant where the rt field is supplied as a SecondaryField
// (e.g. REGIMM branches such as BGEZ/BLTZ).
void Assembler::GenInstrImmediate(Opcode opcode,
                                  Register rs,
                                  SecondaryField SF,
                                  int32_t j) {
  ASSERT(rs.is_valid() && (is_int16(j) || is_uint16(j)));
  Instr instr = opcode | (rs.code() << kRsShift) | SF | (j & kImm16Mask);
  emit(instr);
}


// I-type variant with an FPU register in the ft field (FPU load/store).
void Assembler::GenInstrImmediate(Opcode opcode,
                                  Register rs,
                                  FPURegister ft,
                                  int32_t j) {
  ASSERT(rs.is_valid() && ft.is_valid() && (is_int16(j) || is_uint16(j)));
  ASSERT(CpuFeatures::IsEnabled(FPU));  // FPU instructions need FPU support.
  Instr instr = opcode | (rs.code() << kRsShift) | (ft.code() << kFtShift)
      | (j & kImm16Mask);
  emit(instr);
}
907
908
// J-type: 26-bit word-index target in the low bits of the instruction.
void Assembler::GenInstrJump(Opcode opcode,
                             uint32_t address) {
  // Keep the trampoline pool from being emitted between the jump and its
  // delay slot.
  BlockTrampolinePoolScope block_trampoline_pool(this);
  ASSERT(is_uint26(address));
  Instr instr = opcode | address;
  emit(instr);
  BlockTrampolinePoolFor(1);  // For associated delay slot.
}
917
918
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000919// Returns the next free trampoline entry.
920int32_t Assembler::get_trampoline_entry(int32_t pos) {
Ben Murdoch257744e2011-11-30 15:57:28 +0000921 int32_t trampoline_entry = kInvalidSlotPos;
Steve Block44f0eee2011-05-26 01:26:41 +0100922
Ben Murdoch257744e2011-11-30 15:57:28 +0000923 if (!internal_trampoline_exception_) {
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000924 if (trampoline_.start() > pos) {
925 trampoline_entry = trampoline_.take_slot();
Steve Block44f0eee2011-05-26 01:26:41 +0100926 }
Ben Murdoch3fb3ca82011-12-02 17:19:32 +0000927
Ben Murdoch257744e2011-11-30 15:57:28 +0000928 if (kInvalidSlotPos == trampoline_entry) {
929 internal_trampoline_exception_ = true;
Steve Block44f0eee2011-05-26 01:26:41 +0100930 }
931 }
932 return trampoline_entry;
Andrei Popescu31002712010-02-23 13:46:05 +0000933}
934
935
// Computes the absolute address a jump to L should target.  For an unbound,
// unlinked label the label is linked to the current pc and kEndOfJumpChain
// is returned so the site can be patched once L is bound.
uint32_t Assembler::jump_address(Label* L) {
  int32_t target_pos;

  if (L->is_bound()) {
    target_pos = L->pos();
  } else {
    if (L->is_linked()) {
      target_pos = L->pos();  // L's link.
      // Chain this site into the label's link list.
      L->link_to(pc_offset());
    } else {
      L->link_to(pc_offset());
      return kEndOfJumpChain;
    }
  }

  // Absolute address = buffer base + offset within the buffer.
  uint32_t imm = (uint32_t)buffer_ + target_pos;
  ASSERT((imm & 3) == 0);  // Jump targets are instruction (4-byte) aligned.

  return imm;
}
956
957
// Computes the 18-bit (pre-shift) branch offset to label L, relative to the
// branch's delay slot.  Unbound labels get linked and kEndOfChain returned;
// trampoline bookkeeping is updated for newly-linked labels.
int32_t Assembler::branch_offset(Label* L, bool jump_elimination_allowed) {
  int32_t target_pos;

  if (L->is_bound()) {
    target_pos = L->pos();
  } else {
    if (L->is_linked()) {
      target_pos = L->pos();
      L->link_to(pc_offset());
    } else {
      L->link_to(pc_offset());
      if (!trampoline_emitted_) {
        // One more label may need a trampoline slot; check sooner.
        unbound_labels_count_++;
        next_buffer_check_ -= kTrampolineSlotsSize;
      }
      return kEndOfChain;
    }
  }

  // Branch offsets are relative to the instruction after the branch.
  int32_t offset = target_pos - (pc_offset() + kBranchPCOffset);
  ASSERT((offset & 3) == 0);      // Word aligned.
  ASSERT(is_int16(offset >> 2));  // Must fit the 16-bit immediate field.

  return offset;
}
983
984
// Stores label L's position (or link information) into the instruction slot
// at at_offset, and links L to that slot when it is not yet bound.
void Assembler::label_at_put(Label* L, int at_offset) {
  int target_pos;
  if (L->is_bound()) {
    target_pos = L->pos();
    // Store the bound position biased like a code-object-relative offset.
    instr_at_put(at_offset, target_pos + (Code::kHeaderSize - kHeapObjectTag));
  } else {
    if (L->is_linked()) {
      target_pos = L->pos();  // L's link.
      // Encode the previous link as a 16-bit word distance.
      int32_t imm18 = target_pos - at_offset;
      ASSERT((imm18 & 3) == 0);
      int32_t imm16 = imm18 >> 2;
      ASSERT(is_int16(imm16));
      instr_at_put(at_offset, (imm16 & kImm16Mask));
    } else {
      target_pos = kEndOfChain;
      instr_at_put(at_offset, 0);  // Zero marks the end of the chain.
      if (!trampoline_emitted_) {
        unbound_labels_count_++;
        next_buffer_check_ -= kTrampolineSlotsSize;
      }
    }
    L->link_to(at_offset);
  }
}
1009
1010
//------- Branch and jump instructions --------

// Unconditional branch: encoded as beq with both operands zero_reg.
void Assembler::b(int16_t offset) {
  beq(zero_reg, zero_reg, offset);
}


// Branch-and-link: encoded as bgezal on zero_reg (always taken).
void Assembler::bal(int16_t offset) {
  positions_recorder()->WriteRecordedPositions();
  bgezal(zero_reg, offset);
}
1022
1023
// Conditional branches.  Each blocks the trampoline pool across the branch
// and its delay slot so the pair cannot be split.

void Assembler::beq(Register rs, Register rt, int16_t offset) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  GenInstrImmediate(BEQ, rs, rt, offset);
  BlockTrampolinePoolFor(1);  // For associated delay slot.
}


void Assembler::bgez(Register rs, int16_t offset) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  GenInstrImmediate(REGIMM, rs, BGEZ, offset);
  BlockTrampolinePoolFor(1);  // For associated delay slot.
}


// Linking variant: records source positions for the call site.
void Assembler::bgezal(Register rs, int16_t offset) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  positions_recorder()->WriteRecordedPositions();
  GenInstrImmediate(REGIMM, rs, BGEZAL, offset);
  BlockTrampolinePoolFor(1);  // For associated delay slot.
}


void Assembler::bgtz(Register rs, int16_t offset) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  GenInstrImmediate(BGTZ, rs, zero_reg, offset);
  BlockTrampolinePoolFor(1);  // For associated delay slot.
}


void Assembler::blez(Register rs, int16_t offset) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  GenInstrImmediate(BLEZ, rs, zero_reg, offset);
  BlockTrampolinePoolFor(1);  // For associated delay slot.
}


void Assembler::bltz(Register rs, int16_t offset) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  GenInstrImmediate(REGIMM, rs, BLTZ, offset);
  BlockTrampolinePoolFor(1);  // For associated delay slot.
}


// Linking variant: records source positions for the call site.
void Assembler::bltzal(Register rs, int16_t offset) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  positions_recorder()->WriteRecordedPositions();
  GenInstrImmediate(REGIMM, rs, BLTZAL, offset);
  BlockTrampolinePoolFor(1);  // For associated delay slot.
}


void Assembler::bne(Register rs, Register rt, int16_t offset) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  GenInstrImmediate(BNE, rs, rt, offset);
  BlockTrampolinePoolFor(1);  // For associated delay slot.
}
1080
1081
// Absolute jump: target must be a word-aligned 28-bit address; the encoded
// field is the word index (target >> 2).
void Assembler::j(int32_t target) {
  ASSERT(is_uint28(target) && ((target & 3) == 0));
  GenInstrJump(J, target >> 2);
}


// Register jump.  A jr through ra is a return, so record positions then.
void Assembler::jr(Register rs) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  if (rs.is(ra)) {
    positions_recorder()->WriteRecordedPositions();
  }
  GenInstrRegister(SPECIAL, rs, zero_reg, zero_reg, 0, JR);
  BlockTrampolinePoolFor(1);  // For associated delay slot.
}


// Absolute jump-and-link (call).
void Assembler::jal(int32_t target) {
  positions_recorder()->WriteRecordedPositions();
  ASSERT(is_uint28(target) && ((target & 3) == 0));
  GenInstrJump(JAL, target >> 2);
}


// Register jump-and-link: return address goes to rd.
void Assembler::jalr(Register rs, Register rd) {
  BlockTrampolinePoolScope block_trampoline_pool(this);
  positions_recorder()->WriteRecordedPositions();
  GenInstrRegister(SPECIAL, rs, zero_reg, rd, 0, JALR);
  BlockTrampolinePoolFor(1);  // For associated delay slot.
}
1111
1112
//-------Data-processing-instructions---------

// Arithmetic.

void Assembler::addu(Register rd, Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, ADDU);
}


// Note: the destination 'rd' is emitted in the rt field of the I-type
// encoding (addiu writes to rt).
void Assembler::addiu(Register rd, Register rs, int32_t j) {
  GenInstrImmediate(ADDIU, rs, rd, j);
}


void Assembler::subu(Register rd, Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, SUBU);
}


// mul writes rd directly; mult/multu/div/divu write HI/LO instead.
void Assembler::mul(Register rd, Register rs, Register rt) {
  GenInstrRegister(SPECIAL2, rs, rt, rd, 0, MUL);
}


void Assembler::mult(Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, MULT);
}


void Assembler::multu(Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, MULTU);
}


void Assembler::div(Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, DIV);
}


void Assembler::divu(Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, DIVU);
}
1155
1156
// Logical.
// Trailing underscores (and_, or_, xor_) avoid clashes with C++ keywords.

void Assembler::and_(Register rd, Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, AND);
}


void Assembler::andi(Register rt, Register rs, int32_t j) {
  GenInstrImmediate(ANDI, rs, rt, j);
}


void Assembler::or_(Register rd, Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, OR);
}


void Assembler::ori(Register rt, Register rs, int32_t j) {
  GenInstrImmediate(ORI, rs, rt, j);
}


void Assembler::xor_(Register rd, Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, XOR);
}


void Assembler::xori(Register rt, Register rs, int32_t j) {
  GenInstrImmediate(XORI, rs, rt, j);
}


void Assembler::nor(Register rd, Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, NOR);
}
1192
1193
// Shifts.

// Shift left logical by immediate.  sll zero_reg, zero_reg, 0 is the
// canonical MIPS nop encoding, hence the guard below.
void Assembler::sll(Register rd,
                    Register rt,
                    uint16_t sa,
                    bool coming_from_nop) {
  // Don't allow nop instructions in the form sll zero_reg, zero_reg to be
  // generated using the sll instruction. They must be generated using
  // nop(int/NopMarkerTypes) or MarkCode(int/NopMarkerTypes) pseudo
  // instructions.
  ASSERT(coming_from_nop || !(rd.is(zero_reg) && rt.is(zero_reg)));
  GenInstrRegister(SPECIAL, zero_reg, rt, rd, sa, SLL);
}
1206
1207
// Variable shifts take the shift amount from rs; immediate shifts encode it
// in the sa field.

void Assembler::sllv(Register rd, Register rt, Register rs) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, SLLV);
}


void Assembler::srl(Register rd, Register rt, uint16_t sa) {
  GenInstrRegister(SPECIAL, zero_reg, rt, rd, sa, SRL);
}


void Assembler::srlv(Register rd, Register rt, Register rs) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, SRLV);
}


void Assembler::sra(Register rd, Register rt, uint16_t sa) {
  GenInstrRegister(SPECIAL, zero_reg, rt, rd, sa, SRA);
}


void Assembler::srav(Register rd, Register rt, Register rs) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, SRAV);
}
1231
1232
// Rotate right by immediate: encoded as SRL with bit 21 (rs field = 1) set.
void Assembler::rotr(Register rd, Register rt, uint16_t sa) {
  // Should be called via MacroAssembler::Ror.
  ASSERT(rd.is_valid() && rt.is_valid() && is_uint5(sa));
  ASSERT(mips32r2);  // Rotates are MIPS32R2-only.
  Instr instr = SPECIAL | (1 << kRsShift) | (rt.code() << kRtShift)
      | (rd.code() << kRdShift) | (sa << kSaShift) | SRL;
  emit(instr);
}


// Rotate right by register: encoded as SRLV with bit 6 (sa field = 1) set.
void Assembler::rotrv(Register rd, Register rt, Register rs) {
  // Should be called via MacroAssembler::Ror.
  ASSERT(rd.is_valid() && rt.is_valid() && rs.is_valid() );
  ASSERT(mips32r2);  // Rotates are MIPS32R2-only.
  Instr instr = SPECIAL | (rs.code() << kRsShift) | (rt.code() << kRtShift)
      | (rd.code() << kRdShift) | (1 << kSaShift) | SRLV;
  emit(instr);
}
1251
1252
//------------Memory-instructions-------------

// Helper for base-reg + offset, when offset is larger than int16.
// Leaves the effective address (base + full 32-bit offset) in 'at'.
void Assembler::LoadRegPlusOffsetToAt(const MemOperand& src) {
  ASSERT(!src.rm().is(at));  // 'at' is about to be clobbered.
  lui(at, src.offset_ >> kLuiShift);
  ori(at, at, src.offset_ & kImm16Mask);  // Load 32-bit offset.
  addu(at, at, src.rm());  // Add base register.
}
1262
1263
Andrei Popescu31002712010-02-23 13:46:05 +00001264void Assembler::lb(Register rd, const MemOperand& rs) {
Steve Block44f0eee2011-05-26 01:26:41 +01001265 if (is_int16(rs.offset_)) {
1266 GenInstrImmediate(LB, rs.rm(), rd, rs.offset_);
1267 } else { // Offset > 16 bits, use multiple instructions to load.
1268 LoadRegPlusOffsetToAt(rs);
1269 GenInstrImmediate(LB, at, rd, 0); // Equiv to lb(rd, MemOperand(at, 0));
1270 }
Andrei Popescu31002712010-02-23 13:46:05 +00001271}
1272
1273
// Loads: one instruction for 16-bit offsets, otherwise address via 'at'.

void Assembler::lbu(Register rd, const MemOperand& rs) {
  if (is_int16(rs.offset_)) {
    GenInstrImmediate(LBU, rs.rm(), rd, rs.offset_);
  } else {  // Offset > 16 bits, use multiple instructions to load.
    LoadRegPlusOffsetToAt(rs);
    GenInstrImmediate(LBU, at, rd, 0);  // Equiv to lbu(rd, MemOperand(at, 0));
  }
}


void Assembler::lh(Register rd, const MemOperand& rs) {
  if (is_int16(rs.offset_)) {
    GenInstrImmediate(LH, rs.rm(), rd, rs.offset_);
  } else {  // Offset > 16 bits, use multiple instructions to load.
    LoadRegPlusOffsetToAt(rs);
    GenInstrImmediate(LH, at, rd, 0);  // Equiv to lh(rd, MemOperand(at, 0));
  }
}


void Assembler::lhu(Register rd, const MemOperand& rs) {
  if (is_int16(rs.offset_)) {
    GenInstrImmediate(LHU, rs.rm(), rd, rs.offset_);
  } else {  // Offset > 16 bits, use multiple instructions to load.
    LoadRegPlusOffsetToAt(rs);
    GenInstrImmediate(LHU, at, rd, 0);  // Equiv to lhu(rd, MemOperand(at, 0));
  }
}


void Assembler::lw(Register rd, const MemOperand& rs) {
  if (is_int16(rs.offset_)) {
    GenInstrImmediate(LW, rs.rm(), rd, rs.offset_);
  } else {  // Offset > 16 bits, use multiple instructions to load.
    LoadRegPlusOffsetToAt(rs);
    GenInstrImmediate(LW, at, rd, 0);  // Equiv to lw(rd, MemOperand(at, 0));
  }
}
1312
1313
// Unaligned-access halves (load word left/right); 16-bit offsets only.

void Assembler::lwl(Register rd, const MemOperand& rs) {
  GenInstrImmediate(LWL, rs.rm(), rd, rs.offset_);
}


void Assembler::lwr(Register rd, const MemOperand& rs) {
  GenInstrImmediate(LWR, rs.rm(), rd, rs.offset_);
}
1322
1323
// Stores: one instruction for 16-bit offsets, otherwise address via 'at'.

void Assembler::sb(Register rd, const MemOperand& rs) {
  if (is_int16(rs.offset_)) {
    GenInstrImmediate(SB, rs.rm(), rd, rs.offset_);
  } else {  // Offset > 16 bits, use multiple instructions to store.
    LoadRegPlusOffsetToAt(rs);
    GenInstrImmediate(SB, at, rd, 0);  // Equiv to sb(rd, MemOperand(at, 0));
  }
}


void Assembler::sh(Register rd, const MemOperand& rs) {
  if (is_int16(rs.offset_)) {
    GenInstrImmediate(SH, rs.rm(), rd, rs.offset_);
  } else {  // Offset > 16 bits, use multiple instructions to store.
    LoadRegPlusOffsetToAt(rs);
    GenInstrImmediate(SH, at, rd, 0);  // Equiv to sh(rd, MemOperand(at, 0));
  }
}


void Assembler::sw(Register rd, const MemOperand& rs) {
  if (is_int16(rs.offset_)) {
    GenInstrImmediate(SW, rs.rm(), rd, rs.offset_);
  } else {  // Offset > 16 bits, use multiple instructions to store.
    LoadRegPlusOffsetToAt(rs);
    GenInstrImmediate(SW, at, rd, 0);  // Equiv to sw(rd, MemOperand(at, 0));
  }
}
1352
1353
// Unaligned-store halves and load-upper-immediate; 16-bit offsets only.

void Assembler::swl(Register rd, const MemOperand& rs) {
  GenInstrImmediate(SWL, rs.rm(), rd, rs.offset_);
}


void Assembler::swr(Register rd, const MemOperand& rs) {
  GenInstrImmediate(SWR, rs.rm(), rd, rs.offset_);
}


// lui places the 16-bit immediate into the upper half of rd.
void Assembler::lui(Register rd, int32_t j) {
  GenInstrImmediate(LUI, zero_reg, rd, j);
}
1367
1368
//-------------Misc-instructions--------------

// Break / Trap instructions.
// Emits a BREAK with a 20-bit code placed in bits 6..25 of the instruction.
void Assembler::break_(uint32_t code, bool break_as_stop) {
  ASSERT((code & ~0xfffff) == 0);  // Code must fit in 20 bits.
  // We need to invalidate breaks that could be stops as well because the
  // simulator expects a char pointer after the stop instruction.
  // See constants-mips.h for explanation.
  ASSERT((break_as_stop &&
          code <= kMaxStopCode &&
          code > kMaxWatchpointCode) ||
         (!break_as_stop &&
          (code > kMaxStopCode ||
           code <= kMaxWatchpointCode)));
  Instr break_instr = SPECIAL | BREAK | (code << 6);
  emit(break_instr);
}
1386
1387
// Emits a stop: on real MIPS hardware a plain break; under the simulator a
// break in the stop-code range followed by the message pointer embedded
// directly in the instruction stream.
void Assembler::stop(const char* msg, uint32_t code) {
  ASSERT(code > kMaxWatchpointCode);
  ASSERT(code <= kMaxStopCode);
#if defined(V8_HOST_ARCH_MIPS)
  break_(0x54321);
#else  // V8_HOST_ARCH_MIPS
  BlockTrampolinePoolFor(2);  // Keep break + message word contiguous.
  // The Simulator will handle the stop instruction and get the message address.
  // On MIPS stop() is just a special kind of break_().
  break_(code, true);
  // NOTE(review): pointer is emitted as a 32-bit Instr — assumes a 32-bit
  // host when simulating.
  emit(reinterpret_cast<Instr>(msg));
#endif
}
1401
1402
// Conditional trap instructions: SPECIAL opcode with a 10-bit trap code in
// bits 6..15 (hence the is_uint10 checks and the << 6 shifts).

void Assembler::tge(Register rs, Register rt, uint16_t code) {
  ASSERT(is_uint10(code));
  Instr instr = SPECIAL | TGE | rs.code() << kRsShift
      | rt.code() << kRtShift | code << 6;
  emit(instr);
}


void Assembler::tgeu(Register rs, Register rt, uint16_t code) {
  ASSERT(is_uint10(code));
  Instr instr = SPECIAL | TGEU | rs.code() << kRsShift
      | rt.code() << kRtShift | code << 6;
  emit(instr);
}


void Assembler::tlt(Register rs, Register rt, uint16_t code) {
  ASSERT(is_uint10(code));
  Instr instr =
      SPECIAL | TLT | rs.code() << kRsShift | rt.code() << kRtShift | code << 6;
  emit(instr);
}


void Assembler::tltu(Register rs, Register rt, uint16_t code) {
  ASSERT(is_uint10(code));
  Instr instr =
      SPECIAL | TLTU | rs.code() << kRsShift
      | rt.code() << kRtShift | code << 6;
  emit(instr);
}


void Assembler::teq(Register rs, Register rt, uint16_t code) {
  ASSERT(is_uint10(code));
  Instr instr =
      SPECIAL | TEQ | rs.code() << kRsShift | rt.code() << kRtShift | code << 6;
  emit(instr);
}


void Assembler::tne(Register rs, Register rt, uint16_t code) {
  ASSERT(is_uint10(code));
  Instr instr =
      SPECIAL | TNE | rs.code() << kRsShift | rt.code() << kRtShift | code << 6;
  emit(instr);
}
1450
1451
// Move from HI/LO register.

void Assembler::mfhi(Register rd) {
  GenInstrRegister(SPECIAL, zero_reg, zero_reg, rd, 0, MFHI);
}


void Assembler::mflo(Register rd) {
  GenInstrRegister(SPECIAL, zero_reg, zero_reg, rd, 0, MFLO);
}
1462
1463
// Set on less than instructions.
void Assembler::slt(Register rd, Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, SLT);
}


void Assembler::sltu(Register rd, Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, SLTU);
}


// Immediate variants write to rt.
void Assembler::slti(Register rt, Register rs, int32_t j) {
  GenInstrImmediate(SLTI, rs, rt, j);
}


void Assembler::sltiu(Register rt, Register rs, int32_t j) {
  GenInstrImmediate(SLTIU, rs, rt, j);
}
1483
1484
// Conditional move.
void Assembler::movz(Register rd, Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, MOVZ);
}


void Assembler::movn(Register rd, Register rs, Register rt) {
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, MOVN);
}


// movt/movf test an FPU condition flag: the rt field packs the 3-bit cc in
// bits 2..4 and the tf bit (1 = move-on-true, 0 = move-on-false) in bit 0.
void Assembler::movt(Register rd, Register rs, uint16_t cc) {
  Register rt;
  rt.code_ = (cc & 0x0007) << 2 | 1;
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, MOVCI);
}


void Assembler::movf(Register rd, Register rs, uint16_t cc) {
  Register rt;
  rt.code_ = (cc & 0x0007) << 2 | 0;
  GenInstrRegister(SPECIAL, rs, rt, rd, 0, MOVCI);
}
1508
1509
// Bit twiddling.
void Assembler::clz(Register rd, Register rs) {
  // Clz instr requires same GPR number in 'rd' and 'rt' fields.
  GenInstrRegister(SPECIAL2, rs, rd, rd, 0, CLZ);
}


// Insert bit field: rd/sa fields carry msb (pos + size - 1) and lsb (pos).
void Assembler::ins_(Register rt, Register rs, uint16_t pos, uint16_t size) {
  // Should be called via MacroAssembler::Ins.
  // Ins instr has 'rt' field as dest, and two uint5: msb, lsb.
  ASSERT(mips32r2);  // INS is MIPS32R2-only.
  GenInstrRegister(SPECIAL3, rs, rt, pos + size - 1, pos, INS);
}


// Extract bit field: rd/sa fields carry msb (size - 1) and lsb (pos).
void Assembler::ext_(Register rt, Register rs, uint16_t pos, uint16_t size) {
  // Should be called via MacroAssembler::Ext.
  // Ext instr has 'rt' field as dest, and two uint5: msb, lsb.
  ASSERT(mips32r2);  // EXT is MIPS32R2-only.
  GenInstrRegister(SPECIAL3, rs, rt, size - 1, pos, EXT);
}
1531
1532
//--------Coprocessor-instructions----------------

// Load, store, move.
void Assembler::lwc1(FPURegister fd, const MemOperand& src) {
  GenInstrImmediate(LWC1, src.rm(), fd, src.offset_);
}


void Assembler::ldc1(FPURegister fd, const MemOperand& src) {
  // Workaround for non-8-byte alignment of HeapNumber, convert 64-bit
  // load to two 32-bit loads.
  GenInstrImmediate(LWC1, src.rm(), fd, src.offset_);
  FPURegister nextfpreg;
  nextfpreg.setcode(fd.code() + 1);  // Odd-numbered partner register.
  GenInstrImmediate(LWC1, src.rm(), nextfpreg, src.offset_ + 4);
}
1549
1550
void Assembler::swc1(FPURegister fd, const MemOperand& src) {
  GenInstrImmediate(SWC1, src.rm(), fd, src.offset_);
}


void Assembler::sdc1(FPURegister fd, const MemOperand& src) {
  // Workaround for non-8-byte alignment of HeapNumber, convert 64-bit
  // store to two 32-bit stores.
  GenInstrImmediate(SWC1, src.rm(), fd, src.offset_);
  FPURegister nextfpreg;
  nextfpreg.setcode(fd.code() + 1);  // Odd-numbered partner register.
  GenInstrImmediate(SWC1, src.rm(), nextfpreg, src.offset_ + 4);
}
1564
1565
// GPR <-> FPU register/control-register moves.

void Assembler::mtc1(Register rt, FPURegister fs) {
  GenInstrRegister(COP1, MTC1, rt, fs, f0);
}


void Assembler::mfc1(Register rt, FPURegister fs) {
  GenInstrRegister(COP1, MFC1, rt, fs, f0);
}


void Assembler::ctc1(Register rt, FPUControlRegister fs) {
  GenInstrRegister(COP1, CTC1, rt, fs);
}


void Assembler::cfc1(Register rt, FPUControlRegister fs) {
  GenInstrRegister(COP1, CFC1, rt, fs);
}
1584
1585
// Arithmetic.
// Double-precision FPU arithmetic; unary ops pass f0 in the unused ft field.

void Assembler::add_d(FPURegister fd, FPURegister fs, FPURegister ft) {
  GenInstrRegister(COP1, D, ft, fs, fd, ADD_D);
}


void Assembler::sub_d(FPURegister fd, FPURegister fs, FPURegister ft) {
  GenInstrRegister(COP1, D, ft, fs, fd, SUB_D);
}


void Assembler::mul_d(FPURegister fd, FPURegister fs, FPURegister ft) {
  GenInstrRegister(COP1, D, ft, fs, fd, MUL_D);
}


void Assembler::div_d(FPURegister fd, FPURegister fs, FPURegister ft) {
  GenInstrRegister(COP1, D, ft, fs, fd, DIV_D);
}


void Assembler::abs_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, ABS_D);
}


void Assembler::mov_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, MOV_D);
}


void Assembler::neg_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, NEG_D);
}


void Assembler::sqrt_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, SQRT_D);
}
1626
1627
// Conversions.
// Single/double <-> 32-bit word (W format) conversions with the various
// rounding modes (cvt uses current mode; trunc/round/floor/ceil are fixed).

void Assembler::cvt_w_s(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, S, f0, fs, fd, CVT_W_S);
}


void Assembler::cvt_w_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, CVT_W_D);
}


void Assembler::trunc_w_s(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, S, f0, fs, fd, TRUNC_W_S);
}


void Assembler::trunc_w_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, TRUNC_W_D);
}


void Assembler::round_w_s(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, S, f0, fs, fd, ROUND_W_S);
}


void Assembler::round_w_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, ROUND_W_D);
}


void Assembler::floor_w_s(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, S, f0, fs, fd, FLOOR_W_S);
}


void Assembler::floor_w_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, FLOOR_W_D);
}


void Assembler::ceil_w_s(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, S, f0, fs, fd, CEIL_W_S);
}


void Assembler::ceil_w_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, CEIL_W_D);
}
1678
1679
// Conversions to the 64-bit L (long fixed-point) format; MIPS32R2-only.

void Assembler::cvt_l_s(FPURegister fd, FPURegister fs) {
  ASSERT(mips32r2);
  GenInstrRegister(COP1, S, f0, fs, fd, CVT_L_S);
}


void Assembler::cvt_l_d(FPURegister fd, FPURegister fs) {
  ASSERT(mips32r2);
  GenInstrRegister(COP1, D, f0, fs, fd, CVT_L_D);
}


void Assembler::trunc_l_s(FPURegister fd, FPURegister fs) {
  ASSERT(mips32r2);
  GenInstrRegister(COP1, S, f0, fs, fd, TRUNC_L_S);
}


void Assembler::trunc_l_d(FPURegister fd, FPURegister fs) {
  ASSERT(mips32r2);
  GenInstrRegister(COP1, D, f0, fs, fd, TRUNC_L_D);
}
1702
1703
1704void Assembler::round_l_s(FPURegister fd, FPURegister fs) {
1705 GenInstrRegister(COP1, S, f0, fs, fd, ROUND_L_S);
1706}
1707
1708
1709void Assembler::round_l_d(FPURegister fd, FPURegister fs) {
1710 GenInstrRegister(COP1, D, f0, fs, fd, ROUND_L_D);
1711}
1712
1713
1714void Assembler::floor_l_s(FPURegister fd, FPURegister fs) {
1715 GenInstrRegister(COP1, S, f0, fs, fd, FLOOR_L_S);
1716}
1717
1718
1719void Assembler::floor_l_d(FPURegister fd, FPURegister fs) {
1720 GenInstrRegister(COP1, D, f0, fs, fd, FLOOR_L_D);
1721}
1722
1723
1724void Assembler::ceil_l_s(FPURegister fd, FPURegister fs) {
1725 GenInstrRegister(COP1, S, f0, fs, fd, CEIL_L_S);
1726}
1727
1728
1729void Assembler::ceil_l_d(FPURegister fd, FPURegister fs) {
1730 GenInstrRegister(COP1, D, f0, fs, fd, CEIL_L_D);
1731}
1732
1733
// Conversions to single/double from W, L, and the other float format.
// L-format sources are MIPS32R2-only, matching the L-producing ops above.

void Assembler::cvt_s_w(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, W, f0, fs, fd, CVT_S_W);
}


void Assembler::cvt_s_l(FPURegister fd, FPURegister fs) {
  ASSERT(mips32r2);
  GenInstrRegister(COP1, L, f0, fs, fd, CVT_S_L);
}


void Assembler::cvt_s_d(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, D, f0, fs, fd, CVT_S_D);
}


void Assembler::cvt_d_w(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, W, f0, fs, fd, CVT_D_W);
}


void Assembler::cvt_d_l(FPURegister fd, FPURegister fs) {
  ASSERT(mips32r2);
  GenInstrRegister(COP1, L, f0, fs, fd, CVT_D_L);
}


void Assembler::cvt_d_s(FPURegister fd, FPURegister fs) {
  GenInstrRegister(COP1, S, f0, fs, fd, CVT_D_S);
}
1764
1765
// Conditions.
// FPU compare: sets condition flag cc from comparing fs and ft in format
// fmt; bits 4..5 fixed at 3 select the compare instruction class.
void Assembler::c(FPUCondition cond, SecondaryField fmt,
                  FPURegister fs, FPURegister ft, uint16_t cc) {
  ASSERT(CpuFeatures::IsEnabled(FPU));
  ASSERT(is_uint3(cc));  // Only 8 FPU condition flags exist.
  ASSERT((fmt & ~(31 << kRsShift)) == 0);  // fmt must stay in the rs field.
  Instr instr = COP1 | fmt | ft.code() << 16 | fs.code() << kFsShift
      | cc << 8 | 3 << 4 | cond;
  emit(instr);
}
1776
1777
// Compares src1 against 0.0 into FPU condition flag 0.
// NOTE: clobbers f14 (used as scratch to materialize the 0.0 constant).
void Assembler::fcmp(FPURegister src1, const double src2,
                     FPUCondition cond) {
  ASSERT(CpuFeatures::IsEnabled(FPU));
  ASSERT(src2 == 0.0);  // Only comparison against zero is supported.
  mtc1(zero_reg, f14);
  cvt_d_w(f14, f14);
  c(cond, D, src1, f14, 0);
}
1786
1787
// Branch on FPU condition flag cc false / true; bit 16 is the tf bit.

void Assembler::bc1f(int16_t offset, uint16_t cc) {
  ASSERT(CpuFeatures::IsEnabled(FPU));
  ASSERT(is_uint3(cc));
  Instr instr = COP1 | BC1 | cc << 18 | 0 << 16 | (offset & kImm16Mask);
  emit(instr);
}


void Assembler::bc1t(int16_t offset, uint16_t cc) {
  ASSERT(CpuFeatures::IsEnabled(FPU));
  ASSERT(is_uint3(cc));
  Instr instr = COP1 | BC1 | cc << 18 | 1 << 16 | (offset & kImm16Mask);
  emit(instr);
}
1802
1803
1804// Debugging.
// Debugging.
// Record the current pc as a JS return site: flush any pending source
// positions, then emit a JS_RETURN reloc entry at this pc.
void Assembler::RecordJSReturn() {
  positions_recorder()->WriteRecordedPositions();
  CheckBuffer();  // Ensure there is room for the reloc info.
  RecordRelocInfo(RelocInfo::JS_RETURN);
}
1810
1811
// Record the current pc as a debug-break slot: flush any pending source
// positions, then emit a DEBUG_BREAK_SLOT reloc entry at this pc.
void Assembler::RecordDebugBreakSlot() {
  positions_recorder()->WriteRecordedPositions();
  CheckBuffer();  // Ensure there is room for the reloc info.
  RecordRelocInfo(RelocInfo::DEBUG_BREAK_SLOT);
}
1817
1818
Andrei Popescu31002712010-02-23 13:46:05 +00001819void Assembler::RecordComment(const char* msg) {
Steve Block44f0eee2011-05-26 01:26:41 +01001820 if (FLAG_code_comments) {
Andrei Popescu31002712010-02-23 13:46:05 +00001821 CheckBuffer();
1822 RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg));
1823 }
1824}
1825
1826
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001827int Assembler::RelocateInternalReference(byte* pc, intptr_t pc_delta) {
1828 Instr instr = instr_at(pc);
1829 ASSERT(IsJ(instr) || IsLui(instr));
1830 if (IsLui(instr)) {
1831 Instr instr_lui = instr_at(pc + 0 * Assembler::kInstrSize);
1832 Instr instr_ori = instr_at(pc + 1 * Assembler::kInstrSize);
1833 ASSERT(IsOri(instr_ori));
1834 int32_t imm = (instr_lui & static_cast<int32_t>(kImm16Mask)) << kLuiShift;
1835 imm |= (instr_ori & static_cast<int32_t>(kImm16Mask));
1836 if (imm == kEndOfJumpChain) {
1837 return 0; // Number of instructions patched.
1838 }
1839 imm += pc_delta;
1840 ASSERT((imm & 3) == 0);
1841
1842 instr_lui &= ~kImm16Mask;
1843 instr_ori &= ~kImm16Mask;
1844
1845 instr_at_put(pc + 0 * Assembler::kInstrSize,
1846 instr_lui | ((imm >> kLuiShift) & kImm16Mask));
1847 instr_at_put(pc + 1 * Assembler::kInstrSize,
1848 instr_ori | (imm & kImm16Mask));
1849 return 2; // Number of instructions patched.
1850 } else {
1851 uint32_t imm28 = (instr & static_cast<int32_t>(kImm26Mask)) << 2;
1852 if ((int32_t)imm28 == kEndOfJumpChain) {
1853 return 0; // Number of instructions patched.
1854 }
1855 imm28 += pc_delta;
1856 imm28 &= kImm28Mask;
1857 ASSERT((imm28 & 3) == 0);
1858
1859 instr &= ~kImm26Mask;
1860 uint32_t imm26 = imm28 >> 2;
1861 ASSERT(is_uint26(imm26));
1862
1863 instr_at_put(pc, instr | (imm26 & kImm26Mask));
1864 return 1; // Number of instructions patched.
1865 }
1866}
1867
1868
// Grow the internal code buffer when it runs out of space. Instructions
// grow up from the start of the buffer and reloc info grows down from the
// end, so the two regions are copied with different deltas and internal
// references into the instruction stream must be re-patched afterwards.
void Assembler::GrowBuffer() {
  if (!own_buffer_) FATAL("external code buffer is too small");

  // Compute new buffer size: 4KB minimum, doubling up to 1MB, then
  // growing linearly by 1MB per step.
  CodeDesc desc;  // The new buffer.
  if (buffer_size_ < 4*KB) {
    desc.buffer_size = 4*KB;
  } else if (buffer_size_ < 1*MB) {
    desc.buffer_size = 2*buffer_size_;
  } else {
    desc.buffer_size = buffer_size_ + 1*MB;
  }
  CHECK_GT(desc.buffer_size, 0);  // No overflow.

  // Setup new buffer.
  desc.buffer = NewArray<byte>(desc.buffer_size);

  desc.instr_size = pc_offset();
  // Reloc info occupies the region from reloc_info_writer.pos() to the
  // end of the old buffer.
  desc.reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();

  // Copy the data. pc_delta shifts the instruction region (anchored at
  // the buffer start); rc_delta shifts the reloc region (anchored at the
  // buffer end).
  int pc_delta = desc.buffer - buffer_;
  int rc_delta = (desc.buffer + desc.buffer_size) - (buffer_ + buffer_size_);
  memmove(desc.buffer, buffer_, desc.instr_size);
  memmove(reloc_info_writer.pos() + rc_delta,
          reloc_info_writer.pos(), desc.reloc_size);

  // Switch buffers.
  DeleteArray(buffer_);
  buffer_ = desc.buffer;
  buffer_size_ = desc.buffer_size;
  pc_ += pc_delta;
  reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
                               reloc_info_writer.last_pc() + pc_delta);

  // Relocate runtime entries: internal references are absolute addresses
  // into the instruction stream, so each must be shifted by pc_delta.
  for (RelocIterator it(desc); !it.done(); it.next()) {
    RelocInfo::Mode rmode = it.rinfo()->rmode();
    if (rmode == RelocInfo::INTERNAL_REFERENCE) {
      byte* p = reinterpret_cast<byte*>(it.rinfo()->pc());
      RelocateInternalReference(p, pc_delta);
    }
  }

  ASSERT(!overflow());
}
1915
1916
Steve Block44f0eee2011-05-26 01:26:41 +01001917void Assembler::db(uint8_t data) {
1918 CheckBuffer();
1919 *reinterpret_cast<uint8_t*>(pc_) = data;
1920 pc_ += sizeof(uint8_t);
1921}
1922
1923
1924void Assembler::dd(uint32_t data) {
1925 CheckBuffer();
1926 *reinterpret_cast<uint32_t*>(pc_) = data;
1927 pc_ += sizeof(uint32_t);
1928}
1929
1930
// Record relocation info of mode |rmode| (with optional payload |data|)
// at the current pc, subject to filtering: external references are
// dropped unless serializing or --debug-code, and NONE entries are never
// written.
void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
  RelocInfo rinfo(pc_, rmode, data);  // We do not try to reuse pool constants.
  if (rmode >= RelocInfo::JS_RETURN && rmode <= RelocInfo::DEBUG_BREAK_SLOT) {
    // Adjust code for new modes.
    // This branch intentionally only asserts: these modes carry no
    // payload that would need special handling here.
    ASSERT(RelocInfo::IsDebugBreakSlot(rmode)
           || RelocInfo::IsJSReturn(rmode)
           || RelocInfo::IsComment(rmode)
           || RelocInfo::IsPosition(rmode));
    // These modes do not need an entry in the constant pool.
  }
  if (rinfo.rmode() != RelocInfo::NONE) {
    // Don't record external references unless the heap will be serialized.
    if (rmode == RelocInfo::EXTERNAL_REFERENCE &&
        !Serializer::enabled() &&
        !FLAG_debug_code) {
      return;
    }
    ASSERT(buffer_space() >= kMaxRelocSize);  // Too late to grow buffer here.
    if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
      // Code targets with ids carry the pending AST id as payload instead
      // of |data|; consume it so the next call starts fresh.
      RelocInfo reloc_info_with_ast_id(pc_, rmode, RecordedAstId());
      ClearRecordedAstId();
      reloc_info_writer.Write(&reloc_info_with_ast_id);
    } else {
      reloc_info_writer.Write(&rinfo);
    }
  }
}
1958
1959
// Block trampoline pool emission for the next |instructions| instructions
// from the current pc.
void Assembler::BlockTrampolinePoolFor(int instructions) {
  BlockTrampolinePoolBefore(pc_offset() + instructions * kInstrSize);
}
1963
1964
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001965void Assembler::CheckTrampolinePool() {
Steve Block44f0eee2011-05-26 01:26:41 +01001966 // Some small sequences of instructions must not be broken up by the
1967 // insertion of a trampoline pool; such sequences are protected by setting
1968 // either trampoline_pool_blocked_nesting_ or no_trampoline_pool_before_,
1969 // which are both checked here. Also, recursive calls to CheckTrampolinePool
1970 // are blocked by trampoline_pool_blocked_nesting_.
1971 if ((trampoline_pool_blocked_nesting_ > 0) ||
1972 (pc_offset() < no_trampoline_pool_before_)) {
1973 // Emission is currently blocked; make sure we try again as soon as
1974 // possible.
1975 if (trampoline_pool_blocked_nesting_ > 0) {
1976 next_buffer_check_ = pc_offset() + kInstrSize;
1977 } else {
1978 next_buffer_check_ = no_trampoline_pool_before_;
1979 }
1980 return;
1981 }
1982
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001983 ASSERT(!trampoline_emitted_);
1984 ASSERT(unbound_labels_count_ >= 0);
1985 if (unbound_labels_count_ > 0) {
1986 // First we emit jump (2 instructions), then we emit trampoline pool.
1987 { BlockTrampolinePoolScope block_trampoline_pool(this);
1988 Label after_pool;
Steve Block44f0eee2011-05-26 01:26:41 +01001989 b(&after_pool);
1990 nop();
Steve Block44f0eee2011-05-26 01:26:41 +01001991
Ben Murdoch3fb3ca82011-12-02 17:19:32 +00001992 int pool_start = pc_offset();
1993 for (int i = 0; i < unbound_labels_count_; i++) {
1994 uint32_t imm32;
1995 imm32 = jump_address(&after_pool);
1996 { BlockGrowBufferScope block_buf_growth(this);
1997 // Buffer growth (and relocation) must be blocked for internal
1998 // references until associated instructions are emitted and available
1999 // to be patched.
2000 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
2001 lui(at, (imm32 & kHiMask) >> kLuiShift);
2002 ori(at, at, (imm32 & kImm16Mask));
2003 }
2004 jr(at);
2005 nop();
2006 }
2007 bind(&after_pool);
2008 trampoline_ = Trampoline(pool_start, unbound_labels_count_);
2009
2010 trampoline_emitted_ = true;
2011 // As we are only going to emit trampoline once, we need to prevent any
2012 // further emission.
2013 next_buffer_check_ = kMaxInt;
2014 }
2015 } else {
2016 // Number of branches to unbound label at this point is zero, so we can
2017 // move next buffer check to maximum.
2018 next_buffer_check_ = pc_offset() +
2019 kMaxBranchOffset - kTrampolineSlotsSize * 16;
Steve Block44f0eee2011-05-26 01:26:41 +01002020 }
2021 return;
2022}
2023
2024
Andrei Popescu31002712010-02-23 13:46:05 +00002025Address Assembler::target_address_at(Address pc) {
2026 Instr instr1 = instr_at(pc);
2027 Instr instr2 = instr_at(pc + kInstrSize);
Ben Murdoch257744e2011-11-30 15:57:28 +00002028 // Interpret 2 instructions generated by li: lui/ori
2029 if ((GetOpcodeField(instr1) == LUI) && (GetOpcodeField(instr2) == ORI)) {
2030 // Assemble the 32 bit value.
Andrei Popescu31002712010-02-23 13:46:05 +00002031 return reinterpret_cast<Address>(
Ben Murdoch257744e2011-11-30 15:57:28 +00002032 (GetImmediate16(instr1) << 16) | GetImmediate16(instr2));
Andrei Popescu31002712010-02-23 13:46:05 +00002033 }
2034
Ben Murdoch257744e2011-11-30 15:57:28 +00002035 // We should never get here, force a bad address if we do.
Andrei Popescu31002712010-02-23 13:46:05 +00002036 UNREACHABLE();
2037 return (Address)0x0;
2038}
2039
2040
// Patch the 32-bit address held by the lui/ori pair at |pc| to |target|,
// preserving the destination register, then flush the icache over the two
// patched instructions.
void Assembler::set_target_address_at(Address pc, Address target) {
  // On MIPS we patch the address into lui/ori instruction pair.

  // First check we have an li (lui/ori pair).
  Instr instr2 = instr_at(pc + kInstrSize);
#ifdef DEBUG
  Instr instr1 = instr_at(pc);

  // Check we have indeed the result from a li with MustUseReg true.
  CHECK((GetOpcodeField(instr1) == LUI && GetOpcodeField(instr2) == ORI));
#endif

  // Reuse the destination register of the existing pair.
  uint32_t rt_code = GetRtField(instr2);
  uint32_t* p = reinterpret_cast<uint32_t*>(pc);
  uint32_t itarget = reinterpret_cast<uint32_t>(target);

  // lui rt, high-16.
  // ori rt rt, low-16.
  *p = LUI | rt_code | ((itarget & kHiMask) >> kLuiShift);
  *(p+1) = ORI | rt_code | (rt_code << 5) | (itarget & kImm16Mask);

  // Ensure the processor does not execute the stale instructions.
  CPU::FlushICache(pc, 2 * sizeof(int32_t));
}
2064
2065
2066} } // namespace v8::internal
2067
Leon Clarkef7060e22010-06-03 12:02:55 +01002068#endif // V8_TARGET_ARCH_MIPS