blob: 2190531b435d575c8b331fa57985b37dc18e88ba [file] [log] [blame]
Ben Murdoch3ef787d2012-04-12 10:51:47 +01001// Copyright 2012 the V8 project authors. All rights reserved.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
Steve Blocka7e24c12009-10-30 11:49:00 +00004
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005#include "src/ia32/codegen-ia32.h"
Steve Blocka7e24c12009-10-30 11:49:00 +00006
Ben Murdochb8a8cc12014-11-26 15:28:44 +00007#if V8_TARGET_ARCH_IA32
Leon Clarkef7060e22010-06-03 12:02:55 +01008
Ben Murdochb8a8cc12014-11-26 15:28:44 +00009#include "src/codegen.h"
10#include "src/heap/heap.h"
11#include "src/macro-assembler.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000012
13namespace v8 {
14namespace internal {
15
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010016
17// -------------------------------------------------------------------------
18// Platform-specific RuntimeCallHelper functions.
19
Ben Murdochb0fe1622011-05-05 13:52:32 +010020void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
Ben Murdoch3ef787d2012-04-12 10:51:47 +010021 masm->EnterFrame(StackFrame::INTERNAL);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000022 DCHECK(!masm->has_frame());
Ben Murdoch3ef787d2012-04-12 10:51:47 +010023 masm->set_has_frame(true);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010024}
25
26
Ben Murdochb0fe1622011-05-05 13:52:32 +010027void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
Ben Murdoch3ef787d2012-04-12 10:51:47 +010028 masm->LeaveFrame(StackFrame::INTERNAL);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000029 DCHECK(masm->has_frame());
Ben Murdoch3ef787d2012-04-12 10:51:47 +010030 masm->set_has_frame(false);
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010031}
32
33
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +010034#define __ masm.
35
Ben Murdoch3ef787d2012-04-12 10:51:47 +010036
// Generates a free-standing machine-code routine computing exp() of a raw
// double passed on the stack (cdecl), returning the result on the x87 stack
// as required for a double return value on ia32.
// Returns nullptr if executable memory could not be allocated.
UnaryMathFunctionWithIsolate CreateExpFunction(Isolate* isolate) {
  size_t actual_size;
  // Allocate a small buffer in executable space (last arg requests
  // executable memory).
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == nullptr) return nullptr;
  // Make sure the table data consumed by MathExpGenerator is initialized.
  ExternalReference::InitializeMathExpData();

  MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
                      CodeObjectRequired::kNo);
  // esp[1 * kPointerSize]: raw double input
  // esp[0 * kPointerSize]: return address
  {
    XMMRegister input = xmm1;
    XMMRegister result = xmm2;
    __ movsd(input, Operand(esp, 1 * kPointerSize));
    // eax/ebx are handed to EmitMathExp as scratch registers; preserve the
    // caller's values around the computation.
    __ push(eax);
    __ push(ebx);

    MathExpGenerator::EmitMathExp(&masm, input, result, xmm0, eax, ebx);

    __ pop(ebx);
    __ pop(eax);
    // Spill the SSE result to the stack and reload it via fld_d so the
    // value is returned in st(0), per the ia32 calling convention.
    __ movsd(Operand(esp, 1 * kPointerSize), result);
    __ fld_d(Operand(esp, 1 * kPointerSize));
    __ Ret();
  }

  CodeDesc desc;
  masm.GetCode(&desc);
  // The buffer is fixed and unmovable, so the generated code must not
  // require relocation.
  DCHECK(!RelocInfo::RequiresRelocation(desc));

  Assembler::FlushICache(isolate, buffer, actual_size);
  // Flip the buffer from writable to executable before handing it out.
  base::OS::ProtectCode(buffer, actual_size);
  return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
}
72
73
// Generates a free-standing machine-code routine computing sqrt() of a raw
// double passed on the stack (cdecl), returning the result on the x87 stack.
// Returns nullptr if executable memory could not be allocated.
UnaryMathFunctionWithIsolate CreateSqrtFunction(Isolate* isolate) {
  size_t actual_size;
  // Allocate buffer in executable space.
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == nullptr) return nullptr;
  MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
                      CodeObjectRequired::kNo);
  // esp[1 * kPointerSize]: raw double input
  // esp[0 * kPointerSize]: return address
  // Move double input into registers.
  {
    __ movsd(xmm0, Operand(esp, 1 * kPointerSize));
    // Single-instruction sqrt in SSE2.
    __ sqrtsd(xmm0, xmm0);
    __ movsd(Operand(esp, 1 * kPointerSize), xmm0);
    // Load result into floating point register as return value
    // (doubles are returned in st(0) on ia32).
    __ fld_d(Operand(esp, 1 * kPointerSize));
    __ Ret();
  }

  CodeDesc desc;
  masm.GetCode(&desc);
  // The buffer is fixed and unmovable; no relocation may be required.
  DCHECK(!RelocInfo::RequiresRelocation(desc));

  Assembler::FlushICache(isolate, buffer, actual_size);
  // Flip the buffer from writable to executable before handing it out.
  base::OS::ProtectCode(buffer, actual_size);
  return FUNCTION_CAST<UnaryMathFunctionWithIsolate>(buffer);
}
102
103
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000104// Helper functions for CreateMemMoveFunction.
105#undef __
106#define __ ACCESS_MASM(masm)
107
// Copy direction for the memmove main loop: FORWARD walks addresses upward,
// BACKWARD walks them downward (used when dst overlaps above src).
enum Direction { FORWARD, BACKWARD };
// Whether the source pointer is known to be 16-byte aligned; selects
// aligned vs. unaligned SSE loads via MacroAssembler::movdq.
enum Alignment { MOVE_ALIGNED, MOVE_UNALIGNED };
110
// Emits the main copy loop shared by the forward and backward memmove
// paths: 64-byte chunks, then a 32-byte and a 16-byte tail, jumping to
// |move_last_15| with at most 15 bytes left in |count|'s low bits.
// Expects registers:
// esi - source, aligned if alignment == MOVE_ALIGNED
// edi - destination, always aligned
// ecx - count (copy size in bytes)
// edx - loop count (number of 64 byte chunks)
void MemMoveEmitMainLoop(MacroAssembler* masm,
                         Label* move_last_15,
                         Direction direction,
                         Alignment alignment) {
  Register src = esi;
  Register dst = edi;
  Register count = ecx;
  Register loop_count = edx;
  Label loop, move_last_31, move_last_63;
  __ cmp(loop_count, 0);
  __ j(equal, &move_last_63);
  __ bind(&loop);
  // Main loop. Copy in 64 byte chunks.
  // For BACKWARD, pointers are pre-decremented; for FORWARD they are
  // post-incremented, so the Operand offsets below work for both.
  if (direction == BACKWARD) __ sub(src, Immediate(0x40));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0x00));
  __ movdq(alignment == MOVE_ALIGNED, xmm1, Operand(src, 0x10));
  __ movdq(alignment == MOVE_ALIGNED, xmm2, Operand(src, 0x20));
  __ movdq(alignment == MOVE_ALIGNED, xmm3, Operand(src, 0x30));
  if (direction == FORWARD) __ add(src, Immediate(0x40));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x40));
  // Destination is always aligned, so plain movdqa stores suffice.
  __ movdqa(Operand(dst, 0x00), xmm0);
  __ movdqa(Operand(dst, 0x10), xmm1);
  __ movdqa(Operand(dst, 0x20), xmm2);
  __ movdqa(Operand(dst, 0x30), xmm3);
  if (direction == FORWARD) __ add(dst, Immediate(0x40));
  __ dec(loop_count);
  __ j(not_zero, &loop);
  // At most 63 bytes left to copy.
  __ bind(&move_last_63);
  // Test bit 5 of count: a 32-byte chunk remains.
  __ test(count, Immediate(0x20));
  __ j(zero, &move_last_31);
  if (direction == BACKWARD) __ sub(src, Immediate(0x20));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0x00));
  __ movdq(alignment == MOVE_ALIGNED, xmm1, Operand(src, 0x10));
  if (direction == FORWARD) __ add(src, Immediate(0x20));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x20));
  __ movdqa(Operand(dst, 0x00), xmm0);
  __ movdqa(Operand(dst, 0x10), xmm1);
  if (direction == FORWARD) __ add(dst, Immediate(0x20));
  // At most 31 bytes left to copy.
  __ bind(&move_last_31);
  // Test bit 4 of count: a 16-byte chunk remains.
  __ test(count, Immediate(0x10));
  __ j(zero, move_last_15);
  if (direction == BACKWARD) __ sub(src, Immediate(0x10));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0));
  if (direction == FORWARD) __ add(src, Immediate(0x10));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x10));
  __ movdqa(Operand(dst, 0), xmm0);
  if (direction == FORWARD) __ add(dst, Immediate(0x10));
}
166
167
// Emits the common epilogue for the memmove stub: restores the callee-saved
// esi/edi pushed in the prologue (in reverse order) and returns.
void MemMoveEmitPopAndReturn(MacroAssembler* masm) {
  __ pop(esi);
  __ pop(edi);
  __ ret(0);
}
173
174
175#undef __
176#define __ masm.
177
178
// Converts a bound Label's position within the generated code buffer into
// an absolute 32-bit address, for emitting jump tables with __ dd().
// NOTE(review): the reinterpret_cast to int32_t assumes pointers fit in
// 32 bits — valid for this ia32-only file, not portable elsewhere.
class LabelConverter {
 public:
  explicit LabelConverter(byte* buffer) : buffer_(buffer) {}
  // Absolute address of |l| (must be bound) inside buffer_.
  int32_t address(Label* l) const {
    return reinterpret_cast<int32_t>(buffer_) + l->pos();
  }
 private:
  byte* buffer_;
};
188
189
// Generates an overlap-safe memmove routine (cdecl: dst, src, size) in a
// fixed executable buffer. Dispatches on copy size (small <= 8, medium
// <= 63, large) and on the distance between dst and src, using SSE copies
// with an aligned destination where possible. Returns nullptr if executable
// memory could not be allocated.
MemMoveFunction CreateMemMoveFunction(Isolate* isolate) {
  size_t actual_size;
  // Allocate buffer in executable space.
  byte* buffer =
      static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
  if (buffer == nullptr) return nullptr;
  MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size),
                      CodeObjectRequired::kNo);
  // Converts labels into absolute addresses for the jump tables below.
  LabelConverter conv(buffer);

  // Generated code is put into a fixed, unmovable buffer, and not into
  // the V8 heap. We can't, and don't, refer to any relocatable addresses
  // (e.g. the JavaScript nan-object).

  // 32-bit C declaration function calls pass arguments on stack.

  // Stack layout:
  // esp[12]: Third argument, size.
  // esp[8]: Second argument, source pointer.
  // esp[4]: First argument, destination pointer.
  // esp[0]: return address

  const int kDestinationOffset = 1 * kPointerSize;
  const int kSourceOffset = 2 * kPointerSize;
  const int kSizeOffset = 3 * kPointerSize;

  // When copying up to this many bytes, use special "small" handlers.
  const size_t kSmallCopySize = 8;
  // When copying up to this many bytes, use special "medium" handlers.
  const size_t kMediumCopySize = 63;
  // When non-overlapping region of src and dst is less than this,
  // use a more careful implementation (slightly slower).
  const size_t kMinMoveDistance = 16;
  // Note that these values are dictated by the implementation below,
  // do not just change them and hope things will work!

  int stack_offset = 0;  // Update if we change the stack height.

  Label backward, backward_much_overlap;
  Label forward_much_overlap, small_size, medium_size, pop_and_return;
  // Prologue: save callee-saved registers used as dst/src below.
  __ push(edi);
  __ push(esi);
  stack_offset += 2 * kPointerSize;
  Register dst = edi;
  Register src = esi;
  Register count = ecx;
  Register loop_count = edx;
  __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
  __ mov(src, Operand(esp, stack_offset + kSourceOffset));
  __ mov(count, Operand(esp, stack_offset + kSizeOffset));

  // dst == src: nothing to do.
  __ cmp(dst, src);
  __ j(equal, &pop_and_return);

  __ prefetch(Operand(src, 0), 1);
  // Size-based dispatch: small, then medium, then the general paths.
  __ cmp(count, kSmallCopySize);
  __ j(below_equal, &small_size);
  __ cmp(count, kMediumCopySize);
  __ j(below_equal, &medium_size);
  // dst above src: must copy backwards to be overlap-safe.
  __ cmp(dst, src);
  __ j(above, &backward);

  {
    // |dst| is a lower address than |src|. Copy front-to-back.
    Label unaligned_source, move_last_15, skip_last_move;
    __ mov(eax, src);
    __ sub(eax, dst);
    __ cmp(eax, kMinMoveDistance);
    __ j(below, &forward_much_overlap);
    // Copy first 16 bytes.
    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(Operand(dst, 0), xmm0);
    // Determine distance to alignment: 16 - (dst & 0xF).
    // (Safe because the first 16 bytes were already copied above.)
    __ mov(edx, dst);
    __ and_(edx, 0xF);
    __ neg(edx);
    __ add(edx, Immediate(16));
    __ add(dst, edx);
    __ add(src, edx);
    __ sub(count, edx);
    // dst is now aligned. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    // Check if src is also aligned.
    __ test(src, Immediate(0xF));
    __ j(not_zero, &unaligned_source);
    // Copy loop for aligned source and destination.
    MemMoveEmitMainLoop(&masm, &move_last_15, FORWARD, MOVE_ALIGNED);
    // At most 15 bytes to copy. Copy 16 bytes at end of string.
    __ bind(&move_last_15);
    __ and_(count, 0xF);
    __ j(zero, &skip_last_move, Label::kNear);
    __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);
    __ bind(&skip_last_move);
    MemMoveEmitPopAndReturn(&masm);

    // Copy loop for unaligned source and aligned destination.
    __ bind(&unaligned_source);
    MemMoveEmitMainLoop(&masm, &move_last_15, FORWARD, MOVE_UNALIGNED);
    __ jmp(&move_last_15);

    // Less than kMinMoveDistance offset between dst and src.
    // Byte-by-byte copy until dst is 16-byte aligned; safe for any overlap.
    Label loop_until_aligned, last_15_much_overlap;
    __ bind(&loop_until_aligned);
    __ mov_b(eax, Operand(src, 0));
    __ inc(src);
    __ mov_b(Operand(dst, 0), eax);
    __ inc(dst);
    __ dec(count);
    __ bind(&forward_much_overlap);  // Entry point into this block.
    __ test(dst, Immediate(0xF));
    __ j(not_zero, &loop_until_aligned);
    // dst is now aligned, src can't be. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    MemMoveEmitMainLoop(&masm, &last_15_much_overlap,
                        FORWARD, MOVE_UNALIGNED);
    __ bind(&last_15_much_overlap);
    __ and_(count, 0xF);
    __ j(zero, &pop_and_return);
    // Finish the tail through the small/medium handlers; the 16-byte
    // overshoot trick above is not overlap-safe here.
    __ cmp(count, kSmallCopySize);
    __ j(below_equal, &small_size);
    __ jmp(&medium_size);
  }

  {
    // |dst| is a higher address than |src|. Copy backwards.
    Label unaligned_source, move_first_15, skip_last_move;
    __ bind(&backward);
    // |dst| and |src| always point to the end of what's left to copy.
    __ add(dst, count);
    __ add(src, count);
    __ mov(eax, dst);
    __ sub(eax, src);
    __ cmp(eax, kMinMoveDistance);
    __ j(below, &backward_much_overlap);
    // Copy last 16 bytes.
    __ movdqu(xmm0, Operand(src, -0x10));
    __ movdqu(Operand(dst, -0x10), xmm0);
    // Find distance to alignment: dst & 0xF
    __ mov(edx, dst);
    __ and_(edx, 0xF);
    __ sub(dst, edx);
    __ sub(src, edx);
    __ sub(count, edx);
    // dst is now aligned. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    // Check if src is also aligned.
    __ test(src, Immediate(0xF));
    __ j(not_zero, &unaligned_source);
    // Copy loop for aligned source and destination.
    MemMoveEmitMainLoop(&masm, &move_first_15, BACKWARD, MOVE_ALIGNED);
    // At most 15 bytes to copy. Copy 16 bytes at beginning of string.
    __ bind(&move_first_15);
    __ and_(count, 0xF);
    __ j(zero, &skip_last_move, Label::kNear);
    __ sub(src, count);
    __ sub(dst, count);
    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(Operand(dst, 0), xmm0);
    __ bind(&skip_last_move);
    MemMoveEmitPopAndReturn(&masm);

    // Copy loop for unaligned source and aligned destination.
    __ bind(&unaligned_source);
    MemMoveEmitMainLoop(&masm, &move_first_15, BACKWARD, MOVE_UNALIGNED);
    __ jmp(&move_first_15);

    // Less than kMinMoveDistance offset between dst and src.
    // Byte-by-byte backward copy until dst is 16-byte aligned.
    Label loop_until_aligned, first_15_much_overlap;
    __ bind(&loop_until_aligned);
    __ dec(src);
    __ dec(dst);
    __ mov_b(eax, Operand(src, 0));
    __ mov_b(Operand(dst, 0), eax);
    __ dec(count);
    __ bind(&backward_much_overlap);  // Entry point into this block.
    __ test(dst, Immediate(0xF));
    __ j(not_zero, &loop_until_aligned);
    // dst is now aligned, src can't be. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    MemMoveEmitMainLoop(&masm, &first_15_much_overlap,
                        BACKWARD, MOVE_UNALIGNED);
    __ bind(&first_15_much_overlap);
    __ and_(count, 0xF);
    __ j(zero, &pop_and_return);
    // Small/medium handlers expect dst/src to point to the beginning.
    __ sub(dst, count);
    __ sub(src, count);
    __ cmp(count, kSmallCopySize);
    __ j(below_equal, &small_size);
    __ jmp(&medium_size);
  }
  {
    // Special handlers for 9 <= copy_size < 64. No assumptions about
    // alignment or move distance, so all reads must be unaligned and
    // must happen before any writes.
    Label medium_handlers, f9_16, f17_32, f33_48, f49_63;

    __ bind(&f9_16);
    __ movsd(xmm0, Operand(src, 0));
    __ movsd(xmm1, Operand(src, count, times_1, -8));
    __ movsd(Operand(dst, 0), xmm0);
    __ movsd(Operand(dst, count, times_1, -8), xmm1);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f17_32);
    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(xmm1, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, 0x00), xmm0);
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm1);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f33_48);
    __ movdqu(xmm0, Operand(src, 0x00));
    __ movdqu(xmm1, Operand(src, 0x10));
    __ movdqu(xmm2, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, 0x00), xmm0);
    __ movdqu(Operand(dst, 0x10), xmm1);
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm2);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f49_63);
    __ movdqu(xmm0, Operand(src, 0x00));
    __ movdqu(xmm1, Operand(src, 0x10));
    __ movdqu(xmm2, Operand(src, 0x20));
    __ movdqu(xmm3, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, 0x00), xmm0);
    __ movdqu(Operand(dst, 0x10), xmm1);
    __ movdqu(Operand(dst, 0x20), xmm2);
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm3);
    MemMoveEmitPopAndReturn(&masm);

    // Jump table of the four handlers above, emitted as raw addresses.
    __ bind(&medium_handlers);
    __ dd(conv.address(&f9_16));
    __ dd(conv.address(&f17_32));
    __ dd(conv.address(&f33_48));
    __ dd(conv.address(&f49_63));

    __ bind(&medium_size);  // Entry point into this block.
    // Index = (count - 1) / 16, giving 0..3 for 9..63 bytes.
    __ mov(eax, count);
    __ dec(eax);
    __ shr(eax, 4);
    if (FLAG_debug_code) {
      Label ok;
      __ cmp(eax, 3);
      __ j(below_equal, &ok);
      __ int3();
      __ bind(&ok);
    }
    __ mov(eax, Operand(eax, times_4, conv.address(&medium_handlers)));
    __ jmp(eax);
  }
  {
    // Specialized copiers for copy_size <= 8 bytes.
    Label small_handlers, f0, f1, f2, f3, f4, f5_8;
    __ bind(&f0);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f1);
    __ mov_b(eax, Operand(src, 0));
    __ mov_b(Operand(dst, 0), eax);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f2);
    __ mov_w(eax, Operand(src, 0));
    __ mov_w(Operand(dst, 0), eax);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f3);
    __ mov_w(eax, Operand(src, 0));
    __ mov_b(edx, Operand(src, 2));
    __ mov_w(Operand(dst, 0), eax);
    __ mov_b(Operand(dst, 2), edx);
    MemMoveEmitPopAndReturn(&masm);

    __ bind(&f4);
    __ mov(eax, Operand(src, 0));
    __ mov(Operand(dst, 0), eax);
    MemMoveEmitPopAndReturn(&masm);

    // 5..8 bytes: two possibly-overlapping 4-byte moves (reads first).
    __ bind(&f5_8);
    __ mov(eax, Operand(src, 0));
    __ mov(edx, Operand(src, count, times_1, -4));
    __ mov(Operand(dst, 0), eax);
    __ mov(Operand(dst, count, times_1, -4), edx);
    MemMoveEmitPopAndReturn(&masm);

    // Jump table indexed directly by count (0..8); 5..8 share one handler.
    __ bind(&small_handlers);
    __ dd(conv.address(&f0));
    __ dd(conv.address(&f1));
    __ dd(conv.address(&f2));
    __ dd(conv.address(&f3));
    __ dd(conv.address(&f4));
    __ dd(conv.address(&f5_8));
    __ dd(conv.address(&f5_8));
    __ dd(conv.address(&f5_8));
    __ dd(conv.address(&f5_8));

    __ bind(&small_size);  // Entry point into this block.
    if (FLAG_debug_code) {
      Label ok;
      __ cmp(count, 8);
      __ j(below_equal, &ok);
      __ int3();
      __ bind(&ok);
    }
    __ mov(eax, Operand(count, times_4, conv.address(&small_handlers)));
    __ jmp(eax);
  }

  __ bind(&pop_and_return);
  MemMoveEmitPopAndReturn(&masm);

  CodeDesc desc;
  masm.GetCode(&desc);
  // The buffer is fixed and unmovable; no relocation may be required.
  DCHECK(!RelocInfo::RequiresRelocation(desc));
  Assembler::FlushICache(isolate, buffer, actual_size);
  base::OS::ProtectCode(buffer, actual_size);
  // TODO(jkummerow): It would be nice to register this code creation event
  // with the PROFILE / GDBJIT system.
  return FUNCTION_CAST<MemMoveFunction>(buffer);
}
516
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000517
Ben Murdoch7f4d5bd2010-06-15 11:15:29 +0100518#undef __
519
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100520// -------------------------------------------------------------------------
521// Code generators
522
523#define __ ACCESS_MASM(masm)
524
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000525
// Emits code for an elements-kind transition that only requires swapping the
// receiver's map (no backing-store changes). Optionally bails out to
// |allocation_memento_found| if the receiver is a JSArray with an
// allocation memento.
void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* allocation_memento_found) {
  // edi is clobbered as scratch; callers must not pass it as an argument.
  Register scratch = edi;
  DCHECK(!AreAliased(receiver, key, value, target_map, scratch));

  if (mode == TRACK_ALLOCATION_SITE) {
    DCHECK(allocation_memento_found != NULL);
    __ JumpIfJSArrayHasAllocationMemento(
        receiver, scratch, allocation_memento_found);
  }

  // Set transitioned map.
  __ mov(FieldOperand(receiver, HeapObject::kMapOffset), target_map);
  // Write barrier for the map store (maps can be in the old generation).
  __ RecordWriteField(receiver,
                      HeapObject::kMapOffset,
                      target_map,
                      scratch,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}
553
554
// Emits code transitioning a receiver's elements from FAST_SMI_ELEMENTS to
// FAST_DOUBLE_ELEMENTS: allocates a FixedDoubleArray, converts each smi to
// a double (holes become the canonical hole NaN), installs the new backing
// store, and sets the transitioned map. Jumps to |fail| if GC is required
// or an allocation memento is found.
void ElementsTransitionGenerator::GenerateSmiToDouble(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* fail) {
  // Return address is on the stack.
  // The code below hard-codes these registers; assert the contract.
  DCHECK(receiver.is(edx));
  DCHECK(key.is(ecx));
  DCHECK(value.is(eax));
  DCHECK(target_map.is(ebx));

  Label loop, entry, convert_hole, gc_required, only_change_map;

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(edx, edi, fail);
  }

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
  __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
  __ j(equal, &only_change_map);

  // Preserve value (eax) and target_map (ebx) across the allocation/loop.
  __ push(eax);
  __ push(ebx);

  __ mov(edi, FieldOperand(edi, FixedArray::kLengthOffset));

  // Allocate new FixedDoubleArray.
  // edx: receiver
  // edi: length of source FixedArray (smi-tagged)
  AllocationFlags flags =
      static_cast<AllocationFlags>(TAG_OBJECT | DOUBLE_ALIGNMENT);
  // times_8 = 8 bytes per double; edi is a smi (already shifted by 1),
  // hence REGISTER_VALUE_IS_SMI.
  __ Allocate(FixedDoubleArray::kHeaderSize, times_8, edi,
              REGISTER_VALUE_IS_SMI, eax, ebx, no_reg, &gc_required, flags);

  // eax: destination FixedDoubleArray
  // edi: number of elements
  // edx: receiver
  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
         Immediate(masm->isolate()->factory()->fixed_double_array_map()));
  __ mov(FieldOperand(eax, FixedDoubleArray::kLengthOffset), edi);
  __ mov(esi, FieldOperand(edx, JSObject::kElementsOffset));
  // Replace receiver's backing store with newly created FixedDoubleArray.
  __ mov(FieldOperand(edx, JSObject::kElementsOffset), eax);
  __ mov(ebx, eax);
  __ RecordWriteField(edx,
                      JSObject::kElementsOffset,
                      ebx,
                      edi,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  __ mov(edi, FieldOperand(esi, FixedArray::kLengthOffset));

  // Prepare for conversion loop.
  ExternalReference canonical_the_hole_nan_reference =
      ExternalReference::address_of_the_hole_nan();
  XMMRegister the_hole_nan = xmm1;
  // Keep the hole NaN in a register for the duration of the loop.
  __ movsd(the_hole_nan,
           Operand::StaticVariable(canonical_the_hole_nan_reference));
  __ jmp(&entry);

  // Call into runtime if GC is required.
  __ bind(&gc_required);
  // Restore registers before jumping into runtime.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ pop(ebx);
  __ pop(eax);
  __ jmp(fail);

  // Convert and copy elements
  // esi: source FixedArray
  __ bind(&loop);
  // times_2 scales a smi index to a 4-byte element slot.
  __ mov(ebx, FieldOperand(esi, edi, times_2, FixedArray::kHeaderSize));
  // ebx: current element from source
  // edi: index of current element
  __ JumpIfNotSmi(ebx, &convert_hole);

  // Normal smi, convert it to double and store.
  __ SmiUntag(ebx);
  __ Cvtsi2sd(xmm0, ebx);
  // times_4 scales a smi index to an 8-byte double slot.
  __ movsd(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize),
           xmm0);
  __ jmp(&entry);

  // Found hole, store hole_nan_as_double instead.
  __ bind(&convert_hole);

  if (FLAG_debug_code) {
    // The only non-smi an array with FAST_SMI_ELEMENTS may contain is the
    // hole sentinel.
    __ cmp(ebx, masm->isolate()->factory()->the_hole_value());
    __ Assert(equal, kObjectFoundInSmiOnlyArray);
  }

  __ movsd(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize),
           the_hole_nan);

  __ bind(&entry);
  // Count down (smi arithmetic) until the index goes negative.
  __ sub(edi, Immediate(Smi::FromInt(1)));
  __ j(not_sign, &loop);

  __ pop(ebx);
  __ pop(eax);

  // Restore esi.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));

  __ bind(&only_change_map);
  // eax: value
  // ebx: target map
  // Set transitioned map.
  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
  __ RecordWriteField(edx,
                      HeapObject::kMapOffset,
                      ebx,
                      edi,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
}
679
680
// Emits code transitioning a receiver's elements from FAST_DOUBLE_ELEMENTS
// to FAST_ELEMENTS: allocates a FixedArray (pre-filled with holes in case
// a later allocation triggers GC), boxes each double into a HeapNumber
// (hole NaNs become the hole sentinel), installs the new backing store, and
// sets the transitioned map. Jumps to |fail| if GC is required or an
// allocation memento is found.
void ElementsTransitionGenerator::GenerateDoubleToObject(
    MacroAssembler* masm,
    Register receiver,
    Register key,
    Register value,
    Register target_map,
    AllocationSiteMode mode,
    Label* fail) {
  // Return address is on the stack.
  // The code below hard-codes these registers; assert the contract.
  DCHECK(receiver.is(edx));
  DCHECK(key.is(ecx));
  DCHECK(value.is(eax));
  DCHECK(target_map.is(ebx));

  Label loop, entry, convert_hole, gc_required, only_change_map, success;

  if (mode == TRACK_ALLOCATION_SITE) {
    __ JumpIfJSArrayHasAllocationMemento(edx, edi, fail);
  }

  // Check for empty arrays, which only require a map transition and no changes
  // to the backing store.
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
  __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
  __ j(equal, &only_change_map);

  // Preserve esi (context) plus value/receiver/target_map across the loop,
  // since esi and these registers are clobbered below.
  __ push(esi);
  __ push(eax);
  __ push(edx);
  __ push(ebx);

  __ mov(ebx, FieldOperand(edi, FixedDoubleArray::kLengthOffset));

  // Allocate new FixedArray.
  // ebx: length of source FixedDoubleArray (smi-tagged)
  // times_2 scales the smi length to 4-byte element slots.
  __ lea(edi, Operand(ebx, times_2, FixedArray::kHeaderSize));
  __ Allocate(edi, eax, esi, no_reg, &gc_required, TAG_OBJECT);

  // eax: destination FixedArray
  // ebx: number of elements
  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
         Immediate(masm->isolate()->factory()->fixed_array_map()));
  __ mov(FieldOperand(eax, FixedArray::kLengthOffset), ebx);
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));

  // Allocating heap numbers in the loop below can fail and cause a jump to
  // gc_required. We can't leave a partly initialized FixedArray behind,
  // so pessimistically fill it with holes now.
  Label initialization_loop, initialization_loop_entry;
  __ jmp(&initialization_loop_entry, Label::kNear);
  __ bind(&initialization_loop);
  __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize),
         masm->isolate()->factory()->the_hole_value());
  __ bind(&initialization_loop_entry);
  __ sub(ebx, Immediate(Smi::FromInt(1)));
  __ j(not_sign, &initialization_loop);

  // Reload the length; ebx was consumed as the fill-loop counter.
  __ mov(ebx, FieldOperand(edi, FixedDoubleArray::kLengthOffset));
  __ jmp(&entry);

  // ebx: target map
  // edx: receiver
  // Set transitioned map.
  __ bind(&only_change_map);
  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
  __ RecordWriteField(edx,
                      HeapObject::kMapOffset,
                      ebx,
                      edi,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ jmp(&success);

  // Call into runtime if GC is required.
  __ bind(&gc_required);
  // Restore everything pushed in the prologue (reverse order).
  __ pop(ebx);
  __ pop(edx);
  __ pop(eax);
  __ pop(esi);
  __ jmp(fail);

  // Box doubles into heap numbers.
  // edi: source FixedDoubleArray
  // eax: destination FixedArray
  __ bind(&loop);
  // ebx: index of current element (smi-tagged)
  // Detect the hole by comparing the upper word of the element against the
  // hole NaN's upper word.
  uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
  __ cmp(FieldOperand(edi, ebx, times_4, offset), Immediate(kHoleNanUpper32));
  __ j(equal, &convert_hole);

  // Non-hole double, copy value into a heap number.
  __ AllocateHeapNumber(edx, esi, no_reg, &gc_required);
  // edx: new heap number
  __ movsd(xmm0,
           FieldOperand(edi, ebx, times_4, FixedDoubleArray::kHeaderSize));
  __ movsd(FieldOperand(edx, HeapNumber::kValueOffset), xmm0);
  __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize), edx);
  __ mov(esi, ebx);
  // Write barrier for storing a heap object into the new FixedArray.
  __ RecordWriteArray(eax,
                      edx,
                      esi,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ jmp(&entry, Label::kNear);

  // Replace the-hole NaN with the-hole pointer.
  __ bind(&convert_hole);
  __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize),
         masm->isolate()->factory()->the_hole_value());

  __ bind(&entry);
  // Count down (smi arithmetic) until the index goes negative.
  __ sub(ebx, Immediate(Smi::FromInt(1)));
  __ j(not_sign, &loop);

  __ pop(ebx);
  __ pop(edx);
  // ebx: target map
  // edx: receiver
  // Set transitioned map.
  __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
  __ RecordWriteField(edx,
                      HeapObject::kMapOffset,
                      ebx,
                      edi,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  // Replace receiver's backing store with newly created and filled FixedArray.
  __ mov(FieldOperand(edx, JSObject::kElementsOffset), eax);
  __ RecordWriteField(edx,
                      JSObject::kElementsOffset,
                      eax,
                      edi,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  // Restore registers.
  __ pop(eax);
  __ pop(esi);

  __ bind(&success);
}
826
827
// Emits code that loads the character at |index| from |string| into |result|
// (zero-extended to the full register).  Handles sequential and external
// strings in both one-byte and two-byte encodings, and unwraps one level of
// indirection (slices, and cons strings whose second part is empty).  Jumps
// to |call_runtime| for cases that need C++ help: non-flat cons strings and
// short external strings.  Clobbers |result|; may also clobber |string|
// (replaced by the parent/first part for indirect strings) and |index|
// (adjusted by the slice offset).
void StringCharLoadGenerator::Generate(MacroAssembler* masm,
                                       Factory* factory,
                                       Register string,
                                       Register index,
                                       Register result,
                                       Label* call_runtime) {
  // Fetch the instance type of the receiver into result register.
  __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // We need special handling for indirect strings.
  Label check_sequential;
  __ test(result, Immediate(kIsIndirectStringMask));
  __ j(zero, &check_sequential, Label::kNear);

  // Dispatch on the indirect string shape: slice or cons.
  Label cons_string;
  __ test(result, Immediate(kSlicedNotConsMask));
  __ j(zero, &cons_string, Label::kNear);

  // Handle slices: add the (smi-tagged) slice offset to |index| and continue
  // the lookup on the parent string.
  Label indirect_string_loaded;
  __ mov(result, FieldOperand(string, SlicedString::kOffsetOffset));
  __ SmiUntag(result);
  __ add(index, result);
  __ mov(string, FieldOperand(string, SlicedString::kParentOffset));
  __ jmp(&indirect_string_loaded, Label::kNear);

  // Handle cons strings.
  // Check whether the right hand side is the empty string (i.e. if
  // this is really a flat string in a cons string). If that is not
  // the case we would rather go to the runtime system now to flatten
  // the string.
  __ bind(&cons_string);
  __ cmp(FieldOperand(string, ConsString::kSecondOffset),
         Immediate(factory->empty_string()));
  __ j(not_equal, call_runtime);
  __ mov(string, FieldOperand(string, ConsString::kFirstOffset));

  // |string| now refers to the unwrapped string; reload its instance type.
  __ bind(&indirect_string_loaded);
  __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));

  // Distinguish sequential and external strings. Only these two string
  // representations can reach here (slices and flat cons strings have been
  // reduced to the underlying sequential or external string).
  Label seq_string;
  __ bind(&check_sequential);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ test(result, Immediate(kStringRepresentationMask));
  __ j(zero, &seq_string, Label::kNear);

  // Handle external strings.
  Label one_byte_external, done;
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ test(result, Immediate(kIsIndirectStringMask));
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  // Rule out short external strings: their data pointer is not cached in the
  // object, so they must be handled by the runtime.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ test_b(result, Immediate(kShortExternalStringMask));
  __ j(not_zero, call_runtime);
  // Check encoding.
  STATIC_ASSERT(kTwoByteStringTag == 0);
  __ test_b(result, Immediate(kStringEncodingMask));
  // NOTE: this mov is deliberately placed between the test_b above and the
  // conditional jump below -- mov does not modify the flags.
  __ mov(result, FieldOperand(string, ExternalString::kResourceDataOffset));
  __ j(not_equal, &one_byte_external, Label::kNear);
  // Two-byte string.
  __ movzx_w(result, Operand(result, index, times_2, 0));
  __ jmp(&done, Label::kNear);
  __ bind(&one_byte_external);
  // One-byte string.
  __ movzx_b(result, Operand(result, index, times_1, 0));
  __ jmp(&done, Label::kNear);

  // Dispatch on the encoding: one-byte or two-byte.
  Label one_byte;
  __ bind(&seq_string);
  STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
  STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
  __ test(result, Immediate(kStringEncodingMask));
  __ j(not_zero, &one_byte, Label::kNear);

  // Two-byte string.
  // Load the two-byte character code into the result register.
  __ movzx_w(result, FieldOperand(string,
                                  index,
                                  times_2,
                                  SeqTwoByteString::kHeaderSize));
  __ jmp(&done, Label::kNear);

  // One-byte string.
  // Load the byte into the result register.
  __ bind(&one_byte);
  __ movzx_b(result, FieldOperand(string,
                                  index,
                                  times_1,
                                  SeqOneByteString::kHeaderSize));
  __ bind(&done);
}
930
931
932static Operand ExpConstant(int index) {
933 return Operand::StaticVariable(ExternalReference::math_exp_constants(index));
934}
935
936
// Emits inline code computing exp(|input|) into |result|.
// NOTE(review): this appears to be the table-driven exp approximation shared
// across V8 back ends -- range checks against precomputed bounds, a
// polynomial correction term, and a 2048-entry power table indexed by the low
// bits of the scaled argument (math_exp_log_table).  The meaning of each
// ExpConstant index is defined where the table is built (codegen.cc) --
// confirm against that file before relying on the notes below.
// Clobbers |input|, |double_scratch|, |temp1| and |temp2|.
// Must not run under the serializer: it embeds raw external addresses.
void MathExpGenerator::EmitMathExp(MacroAssembler* masm,
                                   XMMRegister input,
                                   XMMRegister result,
                                   XMMRegister double_scratch,
                                   Register temp1,
                                   Register temp2) {
  // Register constraints: all XMM arguments must be distinct, and the two
  // scratch GP registers must differ.
  DCHECK(!input.is(double_scratch));
  DCHECK(!input.is(result));
  DCHECK(!result.is(double_scratch));
  DCHECK(!temp1.is(temp2));
  DCHECK(ExternalReference::math_exp_constants(0).address() != NULL);
  DCHECK(!masm->serializer_enabled());  // External references not serializable.

  Label done;

  // If input <= ExpConstant(0) (lower bound), return 0.0.
  __ movsd(double_scratch, ExpConstant(0));
  __ xorpd(result, result);
  __ ucomisd(double_scratch, input);
  __ j(above_equal, &done);
  // If input >= ExpConstant(1) (upper bound), return ExpConstant(2)
  // (presumably +infinity -- confirm against the table definition).
  __ ucomisd(input, ExpConstant(1));
  __ movsd(result, ExpConstant(2));
  __ j(above_equal, &done);
  // Scale the input and capture the integer/table-index bits in temp2.
  __ movsd(double_scratch, ExpConstant(3));
  __ movsd(result, ExpConstant(4));
  __ mulsd(double_scratch, input);
  __ addsd(double_scratch, result);
  __ movd(temp2, double_scratch);
  __ subsd(double_scratch, result);
  // Compute the polynomial correction term in |result|.
  __ movsd(result, ExpConstant(6));
  __ mulsd(double_scratch, ExpConstant(5));
  __ subsd(double_scratch, input);
  __ subsd(result, double_scratch);
  __ movsd(input, double_scratch);  // |input| is clobbered from here on.
  __ mulsd(input, double_scratch);
  __ mulsd(result, input);
  __ mov(temp1, temp2);
  __ mulsd(result, ExpConstant(7));
  __ subsd(result, double_scratch);
  // Split temp2: low 11 bits index the power table, the rest (via temp1)
  // form the exponent bits of a double built by hand below.
  __ add(temp1, Immediate(0x1ff800));
  __ addsd(result, ExpConstant(8));
  __ and_(temp2, Immediate(0x7ff));
  __ shr(temp1, 11);
  __ shl(temp1, 20);
  // Place the exponent word into the high half of |input|.
  __ movd(input, temp1);
  __ pshufd(input, input, static_cast<uint8_t>(0xe1));  // Order: 11 10 00 01
  // OR in the table mantissa and apply the final scale.
  __ movsd(double_scratch, Operand::StaticArray(
      temp2, times_8, ExternalReference::math_exp_log_table()));
  __ orps(input, double_scratch);
  __ mulsd(result, input);
  __ bind(&done);
}
988
989#undef __
990
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000991
// Pre-assembles the canonical "young" code-age sequence: the standard ia32
// function prologue (push ebp; mov ebp, esp; push esi; push edi).  Aging
// replaces these bytes with a call (see PatchPlatformCodeAge); comparing
// against this buffer is how IsYoung/IsOld classify a code object.
CodeAgingHelper::CodeAgingHelper(Isolate* isolate) {
  USE(isolate);
  DCHECK(young_sequence_.length() == kNoCodeAgeSequenceLength);
  // The sequence of instructions that is patched out for aging code is the
  // following boilerplate stack-building prologue that is found both in
  // FUNCTION and OPTIMIZED_FUNCTION code:
  CodePatcher patcher(isolate, young_sequence_.start(),
                      young_sequence_.length());
  patcher.masm()->push(ebp);
  patcher.masm()->mov(ebp, esp);
  patcher.masm()->push(esi);
  patcher.masm()->push(edi);
}
1002
1003
1004#ifdef DEBUG
1005bool CodeAgingHelper::IsOld(byte* candidate) const {
1006 return *candidate == kCallOpcode;
1007}
1008#endif
1009
1010
1011bool Code::IsYoungSequence(Isolate* isolate, byte* sequence) {
1012 bool result = isolate->code_aging_helper()->IsYoung(sequence);
1013 DCHECK(result || isolate->code_aging_helper()->IsOld(sequence));
1014 return result;
1015}
1016
1017
1018void Code::GetCodeAgeAndParity(Isolate* isolate, byte* sequence, Age* age,
1019 MarkingParity* parity) {
1020 if (IsYoungSequence(isolate, sequence)) {
1021 *age = kNoAgeCodeAge;
1022 *parity = NO_MARKING_PARITY;
1023 } else {
1024 sequence++; // Skip the kCallOpcode byte
1025 Address target_address = sequence + *reinterpret_cast<int*>(sequence) +
1026 Assembler::kCallTargetAddressOffset;
1027 Code* stub = GetCodeFromTargetAddress(target_address);
1028 GetCodeAgeAndParity(stub, age, parity);
1029 }
1030}
1031
1032
1033void Code::PatchPlatformCodeAge(Isolate* isolate,
1034 byte* sequence,
1035 Code::Age age,
1036 MarkingParity parity) {
1037 uint32_t young_length = isolate->code_aging_helper()->young_sequence_length();
1038 if (age == kNoAgeCodeAge) {
1039 isolate->code_aging_helper()->CopyYoungSequenceTo(sequence);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001040 Assembler::FlushICache(isolate, sequence, young_length);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001041 } else {
1042 Code* stub = GetCodeAgeStub(isolate, age, parity);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001043 CodePatcher patcher(isolate, sequence, young_length);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001044 patcher.masm()->call(stub->instruction_start(), RelocInfo::NONE32);
1045 }
1046}
1047
1048
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001049} // namespace internal
1050} // namespace v8
Leon Clarkef7060e22010-06-03 12:02:55 +01001051
1052#endif // V8_TARGET_ARCH_IA32