blob: 00d36097d08efc3a6aabd19f96c3f09e341ef896 [file] [log] [blame]
Steve Blocka7e24c12009-10-30 11:49:00 +00001// Copyright 2009 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000028#include <cstdlib>
29#include <iostream>
Steve Blocka7e24c12009-10-30 11:49:00 +000030
Ben Murdochb8a8cc12014-11-26 15:28:44 +000031#include "src/v8.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000032
Ben Murdochb8a8cc12014-11-26 15:28:44 +000033#include "src/base/platform/platform.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000034#include "src/base/utils/random-number-generator.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000035#include "src/factory.h"
36#include "src/macro-assembler.h"
37#include "src/ostreams.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000038#include "test/cctest/cctest.h"
Steve Blocka7e24c12009-10-30 11:49:00 +000039
Ben Murdochb8a8cc12014-11-26 15:28:44 +000040using namespace v8::internal;
Steve Blocka7e24c12009-10-30 11:49:00 +000041
Steve Blocka7e24c12009-10-30 11:49:00 +000042// Test the x64 assembler by compiling some simple functions into
43// a buffer and executing them. These tests do not initialize the
44// V8 library, create a context, or use any V8 objects.
Steve Block3ce2e202009-11-05 08:53:23 +000045// The AMD64 calling convention is used, with the first six arguments
46// in RDI, RSI, RDX, RCX, R8, and R9, and floating point arguments in
Steve Blocka7e24c12009-10-30 11:49:00 +000047// the XMM registers. The return value is in RAX.
48// This calling convention is used on Linux, with GCC, and on Mac OS,
Steve Block3ce2e202009-11-05 08:53:23 +000049// with GCC. A different convention is used on 64-bit windows,
50// where the first four integer arguments are passed in RCX, RDX, R8 and R9.
Steve Blocka7e24c12009-10-30 11:49:00 +000051
52typedef int (*F0)();
53typedef int (*F1)(int64_t x);
54typedef int (*F2)(int64_t x, int64_t y);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000055typedef unsigned (*F3)(double x);
56typedef uint64_t (*F4)(uint64_t* x, uint64_t* y);
57typedef uint64_t (*F5)(uint64_t x);
Steve Blocka7e24c12009-10-30 11:49:00 +000058
Steve Block3ce2e202009-11-05 08:53:23 +000059#ifdef _WIN64
Ben Murdochb8a8cc12014-11-26 15:28:44 +000060static const Register arg1 = rcx;
61static const Register arg2 = rdx;
Steve Block3ce2e202009-11-05 08:53:23 +000062#else
Ben Murdochb8a8cc12014-11-26 15:28:44 +000063static const Register arg1 = rdi;
64static const Register arg2 = rsi;
Steve Block3ce2e202009-11-05 08:53:23 +000065#endif
66
Steve Blocka7e24c12009-10-30 11:49:00 +000067#define __ assm.
68
69
70TEST(AssemblerX64ReturnOperation) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000071 CcTest::InitializeVM();
Steve Blocka7e24c12009-10-30 11:49:00 +000072 // Allocate an executable page of memory.
73 size_t actual_size;
Ben Murdochb8a8cc12014-11-26 15:28:44 +000074 byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
75 Assembler::kMinimalBufferSize, &actual_size, true));
Steve Blocka7e24c12009-10-30 11:49:00 +000076 CHECK(buffer);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000077 Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
Steve Blocka7e24c12009-10-30 11:49:00 +000078
79 // Assemble a simple function that copies argument 2 and returns it.
Steve Block3ce2e202009-11-05 08:53:23 +000080 __ movq(rax, arg2);
Steve Blocka7e24c12009-10-30 11:49:00 +000081 __ nop();
82 __ ret(0);
83
84 CodeDesc desc;
85 assm.GetCode(&desc);
86 // Call the function from C++.
87 int result = FUNCTION_CAST<F2>(buffer)(3, 2);
88 CHECK_EQ(2, result);
89}
90
Ben Murdochb8a8cc12014-11-26 15:28:44 +000091
Steve Blocka7e24c12009-10-30 11:49:00 +000092TEST(AssemblerX64StackOperations) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +000093 CcTest::InitializeVM();
Steve Blocka7e24c12009-10-30 11:49:00 +000094 // Allocate an executable page of memory.
95 size_t actual_size;
Ben Murdochb8a8cc12014-11-26 15:28:44 +000096 byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
97 Assembler::kMinimalBufferSize, &actual_size, true));
Steve Blocka7e24c12009-10-30 11:49:00 +000098 CHECK(buffer);
Ben Murdochb8a8cc12014-11-26 15:28:44 +000099 Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
Steve Blocka7e24c12009-10-30 11:49:00 +0000100
101 // Assemble a simple function that copies argument 2 and returns it.
102 // We compile without stack frame pointers, so the gdb debugger shows
103 // incorrect stack frames when debugging this function (which has them).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000104 __ pushq(rbp);
Steve Blocka7e24c12009-10-30 11:49:00 +0000105 __ movq(rbp, rsp);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000106 __ pushq(arg2); // Value at (rbp - 8)
107 __ pushq(arg2); // Value at (rbp - 16)
108 __ pushq(arg1); // Value at (rbp - 24)
109 __ popq(rax);
110 __ popq(rax);
111 __ popq(rax);
112 __ popq(rbp);
Steve Blocka7e24c12009-10-30 11:49:00 +0000113 __ nop();
114 __ ret(0);
115
116 CodeDesc desc;
117 assm.GetCode(&desc);
118 // Call the function from C++.
119 int result = FUNCTION_CAST<F2>(buffer)(3, 2);
120 CHECK_EQ(2, result);
121}
122
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000123
Steve Blocka7e24c12009-10-30 11:49:00 +0000124TEST(AssemblerX64ArithmeticOperations) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000125 CcTest::InitializeVM();
Steve Blocka7e24c12009-10-30 11:49:00 +0000126 // Allocate an executable page of memory.
127 size_t actual_size;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000128 byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
129 Assembler::kMinimalBufferSize, &actual_size, true));
Steve Blocka7e24c12009-10-30 11:49:00 +0000130 CHECK(buffer);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000131 Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
Steve Blocka7e24c12009-10-30 11:49:00 +0000132
133 // Assemble a simple function that adds arguments returning the sum.
Steve Block3ce2e202009-11-05 08:53:23 +0000134 __ movq(rax, arg2);
135 __ addq(rax, arg1);
Steve Blocka7e24c12009-10-30 11:49:00 +0000136 __ ret(0);
137
138 CodeDesc desc;
139 assm.GetCode(&desc);
140 // Call the function from C++.
141 int result = FUNCTION_CAST<F2>(buffer)(3, 2);
142 CHECK_EQ(5, result);
143}
144
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000145
146TEST(AssemblerX64CmpbOperation) {
147 CcTest::InitializeVM();
Steve Blocka7e24c12009-10-30 11:49:00 +0000148 // Allocate an executable page of memory.
149 size_t actual_size;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000150 byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
151 Assembler::kMinimalBufferSize, &actual_size, true));
Steve Blocka7e24c12009-10-30 11:49:00 +0000152 CHECK(buffer);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000153 Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
154
155 // Assemble a function that compare argument byte returing 1 if equal else 0.
156 // On Windows, it compares rcx with rdx which does not require REX prefix;
157 // on Linux, it compares rdi with rsi which requires REX prefix.
158
159 Label done;
160 __ movq(rax, Immediate(1));
161 __ cmpb(arg1, arg2);
162 __ j(equal, &done);
163 __ movq(rax, Immediate(0));
164 __ bind(&done);
165 __ ret(0);
166
167 CodeDesc desc;
168 assm.GetCode(&desc);
169 // Call the function from C++.
170 int result = FUNCTION_CAST<F2>(buffer)(0x1002, 0x2002);
171 CHECK_EQ(1, result);
172 result = FUNCTION_CAST<F2>(buffer)(0x1002, 0x2003);
173 CHECK_EQ(0, result);
174}
175
176
177TEST(AssemblerX64ImulOperation) {
178 CcTest::InitializeVM();
179 // Allocate an executable page of memory.
180 size_t actual_size;
181 byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
182 Assembler::kMinimalBufferSize, &actual_size, true));
183 CHECK(buffer);
184 Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
Steve Blocka7e24c12009-10-30 11:49:00 +0000185
186 // Assemble a simple function that multiplies arguments returning the high
187 // word.
Steve Block3ce2e202009-11-05 08:53:23 +0000188 __ movq(rax, arg2);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000189 __ imulq(arg1);
Steve Blocka7e24c12009-10-30 11:49:00 +0000190 __ movq(rax, rdx);
191 __ ret(0);
192
193 CodeDesc desc;
194 assm.GetCode(&desc);
195 // Call the function from C++.
196 int result = FUNCTION_CAST<F2>(buffer)(3, 2);
197 CHECK_EQ(0, result);
198 result = FUNCTION_CAST<F2>(buffer)(0x100000000l, 0x100000000l);
199 CHECK_EQ(1, result);
200 result = FUNCTION_CAST<F2>(buffer)(-0x100000000l, 0x100000000l);
201 CHECK_EQ(-1, result);
202}
203
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000204
205TEST(AssemblerX64XchglOperations) {
206 CcTest::InitializeVM();
Steve Blocka7e24c12009-10-30 11:49:00 +0000207 // Allocate an executable page of memory.
208 size_t actual_size;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000209 byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
210 Assembler::kMinimalBufferSize, &actual_size, true));
Steve Blocka7e24c12009-10-30 11:49:00 +0000211 CHECK(buffer);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000212 Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
213
214 __ movq(rax, Operand(arg1, 0));
215 __ movq(r11, Operand(arg2, 0));
216 __ xchgl(rax, r11);
217 __ movq(Operand(arg1, 0), rax);
218 __ movq(Operand(arg2, 0), r11);
219 __ ret(0);
220
221 CodeDesc desc;
222 assm.GetCode(&desc);
223 // Call the function from C++.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000224 uint64_t left = V8_2PART_UINT64_C(0x10000000, 20000000);
225 uint64_t right = V8_2PART_UINT64_C(0x30000000, 40000000);
226 uint64_t result = FUNCTION_CAST<F4>(buffer)(&left, &right);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000227 CHECK_EQ(V8_2PART_UINT64_C(0x00000000, 40000000), left);
228 CHECK_EQ(V8_2PART_UINT64_C(0x00000000, 20000000), right);
229 USE(result);
230}
231
232
233TEST(AssemblerX64OrlOperations) {
234 CcTest::InitializeVM();
235 // Allocate an executable page of memory.
236 size_t actual_size;
237 byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
238 Assembler::kMinimalBufferSize, &actual_size, true));
239 CHECK(buffer);
240 Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
241
242 __ movq(rax, Operand(arg2, 0));
243 __ orl(Operand(arg1, 0), rax);
244 __ ret(0);
245
246 CodeDesc desc;
247 assm.GetCode(&desc);
248 // Call the function from C++.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000249 uint64_t left = V8_2PART_UINT64_C(0x10000000, 20000000);
250 uint64_t right = V8_2PART_UINT64_C(0x30000000, 40000000);
251 uint64_t result = FUNCTION_CAST<F4>(buffer)(&left, &right);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000252 CHECK_EQ(V8_2PART_UINT64_C(0x10000000, 60000000), left);
253 USE(result);
254}
255
256
257TEST(AssemblerX64RollOperations) {
258 CcTest::InitializeVM();
259 // Allocate an executable page of memory.
260 size_t actual_size;
261 byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
262 Assembler::kMinimalBufferSize, &actual_size, true));
263 CHECK(buffer);
264 Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
265
266 __ movq(rax, arg1);
267 __ roll(rax, Immediate(1));
268 __ ret(0);
269
270 CodeDesc desc;
271 assm.GetCode(&desc);
272 // Call the function from C++.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000273 uint64_t src = V8_2PART_UINT64_C(0x10000000, C0000000);
274 uint64_t result = FUNCTION_CAST<F5>(buffer)(src);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000275 CHECK_EQ(V8_2PART_UINT64_C(0x00000000, 80000001), result);
276}
277
278
279TEST(AssemblerX64SublOperations) {
280 CcTest::InitializeVM();
281 // Allocate an executable page of memory.
282 size_t actual_size;
283 byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
284 Assembler::kMinimalBufferSize, &actual_size, true));
285 CHECK(buffer);
286 Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
287
288 __ movq(rax, Operand(arg2, 0));
289 __ subl(Operand(arg1, 0), rax);
290 __ ret(0);
291
292 CodeDesc desc;
293 assm.GetCode(&desc);
294 // Call the function from C++.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000295 uint64_t left = V8_2PART_UINT64_C(0x10000000, 20000000);
296 uint64_t right = V8_2PART_UINT64_C(0x30000000, 40000000);
297 uint64_t result = FUNCTION_CAST<F4>(buffer)(&left, &right);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000298 CHECK_EQ(V8_2PART_UINT64_C(0x10000000, e0000000), left);
299 USE(result);
300}
301
302
303TEST(AssemblerX64TestlOperations) {
304 CcTest::InitializeVM();
305 // Allocate an executable page of memory.
306 size_t actual_size;
307 byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
308 Assembler::kMinimalBufferSize, &actual_size, true));
309 CHECK(buffer);
310 Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
311
312 // Set rax with the ZF flag of the testl instruction.
313 Label done;
314 __ movq(rax, Immediate(1));
315 __ movq(r11, Operand(arg2, 0));
316 __ testl(Operand(arg1, 0), r11);
317 __ j(zero, &done, Label::kNear);
318 __ movq(rax, Immediate(0));
319 __ bind(&done);
320 __ ret(0);
321
322 CodeDesc desc;
323 assm.GetCode(&desc);
324 // Call the function from C++.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000325 uint64_t left = V8_2PART_UINT64_C(0x10000000, 20000000);
326 uint64_t right = V8_2PART_UINT64_C(0x30000000, 00000000);
327 uint64_t result = FUNCTION_CAST<F4>(buffer)(&left, &right);
328 CHECK_EQ(1u, result);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000329}
330
Ben Murdochc5610432016-08-08 18:44:38 +0100331TEST(AssemblerX64TestwOperations) {
332 typedef uint16_t (*F)(uint16_t * x);
333 CcTest::InitializeVM();
334 // Allocate an executable page of memory.
335 size_t actual_size;
336 byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
337 Assembler::kMinimalBufferSize, &actual_size, true));
338 CHECK(buffer);
339 Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
340
341 // Set rax with the ZF flag of the testl instruction.
342 Label done;
343 __ movq(rax, Immediate(1));
344 __ testw(Operand(arg1, 0), Immediate(0xf0f0));
345 __ j(not_zero, &done, Label::kNear);
346 __ movq(rax, Immediate(0));
347 __ bind(&done);
348 __ ret(0);
349
350 CodeDesc desc;
351 assm.GetCode(&desc);
352 // Call the function from C++.
353 uint16_t operand = 0x8000;
354 uint16_t result = FUNCTION_CAST<F>(buffer)(&operand);
355 CHECK_EQ(1u, result);
356}
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000357
358TEST(AssemblerX64XorlOperations) {
359 CcTest::InitializeVM();
360 // Allocate an executable page of memory.
361 size_t actual_size;
362 byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
363 Assembler::kMinimalBufferSize, &actual_size, true));
364 CHECK(buffer);
365 Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
366
367 __ movq(rax, Operand(arg2, 0));
368 __ xorl(Operand(arg1, 0), rax);
369 __ ret(0);
370
371 CodeDesc desc;
372 assm.GetCode(&desc);
373 // Call the function from C++.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000374 uint64_t left = V8_2PART_UINT64_C(0x10000000, 20000000);
375 uint64_t right = V8_2PART_UINT64_C(0x30000000, 60000000);
376 uint64_t result = FUNCTION_CAST<F4>(buffer)(&left, &right);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000377 CHECK_EQ(V8_2PART_UINT64_C(0x10000000, 40000000), left);
378 USE(result);
379}
380
381
382TEST(AssemblerX64MemoryOperands) {
383 CcTest::InitializeVM();
384 // Allocate an executable page of memory.
385 size_t actual_size;
386 byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
387 Assembler::kMinimalBufferSize, &actual_size, true));
388 CHECK(buffer);
389 Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
Steve Blocka7e24c12009-10-30 11:49:00 +0000390
391 // Assemble a simple function that copies argument 2 and returns it.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000392 __ pushq(rbp);
Steve Blocka7e24c12009-10-30 11:49:00 +0000393 __ movq(rbp, rsp);
Steve Block3ce2e202009-11-05 08:53:23 +0000394
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000395 __ pushq(arg2); // Value at (rbp - 8)
396 __ pushq(arg2); // Value at (rbp - 16)
397 __ pushq(arg1); // Value at (rbp - 24)
Steve Block3ce2e202009-11-05 08:53:23 +0000398
Steve Blocka7e24c12009-10-30 11:49:00 +0000399 const int kStackElementSize = 8;
400 __ movq(rax, Operand(rbp, -3 * kStackElementSize));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000401 __ popq(arg2);
402 __ popq(arg2);
403 __ popq(arg2);
404 __ popq(rbp);
Steve Blocka7e24c12009-10-30 11:49:00 +0000405 __ nop();
406 __ ret(0);
407
408 CodeDesc desc;
409 assm.GetCode(&desc);
410 // Call the function from C++.
411 int result = FUNCTION_CAST<F2>(buffer)(3, 2);
412 CHECK_EQ(3, result);
413}
414
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000415
Steve Blocka7e24c12009-10-30 11:49:00 +0000416TEST(AssemblerX64ControlFlow) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000417 CcTest::InitializeVM();
Steve Blocka7e24c12009-10-30 11:49:00 +0000418 // Allocate an executable page of memory.
419 size_t actual_size;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000420 byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
421 Assembler::kMinimalBufferSize, &actual_size, true));
Steve Blocka7e24c12009-10-30 11:49:00 +0000422 CHECK(buffer);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000423 Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
Steve Blocka7e24c12009-10-30 11:49:00 +0000424
Steve Block3ce2e202009-11-05 08:53:23 +0000425 // Assemble a simple function that copies argument 1 and returns it.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000426 __ pushq(rbp);
Steve Block3ce2e202009-11-05 08:53:23 +0000427
Steve Blocka7e24c12009-10-30 11:49:00 +0000428 __ movq(rbp, rsp);
Steve Block3ce2e202009-11-05 08:53:23 +0000429 __ movq(rax, arg1);
Steve Blocka7e24c12009-10-30 11:49:00 +0000430 Label target;
431 __ jmp(&target);
Steve Block3ce2e202009-11-05 08:53:23 +0000432 __ movq(rax, arg2);
Steve Blocka7e24c12009-10-30 11:49:00 +0000433 __ bind(&target);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000434 __ popq(rbp);
Steve Blocka7e24c12009-10-30 11:49:00 +0000435 __ ret(0);
436
437 CodeDesc desc;
438 assm.GetCode(&desc);
439 // Call the function from C++.
440 int result = FUNCTION_CAST<F2>(buffer)(3, 2);
441 CHECK_EQ(3, result);
442}
443
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000444
Steve Blocka7e24c12009-10-30 11:49:00 +0000445TEST(AssemblerX64LoopImmediates) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000446 CcTest::InitializeVM();
Steve Blocka7e24c12009-10-30 11:49:00 +0000447 // Allocate an executable page of memory.
448 size_t actual_size;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000449 byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
450 Assembler::kMinimalBufferSize, &actual_size, true));
Steve Blocka7e24c12009-10-30 11:49:00 +0000451 CHECK(buffer);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000452 Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
Steve Blocka7e24c12009-10-30 11:49:00 +0000453 // Assemble two loops using rax as counter, and verify the ending counts.
454 Label Fail;
455 __ movq(rax, Immediate(-3));
456 Label Loop1_test;
457 Label Loop1_body;
458 __ jmp(&Loop1_test);
459 __ bind(&Loop1_body);
460 __ addq(rax, Immediate(7));
461 __ bind(&Loop1_test);
462 __ cmpq(rax, Immediate(20));
463 __ j(less_equal, &Loop1_body);
464 // Did the loop terminate with the expected value?
465 __ cmpq(rax, Immediate(25));
466 __ j(not_equal, &Fail);
467
468 Label Loop2_test;
469 Label Loop2_body;
470 __ movq(rax, Immediate(0x11FEED00));
471 __ jmp(&Loop2_test);
472 __ bind(&Loop2_body);
473 __ addq(rax, Immediate(-0x1100));
474 __ bind(&Loop2_test);
475 __ cmpq(rax, Immediate(0x11FE8000));
476 __ j(greater, &Loop2_body);
477 // Did the loop terminate with the expected value?
478 __ cmpq(rax, Immediate(0x11FE7600));
479 __ j(not_equal, &Fail);
480
481 __ movq(rax, Immediate(1));
482 __ ret(0);
483 __ bind(&Fail);
484 __ movq(rax, Immediate(0));
485 __ ret(0);
486
487 CodeDesc desc;
488 assm.GetCode(&desc);
489 // Call the function from C++.
490 int result = FUNCTION_CAST<F0>(buffer)();
491 CHECK_EQ(1, result);
492}
493
Steve Block1e0659c2011-05-24 12:43:12 +0100494
495TEST(OperandRegisterDependency) {
496 int offsets[4] = {0, 1, 0xfed, 0xbeefcad};
497 for (int i = 0; i < 4; i++) {
498 int offset = offsets[i];
499 CHECK(Operand(rax, offset).AddressUsesRegister(rax));
500 CHECK(!Operand(rax, offset).AddressUsesRegister(r8));
501 CHECK(!Operand(rax, offset).AddressUsesRegister(rcx));
502
503 CHECK(Operand(rax, rax, times_1, offset).AddressUsesRegister(rax));
504 CHECK(!Operand(rax, rax, times_1, offset).AddressUsesRegister(r8));
505 CHECK(!Operand(rax, rax, times_1, offset).AddressUsesRegister(rcx));
506
507 CHECK(Operand(rax, rcx, times_1, offset).AddressUsesRegister(rax));
508 CHECK(Operand(rax, rcx, times_1, offset).AddressUsesRegister(rcx));
509 CHECK(!Operand(rax, rcx, times_1, offset).AddressUsesRegister(r8));
510 CHECK(!Operand(rax, rcx, times_1, offset).AddressUsesRegister(r9));
511 CHECK(!Operand(rax, rcx, times_1, offset).AddressUsesRegister(rdx));
512 CHECK(!Operand(rax, rcx, times_1, offset).AddressUsesRegister(rsp));
513
514 CHECK(Operand(rsp, offset).AddressUsesRegister(rsp));
515 CHECK(!Operand(rsp, offset).AddressUsesRegister(rax));
Steve Block44f0eee2011-05-26 01:26:41 +0100516 CHECK(!Operand(rsp, offset).AddressUsesRegister(r15));
Steve Block1e0659c2011-05-24 12:43:12 +0100517
518 CHECK(Operand(rbp, offset).AddressUsesRegister(rbp));
519 CHECK(!Operand(rbp, offset).AddressUsesRegister(rax));
520 CHECK(!Operand(rbp, offset).AddressUsesRegister(r13));
521
522 CHECK(Operand(rbp, rax, times_1, offset).AddressUsesRegister(rbp));
523 CHECK(Operand(rbp, rax, times_1, offset).AddressUsesRegister(rax));
524 CHECK(!Operand(rbp, rax, times_1, offset).AddressUsesRegister(rcx));
525 CHECK(!Operand(rbp, rax, times_1, offset).AddressUsesRegister(r13));
526 CHECK(!Operand(rbp, rax, times_1, offset).AddressUsesRegister(r8));
527 CHECK(!Operand(rbp, rax, times_1, offset).AddressUsesRegister(rsp));
528
529 CHECK(Operand(rsp, rbp, times_1, offset).AddressUsesRegister(rsp));
530 CHECK(Operand(rsp, rbp, times_1, offset).AddressUsesRegister(rbp));
531 CHECK(!Operand(rsp, rbp, times_1, offset).AddressUsesRegister(rax));
Steve Block44f0eee2011-05-26 01:26:41 +0100532 CHECK(!Operand(rsp, rbp, times_1, offset).AddressUsesRegister(r15));
Steve Block1e0659c2011-05-24 12:43:12 +0100533 CHECK(!Operand(rsp, rbp, times_1, offset).AddressUsesRegister(r13));
534 }
535}
536
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000537
538TEST(AssemblerX64LabelChaining) {
539 // Test chaining of label usages within instructions (issue 1644).
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000540 CcTest::InitializeVM();
541 v8::HandleScope scope(CcTest::isolate());
542 Assembler assm(CcTest::i_isolate(), NULL, 0);
Ben Murdoch69a99ed2011-11-30 16:03:39 +0000543
544 Label target;
545 __ j(equal, &target);
546 __ j(not_equal, &target);
547 __ bind(&target);
548 __ nop();
549}
550
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100551
552TEST(AssemblerMultiByteNop) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000553 CcTest::InitializeVM();
554 v8::HandleScope scope(CcTest::isolate());
555 byte buffer[1024];
556 Isolate* isolate = CcTest::i_isolate();
557 Assembler assm(isolate, buffer, sizeof(buffer));
558 __ pushq(rbx);
559 __ pushq(rcx);
560 __ pushq(rdx);
561 __ pushq(rdi);
562 __ pushq(rsi);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100563 __ movq(rax, Immediate(1));
564 __ movq(rbx, Immediate(2));
565 __ movq(rcx, Immediate(3));
566 __ movq(rdx, Immediate(4));
567 __ movq(rdi, Immediate(5));
568 __ movq(rsi, Immediate(6));
569 for (int i = 0; i < 16; i++) {
570 int before = assm.pc_offset();
571 __ Nop(i);
572 CHECK_EQ(assm.pc_offset() - before, i);
573 }
574
575 Label fail;
576 __ cmpq(rax, Immediate(1));
577 __ j(not_equal, &fail);
578 __ cmpq(rbx, Immediate(2));
579 __ j(not_equal, &fail);
580 __ cmpq(rcx, Immediate(3));
581 __ j(not_equal, &fail);
582 __ cmpq(rdx, Immediate(4));
583 __ j(not_equal, &fail);
584 __ cmpq(rdi, Immediate(5));
585 __ j(not_equal, &fail);
586 __ cmpq(rsi, Immediate(6));
587 __ j(not_equal, &fail);
588 __ movq(rax, Immediate(42));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000589 __ popq(rsi);
590 __ popq(rdi);
591 __ popq(rdx);
592 __ popq(rcx);
593 __ popq(rbx);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100594 __ ret(0);
595 __ bind(&fail);
596 __ movq(rax, Immediate(13));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000597 __ popq(rsi);
598 __ popq(rdi);
599 __ popq(rdx);
600 __ popq(rcx);
601 __ popq(rbx);
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100602 __ ret(0);
603
604 CodeDesc desc;
605 assm.GetCode(&desc);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000606 Handle<Code> code = isolate->factory()->NewCode(
607 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100608
609 F0 f = FUNCTION_CAST<F0>(code->entry());
610 int res = f();
611 CHECK_EQ(42, res);
612}
613
614
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000615#ifdef __GNUC__
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000616#define ELEMENT_COUNT 4u
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000617
618void DoSSE2(const v8::FunctionCallbackInfo<v8::Value>& args) {
619 v8::HandleScope scope(CcTest::isolate());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000620 v8::Local<v8::Context> context = CcTest::isolate()->GetCurrentContext();
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000621 byte buffer[1024];
622
623 CHECK(args[0]->IsArray());
624 v8::Local<v8::Array> vec = v8::Local<v8::Array>::Cast(args[0]);
625 CHECK_EQ(ELEMENT_COUNT, vec->Length());
626
627 Isolate* isolate = CcTest::i_isolate();
628 Assembler assm(isolate, buffer, sizeof(buffer));
629
630 // Remove return address from the stack for fix stack frame alignment.
631 __ popq(rcx);
632
633 // Store input vector on the stack.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000634 for (unsigned i = 0; i < ELEMENT_COUNT; i++) {
635 __ movl(rax, Immediate(vec->Get(context, i)
636 .ToLocalChecked()
637 ->Int32Value(context)
638 .FromJust()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000639 __ shlq(rax, Immediate(0x20));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000640 __ orq(rax, Immediate(vec->Get(context, ++i)
641 .ToLocalChecked()
642 ->Int32Value(context)
643 .FromJust()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000644 __ pushq(rax);
645 }
646
647 // Read vector into a xmm register.
648 __ xorps(xmm0, xmm0);
649 __ movdqa(xmm0, Operand(rsp, 0));
650 // Create mask and store it in the return register.
651 __ movmskps(rax, xmm0);
652
653 // Remove unused data from the stack.
654 __ addq(rsp, Immediate(ELEMENT_COUNT * sizeof(int32_t)));
655 // Restore return address.
656 __ pushq(rcx);
657
658 __ ret(0);
659
660 CodeDesc desc;
661 assm.GetCode(&desc);
662 Handle<Code> code = isolate->factory()->NewCode(
663 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
664
665 F0 f = FUNCTION_CAST<F0>(code->entry());
666 int res = f();
667 args.GetReturnValue().Set(v8::Integer::New(CcTest::isolate(), res));
668}
Ben Murdoch3ef787d2012-04-12 10:51:47 +0100669
670
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000671TEST(StackAlignmentForSSE2) {
672 CcTest::InitializeVM();
673 CHECK_EQ(0, v8::base::OS::ActivationFrameAlignment() % 16);
674
675 v8::Isolate* isolate = CcTest::isolate();
676 v8::HandleScope handle_scope(isolate);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000677 v8::Local<v8::ObjectTemplate> global_template =
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000678 v8::ObjectTemplate::New(isolate);
679 global_template->Set(v8_str("do_sse2"),
680 v8::FunctionTemplate::New(isolate, DoSSE2));
681
682 LocalContext env(NULL, global_template);
683 CompileRun(
684 "function foo(vec) {"
685 " return do_sse2(vec);"
686 "}");
687
688 v8::Local<v8::Object> global_object = env->Global();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000689 v8::Local<v8::Function> foo = v8::Local<v8::Function>::Cast(
690 global_object->Get(env.local(), v8_str("foo")).ToLocalChecked());
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000691
692 int32_t vec[ELEMENT_COUNT] = { -1, 1, 1, 1 };
693 v8::Local<v8::Array> v8_vec = v8::Array::New(isolate, ELEMENT_COUNT);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000694 for (unsigned i = 0; i < ELEMENT_COUNT; i++) {
695 v8_vec->Set(env.local(), i, v8_num(vec[i])).FromJust();
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000696 }
697
698 v8::Local<v8::Value> args[] = { v8_vec };
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000699 v8::Local<v8::Value> result =
700 foo->Call(env.local(), global_object, 1, args).ToLocalChecked();
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000701
702 // The mask should be 0b1000.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000703 CHECK_EQ(8, result->Int32Value(env.local()).FromJust());
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000704}
705
706#undef ELEMENT_COUNT
707#endif // __GNUC__
708
709
710TEST(AssemblerX64Extractps) {
711 CcTest::InitializeVM();
712 if (!CpuFeatures::IsSupported(SSE4_1)) return;
713
714 v8::HandleScope scope(CcTest::isolate());
715 byte buffer[256];
716 Isolate* isolate = CcTest::i_isolate();
717 Assembler assm(isolate, buffer, sizeof(buffer));
718 { CpuFeatureScope fscope2(&assm, SSE4_1);
719 __ extractps(rax, xmm0, 0x1);
720 __ ret(0);
721 }
722
723 CodeDesc desc;
724 assm.GetCode(&desc);
725 Handle<Code> code = isolate->factory()->NewCode(
726 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
727#ifdef OBJECT_PRINT
728 OFStream os(stdout);
729 code->Print(os);
730#endif
731
732 F3 f = FUNCTION_CAST<F3>(code->entry());
733 uint64_t value1 = V8_2PART_UINT64_C(0x12345678, 87654321);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000734 CHECK_EQ(0x12345678u, f(uint64_to_double(value1)));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000735 uint64_t value2 = V8_2PART_UINT64_C(0x87654321, 12345678);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000736 CHECK_EQ(0x87654321u, f(uint64_to_double(value2)));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000737}
738
739
740typedef int (*F6)(float x, float y);
741TEST(AssemblerX64SSE) {
742 CcTest::InitializeVM();
743
744 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
745 HandleScope scope(isolate);
746 v8::internal::byte buffer[256];
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000747 MacroAssembler assm(isolate, buffer, sizeof(buffer),
748 v8::internal::CodeObjectRequired::kYes);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000749 {
750 __ shufps(xmm0, xmm0, 0x0); // brocast first argument
751 __ shufps(xmm1, xmm1, 0x0); // brocast second argument
752 __ movaps(xmm2, xmm1);
753 __ addps(xmm2, xmm0);
754 __ mulps(xmm2, xmm1);
755 __ subps(xmm2, xmm0);
756 __ divps(xmm2, xmm1);
757 __ cvttss2si(rax, xmm2);
758 __ ret(0);
759 }
760
761 CodeDesc desc;
762 assm.GetCode(&desc);
763 Handle<Code> code = isolate->factory()->NewCode(
764 desc,
765 Code::ComputeFlags(Code::STUB),
766 Handle<Code>());
767#ifdef OBJECT_PRINT
768 OFStream os(stdout);
769 code->Print(os);
770#endif
771
772 F6 f = FUNCTION_CAST<F6>(code->entry());
773 CHECK_EQ(2, f(1.0, 2.0));
774}
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400775
776
777typedef int (*F7)(double x, double y, double z);
778TEST(AssemblerX64FMA_sd) {
779 CcTest::InitializeVM();
780 if (!CpuFeatures::IsSupported(FMA3)) return;
781
782 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
783 HandleScope scope(isolate);
784 v8::internal::byte buffer[1024];
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000785 MacroAssembler assm(isolate, buffer, sizeof(buffer),
786 v8::internal::CodeObjectRequired::kYes);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400787 {
788 CpuFeatureScope fscope(&assm, FMA3);
789 Label exit;
790 // argument in xmm0, xmm1 and xmm2
791 // xmm0 * xmm1 + xmm2
792 __ movaps(xmm3, xmm0);
793 __ mulsd(xmm3, xmm1);
794 __ addsd(xmm3, xmm2); // Expected result in xmm3
795
796 __ subq(rsp, Immediate(kDoubleSize)); // For memory operand
797 // vfmadd132sd
798 __ movl(rax, Immediate(1)); // Test number
799 __ movaps(xmm8, xmm0);
800 __ vfmadd132sd(xmm8, xmm2, xmm1);
801 __ ucomisd(xmm8, xmm3);
802 __ j(not_equal, &exit);
803 // vfmadd213sd
804 __ incq(rax);
805 __ movaps(xmm8, xmm1);
806 __ vfmadd213sd(xmm8, xmm0, xmm2);
807 __ ucomisd(xmm8, xmm3);
808 __ j(not_equal, &exit);
809 // vfmadd231sd
810 __ incq(rax);
811 __ movaps(xmm8, xmm2);
812 __ vfmadd231sd(xmm8, xmm0, xmm1);
813 __ ucomisd(xmm8, xmm3);
814 __ j(not_equal, &exit);
815
816 // vfmadd132sd
817 __ incq(rax);
818 __ movaps(xmm8, xmm0);
819 __ movsd(Operand(rsp, 0), xmm1);
820 __ vfmadd132sd(xmm8, xmm2, Operand(rsp, 0));
821 __ ucomisd(xmm8, xmm3);
822 __ j(not_equal, &exit);
823 // vfmadd213sd
824 __ incq(rax);
825 __ movaps(xmm8, xmm1);
826 __ movsd(Operand(rsp, 0), xmm2);
827 __ vfmadd213sd(xmm8, xmm0, Operand(rsp, 0));
828 __ ucomisd(xmm8, xmm3);
829 __ j(not_equal, &exit);
830 // vfmadd231sd
831 __ incq(rax);
832 __ movaps(xmm8, xmm2);
833 __ movsd(Operand(rsp, 0), xmm1);
834 __ vfmadd231sd(xmm8, xmm0, Operand(rsp, 0));
835 __ ucomisd(xmm8, xmm3);
836 __ j(not_equal, &exit);
837
838 // xmm0 * xmm1 - xmm2
839 __ movaps(xmm3, xmm0);
840 __ mulsd(xmm3, xmm1);
841 __ subsd(xmm3, xmm2); // Expected result in xmm3
842
843 // vfmsub132sd
844 __ incq(rax);
845 __ movaps(xmm8, xmm0);
846 __ vfmsub132sd(xmm8, xmm2, xmm1);
847 __ ucomisd(xmm8, xmm3);
848 __ j(not_equal, &exit);
849 // vfmadd213sd
850 __ incq(rax);
851 __ movaps(xmm8, xmm1);
852 __ vfmsub213sd(xmm8, xmm0, xmm2);
853 __ ucomisd(xmm8, xmm3);
854 __ j(not_equal, &exit);
855 // vfmsub231sd
856 __ incq(rax);
857 __ movaps(xmm8, xmm2);
858 __ vfmsub231sd(xmm8, xmm0, xmm1);
859 __ ucomisd(xmm8, xmm3);
860 __ j(not_equal, &exit);
861
862 // vfmsub132sd
863 __ incq(rax);
864 __ movaps(xmm8, xmm0);
865 __ movsd(Operand(rsp, 0), xmm1);
866 __ vfmsub132sd(xmm8, xmm2, Operand(rsp, 0));
867 __ ucomisd(xmm8, xmm3);
868 __ j(not_equal, &exit);
869 // vfmsub213sd
870 __ incq(rax);
871 __ movaps(xmm8, xmm1);
872 __ movsd(Operand(rsp, 0), xmm2);
873 __ vfmsub213sd(xmm8, xmm0, Operand(rsp, 0));
874 __ ucomisd(xmm8, xmm3);
875 __ j(not_equal, &exit);
876 // vfmsub231sd
877 __ incq(rax);
878 __ movaps(xmm8, xmm2);
879 __ movsd(Operand(rsp, 0), xmm1);
880 __ vfmsub231sd(xmm8, xmm0, Operand(rsp, 0));
881 __ ucomisd(xmm8, xmm3);
882 __ j(not_equal, &exit);
883
884
885 // - xmm0 * xmm1 + xmm2
886 __ movaps(xmm3, xmm0);
887 __ mulsd(xmm3, xmm1);
888 __ Move(xmm4, (uint64_t)1 << 63);
889 __ xorpd(xmm3, xmm4);
890 __ addsd(xmm3, xmm2); // Expected result in xmm3
891
892 // vfnmadd132sd
893 __ incq(rax);
894 __ movaps(xmm8, xmm0);
895 __ vfnmadd132sd(xmm8, xmm2, xmm1);
896 __ ucomisd(xmm8, xmm3);
897 __ j(not_equal, &exit);
898 // vfmadd213sd
899 __ incq(rax);
900 __ movaps(xmm8, xmm1);
901 __ vfnmadd213sd(xmm8, xmm0, xmm2);
902 __ ucomisd(xmm8, xmm3);
903 __ j(not_equal, &exit);
904 // vfnmadd231sd
905 __ incq(rax);
906 __ movaps(xmm8, xmm2);
907 __ vfnmadd231sd(xmm8, xmm0, xmm1);
908 __ ucomisd(xmm8, xmm3);
909 __ j(not_equal, &exit);
910
911 // vfnmadd132sd
912 __ incq(rax);
913 __ movaps(xmm8, xmm0);
914 __ movsd(Operand(rsp, 0), xmm1);
915 __ vfnmadd132sd(xmm8, xmm2, Operand(rsp, 0));
916 __ ucomisd(xmm8, xmm3);
917 __ j(not_equal, &exit);
918 // vfnmadd213sd
919 __ incq(rax);
920 __ movaps(xmm8, xmm1);
921 __ movsd(Operand(rsp, 0), xmm2);
922 __ vfnmadd213sd(xmm8, xmm0, Operand(rsp, 0));
923 __ ucomisd(xmm8, xmm3);
924 __ j(not_equal, &exit);
925 // vfnmadd231sd
926 __ incq(rax);
927 __ movaps(xmm8, xmm2);
928 __ movsd(Operand(rsp, 0), xmm1);
929 __ vfnmadd231sd(xmm8, xmm0, Operand(rsp, 0));
930 __ ucomisd(xmm8, xmm3);
931 __ j(not_equal, &exit);
932
933
934 // - xmm0 * xmm1 - xmm2
935 __ movaps(xmm3, xmm0);
936 __ mulsd(xmm3, xmm1);
937 __ Move(xmm4, (uint64_t)1 << 63);
938 __ xorpd(xmm3, xmm4);
939 __ subsd(xmm3, xmm2); // Expected result in xmm3
940
941 // vfnmsub132sd
942 __ incq(rax);
943 __ movaps(xmm8, xmm0);
944 __ vfnmsub132sd(xmm8, xmm2, xmm1);
945 __ ucomisd(xmm8, xmm3);
946 __ j(not_equal, &exit);
947 // vfmsub213sd
948 __ incq(rax);
949 __ movaps(xmm8, xmm1);
950 __ vfnmsub213sd(xmm8, xmm0, xmm2);
951 __ ucomisd(xmm8, xmm3);
952 __ j(not_equal, &exit);
953 // vfnmsub231sd
954 __ incq(rax);
955 __ movaps(xmm8, xmm2);
956 __ vfnmsub231sd(xmm8, xmm0, xmm1);
957 __ ucomisd(xmm8, xmm3);
958 __ j(not_equal, &exit);
959
960 // vfnmsub132sd
961 __ incq(rax);
962 __ movaps(xmm8, xmm0);
963 __ movsd(Operand(rsp, 0), xmm1);
964 __ vfnmsub132sd(xmm8, xmm2, Operand(rsp, 0));
965 __ ucomisd(xmm8, xmm3);
966 __ j(not_equal, &exit);
967 // vfnmsub213sd
968 __ incq(rax);
969 __ movaps(xmm8, xmm1);
970 __ movsd(Operand(rsp, 0), xmm2);
971 __ vfnmsub213sd(xmm8, xmm0, Operand(rsp, 0));
972 __ ucomisd(xmm8, xmm3);
973 __ j(not_equal, &exit);
974 // vfnmsub231sd
975 __ incq(rax);
976 __ movaps(xmm8, xmm2);
977 __ movsd(Operand(rsp, 0), xmm1);
978 __ vfnmsub231sd(xmm8, xmm0, Operand(rsp, 0));
979 __ ucomisd(xmm8, xmm3);
980 __ j(not_equal, &exit);
981
982
983 __ xorl(rax, rax);
984 __ bind(&exit);
985 __ addq(rsp, Immediate(kDoubleSize));
986 __ ret(0);
987 }
988
989 CodeDesc desc;
990 assm.GetCode(&desc);
991 Handle<Code> code = isolate->factory()->NewCode(
992 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
993#ifdef OBJECT_PRINT
994 OFStream os(stdout);
995 code->Print(os);
996#endif
997
998 F7 f = FUNCTION_CAST<F7>(code->entry());
999 CHECK_EQ(0, f(0.000092662107262076, -2.460774966188315, -1.0958787393627414));
1000}
1001
1002
1003typedef int (*F8)(float x, float y, float z);
1004TEST(AssemblerX64FMA_ss) {
1005 CcTest::InitializeVM();
1006 if (!CpuFeatures::IsSupported(FMA3)) return;
1007
1008 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
1009 HandleScope scope(isolate);
1010 v8::internal::byte buffer[1024];
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001011 MacroAssembler assm(isolate, buffer, sizeof(buffer),
1012 v8::internal::CodeObjectRequired::kYes);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001013 {
1014 CpuFeatureScope fscope(&assm, FMA3);
1015 Label exit;
1016 // arguments in xmm0, xmm1 and xmm2
1017 // xmm0 * xmm1 + xmm2
1018 __ movaps(xmm3, xmm0);
1019 __ mulss(xmm3, xmm1);
1020 __ addss(xmm3, xmm2); // Expected result in xmm3
1021
1022 __ subq(rsp, Immediate(kDoubleSize)); // For memory operand
1023 // vfmadd132ss
1024 __ movl(rax, Immediate(1)); // Test number
1025 __ movaps(xmm8, xmm0);
1026 __ vfmadd132ss(xmm8, xmm2, xmm1);
1027 __ ucomiss(xmm8, xmm3);
1028 __ j(not_equal, &exit);
1029 // vfmadd213ss
1030 __ incq(rax);
1031 __ movaps(xmm8, xmm1);
1032 __ vfmadd213ss(xmm8, xmm0, xmm2);
1033 __ ucomiss(xmm8, xmm3);
1034 __ j(not_equal, &exit);
1035 // vfmadd231ss
1036 __ incq(rax);
1037 __ movaps(xmm8, xmm2);
1038 __ vfmadd231ss(xmm8, xmm0, xmm1);
1039 __ ucomiss(xmm8, xmm3);
1040 __ j(not_equal, &exit);
1041
1042 // vfmadd132ss
1043 __ incq(rax);
1044 __ movaps(xmm8, xmm0);
1045 __ movss(Operand(rsp, 0), xmm1);
1046 __ vfmadd132ss(xmm8, xmm2, Operand(rsp, 0));
1047 __ ucomiss(xmm8, xmm3);
1048 __ j(not_equal, &exit);
1049 // vfmadd213ss
1050 __ incq(rax);
1051 __ movaps(xmm8, xmm1);
1052 __ movss(Operand(rsp, 0), xmm2);
1053 __ vfmadd213ss(xmm8, xmm0, Operand(rsp, 0));
1054 __ ucomiss(xmm8, xmm3);
1055 __ j(not_equal, &exit);
1056 // vfmadd231ss
1057 __ incq(rax);
1058 __ movaps(xmm8, xmm2);
1059 __ movss(Operand(rsp, 0), xmm1);
1060 __ vfmadd231ss(xmm8, xmm0, Operand(rsp, 0));
1061 __ ucomiss(xmm8, xmm3);
1062 __ j(not_equal, &exit);
1063
1064 // xmm0 * xmm1 - xmm2
1065 __ movaps(xmm3, xmm0);
1066 __ mulss(xmm3, xmm1);
1067 __ subss(xmm3, xmm2); // Expected result in xmm3
1068
1069 // vfmsub132ss
1070 __ incq(rax);
1071 __ movaps(xmm8, xmm0);
1072 __ vfmsub132ss(xmm8, xmm2, xmm1);
1073 __ ucomiss(xmm8, xmm3);
1074 __ j(not_equal, &exit);
1075 // vfmadd213ss
1076 __ incq(rax);
1077 __ movaps(xmm8, xmm1);
1078 __ vfmsub213ss(xmm8, xmm0, xmm2);
1079 __ ucomiss(xmm8, xmm3);
1080 __ j(not_equal, &exit);
1081 // vfmsub231ss
1082 __ incq(rax);
1083 __ movaps(xmm8, xmm2);
1084 __ vfmsub231ss(xmm8, xmm0, xmm1);
1085 __ ucomiss(xmm8, xmm3);
1086 __ j(not_equal, &exit);
1087
1088 // vfmsub132ss
1089 __ incq(rax);
1090 __ movaps(xmm8, xmm0);
1091 __ movss(Operand(rsp, 0), xmm1);
1092 __ vfmsub132ss(xmm8, xmm2, Operand(rsp, 0));
1093 __ ucomiss(xmm8, xmm3);
1094 __ j(not_equal, &exit);
1095 // vfmsub213ss
1096 __ incq(rax);
1097 __ movaps(xmm8, xmm1);
1098 __ movss(Operand(rsp, 0), xmm2);
1099 __ vfmsub213ss(xmm8, xmm0, Operand(rsp, 0));
1100 __ ucomiss(xmm8, xmm3);
1101 __ j(not_equal, &exit);
1102 // vfmsub231ss
1103 __ incq(rax);
1104 __ movaps(xmm8, xmm2);
1105 __ movss(Operand(rsp, 0), xmm1);
1106 __ vfmsub231ss(xmm8, xmm0, Operand(rsp, 0));
1107 __ ucomiss(xmm8, xmm3);
1108 __ j(not_equal, &exit);
1109
1110
1111 // - xmm0 * xmm1 + xmm2
1112 __ movaps(xmm3, xmm0);
1113 __ mulss(xmm3, xmm1);
1114 __ Move(xmm4, (uint32_t)1 << 31);
1115 __ xorps(xmm3, xmm4);
1116 __ addss(xmm3, xmm2); // Expected result in xmm3
1117
1118 // vfnmadd132ss
1119 __ incq(rax);
1120 __ movaps(xmm8, xmm0);
1121 __ vfnmadd132ss(xmm8, xmm2, xmm1);
1122 __ ucomiss(xmm8, xmm3);
1123 __ j(not_equal, &exit);
1124 // vfmadd213ss
1125 __ incq(rax);
1126 __ movaps(xmm8, xmm1);
1127 __ vfnmadd213ss(xmm8, xmm0, xmm2);
1128 __ ucomiss(xmm8, xmm3);
1129 __ j(not_equal, &exit);
1130 // vfnmadd231ss
1131 __ incq(rax);
1132 __ movaps(xmm8, xmm2);
1133 __ vfnmadd231ss(xmm8, xmm0, xmm1);
1134 __ ucomiss(xmm8, xmm3);
1135 __ j(not_equal, &exit);
1136
1137 // vfnmadd132ss
1138 __ incq(rax);
1139 __ movaps(xmm8, xmm0);
1140 __ movss(Operand(rsp, 0), xmm1);
1141 __ vfnmadd132ss(xmm8, xmm2, Operand(rsp, 0));
1142 __ ucomiss(xmm8, xmm3);
1143 __ j(not_equal, &exit);
1144 // vfnmadd213ss
1145 __ incq(rax);
1146 __ movaps(xmm8, xmm1);
1147 __ movss(Operand(rsp, 0), xmm2);
1148 __ vfnmadd213ss(xmm8, xmm0, Operand(rsp, 0));
1149 __ ucomiss(xmm8, xmm3);
1150 __ j(not_equal, &exit);
1151 // vfnmadd231ss
1152 __ incq(rax);
1153 __ movaps(xmm8, xmm2);
1154 __ movss(Operand(rsp, 0), xmm1);
1155 __ vfnmadd231ss(xmm8, xmm0, Operand(rsp, 0));
1156 __ ucomiss(xmm8, xmm3);
1157 __ j(not_equal, &exit);
1158
1159
1160 // - xmm0 * xmm1 - xmm2
1161 __ movaps(xmm3, xmm0);
1162 __ mulss(xmm3, xmm1);
1163 __ Move(xmm4, (uint32_t)1 << 31);
1164 __ xorps(xmm3, xmm4);
1165 __ subss(xmm3, xmm2); // Expected result in xmm3
1166
1167 // vfnmsub132ss
1168 __ incq(rax);
1169 __ movaps(xmm8, xmm0);
1170 __ vfnmsub132ss(xmm8, xmm2, xmm1);
1171 __ ucomiss(xmm8, xmm3);
1172 __ j(not_equal, &exit);
1173 // vfmsub213ss
1174 __ incq(rax);
1175 __ movaps(xmm8, xmm1);
1176 __ vfnmsub213ss(xmm8, xmm0, xmm2);
1177 __ ucomiss(xmm8, xmm3);
1178 __ j(not_equal, &exit);
1179 // vfnmsub231ss
1180 __ incq(rax);
1181 __ movaps(xmm8, xmm2);
1182 __ vfnmsub231ss(xmm8, xmm0, xmm1);
1183 __ ucomiss(xmm8, xmm3);
1184 __ j(not_equal, &exit);
1185
1186 // vfnmsub132ss
1187 __ incq(rax);
1188 __ movaps(xmm8, xmm0);
1189 __ movss(Operand(rsp, 0), xmm1);
1190 __ vfnmsub132ss(xmm8, xmm2, Operand(rsp, 0));
1191 __ ucomiss(xmm8, xmm3);
1192 __ j(not_equal, &exit);
1193 // vfnmsub213ss
1194 __ incq(rax);
1195 __ movaps(xmm8, xmm1);
1196 __ movss(Operand(rsp, 0), xmm2);
1197 __ vfnmsub213ss(xmm8, xmm0, Operand(rsp, 0));
1198 __ ucomiss(xmm8, xmm3);
1199 __ j(not_equal, &exit);
1200 // vfnmsub231ss
1201 __ incq(rax);
1202 __ movaps(xmm8, xmm2);
1203 __ movss(Operand(rsp, 0), xmm1);
1204 __ vfnmsub231ss(xmm8, xmm0, Operand(rsp, 0));
1205 __ ucomiss(xmm8, xmm3);
1206 __ j(not_equal, &exit);
1207
1208
1209 __ xorl(rax, rax);
1210 __ bind(&exit);
1211 __ addq(rsp, Immediate(kDoubleSize));
1212 __ ret(0);
1213 }
1214
1215 CodeDesc desc;
1216 assm.GetCode(&desc);
1217 Handle<Code> code = isolate->factory()->NewCode(
1218 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
1219#ifdef OBJECT_PRINT
1220 OFStream os(stdout);
1221 code->Print(os);
1222#endif
1223
1224 F8 f = FUNCTION_CAST<F8>(code->entry());
1225 CHECK_EQ(0, f(9.26621069e-05f, -2.4607749f, -1.09587872f));
1226}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001227
1228
1229TEST(AssemblerX64SSE_ss) {
1230 CcTest::InitializeVM();
1231
1232 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
1233 HandleScope scope(isolate);
1234 v8::internal::byte buffer[1024];
1235 Assembler assm(isolate, buffer, sizeof(buffer));
1236 {
1237 Label exit;
1238 // arguments in xmm0, xmm1 and xmm2
1239 __ movl(rax, Immediate(0));
1240
1241 __ movaps(xmm3, xmm0);
1242 __ maxss(xmm3, xmm1);
1243 __ ucomiss(xmm3, xmm1);
1244 __ j(parity_even, &exit);
1245 __ j(not_equal, &exit);
1246 __ movl(rax, Immediate(1));
1247
1248 __ movaps(xmm3, xmm1);
1249 __ minss(xmm3, xmm2);
1250 __ ucomiss(xmm3, xmm1);
1251 __ j(parity_even, &exit);
1252 __ j(not_equal, &exit);
1253 __ movl(rax, Immediate(2));
1254
1255 __ movaps(xmm3, xmm2);
1256 __ subss(xmm3, xmm1);
1257 __ ucomiss(xmm3, xmm0);
1258 __ j(parity_even, &exit);
1259 __ j(not_equal, &exit);
1260 __ movl(rax, Immediate(3));
1261
1262 __ movaps(xmm3, xmm0);
1263 __ addss(xmm3, xmm1);
1264 __ ucomiss(xmm3, xmm2);
1265 __ j(parity_even, &exit);
1266 __ j(not_equal, &exit);
1267 __ movl(rax, Immediate(4));
1268
1269 __ movaps(xmm3, xmm0);
1270 __ mulss(xmm3, xmm1);
1271 __ ucomiss(xmm3, xmm1);
1272 __ j(parity_even, &exit);
1273 __ j(not_equal, &exit);
1274 __ movl(rax, Immediate(5));
1275
1276 __ movaps(xmm3, xmm0);
1277 __ divss(xmm3, xmm1);
1278 __ mulss(xmm3, xmm2);
1279 __ mulss(xmm3, xmm1);
1280 __ ucomiss(xmm3, xmm2);
1281 __ j(parity_even, &exit);
1282 __ j(not_equal, &exit);
1283 __ movl(rax, Immediate(6));
1284
1285 // result in eax
1286 __ bind(&exit);
1287 __ ret(0);
1288 }
1289
1290 CodeDesc desc;
1291 assm.GetCode(&desc);
1292 Handle<Code> code = isolate->factory()->NewCode(
1293 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
1294#ifdef OBJECT_PRINT
1295 OFStream os(stdout);
1296 code->Print(os);
1297#endif
1298
1299 F8 f = FUNCTION_CAST<F8>(code->entry());
1300 int res = f(1.0f, 2.0f, 3.0f);
1301 PrintF("f(1,2,3) = %d\n", res);
1302 CHECK_EQ(6, res);
1303}
1304
1305
1306TEST(AssemblerX64AVX_ss) {
1307 CcTest::InitializeVM();
1308 if (!CpuFeatures::IsSupported(AVX)) return;
1309
1310 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
1311 HandleScope scope(isolate);
1312 v8::internal::byte buffer[1024];
1313 Assembler assm(isolate, buffer, sizeof(buffer));
1314 {
1315 CpuFeatureScope avx_scope(&assm, AVX);
1316 Label exit;
1317 // arguments in xmm0, xmm1 and xmm2
1318 __ subq(rsp, Immediate(kDoubleSize * 2)); // For memory operand
1319
1320 __ movl(rdx, Immediate(0xc2f64000)); // -123.125
1321 __ vmovd(xmm4, rdx);
1322 __ vmovss(Operand(rsp, 0), xmm4);
1323 __ vmovss(xmm5, Operand(rsp, 0));
1324 __ vmovaps(xmm6, xmm5);
1325 __ vmovd(rcx, xmm6);
1326 __ cmpl(rcx, rdx);
1327 __ movl(rax, Immediate(9));
1328 __ j(not_equal, &exit);
1329
1330 __ movl(rax, Immediate(0));
1331 __ vmaxss(xmm3, xmm0, xmm1);
1332 __ vucomiss(xmm3, xmm1);
1333 __ j(parity_even, &exit);
1334 __ j(not_equal, &exit);
1335 __ movl(rax, Immediate(1));
1336
1337 __ vminss(xmm3, xmm1, xmm2);
1338 __ vucomiss(xmm3, xmm1);
1339 __ j(parity_even, &exit);
1340 __ j(not_equal, &exit);
1341 __ movl(rax, Immediate(2));
1342
1343 __ vsubss(xmm3, xmm2, xmm1);
1344 __ vucomiss(xmm3, xmm0);
1345 __ j(parity_even, &exit);
1346 __ j(not_equal, &exit);
1347 __ movl(rax, Immediate(3));
1348
1349 __ vaddss(xmm3, xmm0, xmm1);
1350 __ vucomiss(xmm3, xmm2);
1351 __ j(parity_even, &exit);
1352 __ j(not_equal, &exit);
1353 __ movl(rax, Immediate(4));
1354
1355 __ vmulss(xmm3, xmm0, xmm1);
1356 __ vucomiss(xmm3, xmm1);
1357 __ j(parity_even, &exit);
1358 __ j(not_equal, &exit);
1359 __ movl(rax, Immediate(5));
1360
1361 __ vdivss(xmm3, xmm0, xmm1);
1362 __ vmulss(xmm3, xmm3, xmm2);
1363 __ vmulss(xmm3, xmm3, xmm1);
1364 __ vucomiss(xmm3, xmm2);
1365 __ j(parity_even, &exit);
1366 __ j(not_equal, &exit);
1367 __ movl(rax, Immediate(6));
1368
1369 // result in eax
1370 __ bind(&exit);
1371 __ addq(rsp, Immediate(kDoubleSize * 2));
1372 __ ret(0);
1373 }
1374
1375 CodeDesc desc;
1376 assm.GetCode(&desc);
1377 Handle<Code> code = isolate->factory()->NewCode(
1378 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
1379#ifdef OBJECT_PRINT
1380 OFStream os(stdout);
1381 code->Print(os);
1382#endif
1383
1384 F8 f = FUNCTION_CAST<F8>(code->entry());
1385 int res = f(1.0f, 2.0f, 3.0f);
1386 PrintF("f(1,2,3) = %d\n", res);
1387 CHECK_EQ(6, res);
1388}
1389
1390
1391TEST(AssemblerX64AVX_sd) {
1392 CcTest::InitializeVM();
1393 if (!CpuFeatures::IsSupported(AVX)) return;
1394
1395 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
1396 HandleScope scope(isolate);
1397 v8::internal::byte buffer[1024];
1398 Assembler assm(isolate, buffer, sizeof(buffer));
1399 {
1400 CpuFeatureScope avx_scope(&assm, AVX);
1401 Label exit;
1402 // arguments in xmm0, xmm1 and xmm2
1403 __ subq(rsp, Immediate(kDoubleSize * 2)); // For memory operand
1404 __ movl(rax, Immediate(0));
1405
1406 __ vmaxsd(xmm4, xmm0, xmm1);
1407 __ vmovsd(Operand(rsp, kDoubleSize), xmm4);
1408 __ vmovsd(xmm5, Operand(rsp, kDoubleSize));
1409 __ vmovsd(xmm6, xmm6, xmm5);
1410 __ vmovapd(xmm3, xmm6);
1411
1412 // Test vcvtss2sd & vcvtsd2ss
1413 __ movl(rax, Immediate(9));
1414 __ movq(rdx, V8_INT64_C(0x426D1A0000000000));
1415 __ movq(Operand(rsp, 0), rdx);
1416 __ vcvtsd2ss(xmm6, xmm6, Operand(rsp, 0));
1417 __ vcvtss2sd(xmm7, xmm6, xmm6);
1418 __ vcvtsd2ss(xmm8, xmm7, xmm7);
1419 __ vmovss(Operand(rsp, 0), xmm8);
1420 __ vcvtss2sd(xmm9, xmm8, Operand(rsp, 0));
1421 __ vmovq(rcx, xmm9);
1422 __ cmpq(rcx, rdx);
1423 __ j(not_equal, &exit);
1424
1425 // Test vcvttsd2si
1426 __ movl(rax, Immediate(10));
1427 __ movl(rdx, Immediate(123));
1428 __ vcvtlsi2sd(xmm6, xmm6, rdx);
1429 __ vcvttsd2si(rcx, xmm6);
1430 __ cmpl(rcx, rdx);
1431 __ j(not_equal, &exit);
1432 __ xorl(rcx, rcx);
1433 __ vmovsd(Operand(rsp, 0), xmm6);
1434 __ vcvttsd2si(rcx, Operand(rsp, 0));
1435 __ cmpl(rcx, rdx);
1436 __ j(not_equal, &exit);
1437
1438 // Test vcvttsd2siq
1439 __ movl(rax, Immediate(11));
1440 __ movq(rdx, V8_INT64_C(0x426D1A94A2000000)); // 1.0e12
1441 __ vmovq(xmm6, rdx);
1442 __ vcvttsd2siq(rcx, xmm6);
1443 __ movq(rdx, V8_INT64_C(1000000000000));
1444 __ cmpq(rcx, rdx);
1445 __ j(not_equal, &exit);
1446 __ xorq(rcx, rcx);
1447 __ vmovsd(Operand(rsp, 0), xmm6);
1448 __ vcvttsd2siq(rcx, Operand(rsp, 0));
1449 __ cmpq(rcx, rdx);
1450 __ j(not_equal, &exit);
1451
1452 // Test vmovmskpd
1453 __ movl(rax, Immediate(12));
1454 __ movq(rdx, V8_INT64_C(0x426D1A94A2000000)); // 1.0e12
1455 __ vmovq(xmm6, rdx);
1456 __ movq(rdx, V8_INT64_C(0xC26D1A94A2000000)); // -1.0e12
1457 __ vmovq(xmm7, rdx);
1458 __ shufps(xmm6, xmm7, 0x44);
1459 __ vmovmskpd(rdx, xmm6);
1460 __ cmpl(rdx, Immediate(2));
1461 __ j(not_equal, &exit);
1462
1463 // Test vpcmpeqd
1464 __ movq(rdx, V8_UINT64_C(0x0123456789abcdef));
1465 __ movq(rcx, V8_UINT64_C(0x0123456788888888));
1466 __ vmovq(xmm6, rdx);
1467 __ vmovq(xmm7, rcx);
1468 __ vpcmpeqd(xmm8, xmm6, xmm7);
1469 __ vmovq(rdx, xmm8);
1470 __ movq(rcx, V8_UINT64_C(0xffffffff00000000));
1471 __ cmpq(rcx, rdx);
1472 __ movl(rax, Immediate(13));
1473 __ j(not_equal, &exit);
1474
1475 // Test vpsllq, vpsrlq
1476 __ movl(rax, Immediate(13));
1477 __ movq(rdx, V8_UINT64_C(0x0123456789abcdef));
1478 __ vmovq(xmm6, rdx);
1479 __ vpsrlq(xmm7, xmm6, 4);
1480 __ vmovq(rdx, xmm7);
1481 __ movq(rcx, V8_UINT64_C(0x00123456789abcde));
1482 __ cmpq(rdx, rcx);
1483 __ j(not_equal, &exit);
1484 __ vpsllq(xmm7, xmm6, 12);
1485 __ vmovq(rdx, xmm7);
1486 __ movq(rcx, V8_UINT64_C(0x3456789abcdef000));
1487 __ cmpq(rdx, rcx);
1488 __ j(not_equal, &exit);
1489
1490 // Test vandpd, vorpd, vxorpd
1491 __ movl(rax, Immediate(14));
1492 __ movl(rdx, Immediate(0x00ff00ff));
1493 __ movl(rcx, Immediate(0x0f0f0f0f));
1494 __ vmovd(xmm4, rdx);
1495 __ vmovd(xmm5, rcx);
1496 __ vandpd(xmm6, xmm4, xmm5);
1497 __ vmovd(rdx, xmm6);
1498 __ cmpl(rdx, Immediate(0x000f000f));
1499 __ j(not_equal, &exit);
1500 __ vorpd(xmm6, xmm4, xmm5);
1501 __ vmovd(rdx, xmm6);
1502 __ cmpl(rdx, Immediate(0x0fff0fff));
1503 __ j(not_equal, &exit);
1504 __ vxorpd(xmm6, xmm4, xmm5);
1505 __ vmovd(rdx, xmm6);
1506 __ cmpl(rdx, Immediate(0x0ff00ff0));
1507 __ j(not_equal, &exit);
1508
1509 // Test vsqrtsd
1510 __ movl(rax, Immediate(15));
1511 __ movq(rdx, V8_UINT64_C(0x4004000000000000)); // 2.5
1512 __ vmovq(xmm4, rdx);
1513 __ vmulsd(xmm5, xmm4, xmm4);
1514 __ vmovsd(Operand(rsp, 0), xmm5);
1515 __ vsqrtsd(xmm6, xmm5, xmm5);
1516 __ vmovq(rcx, xmm6);
1517 __ cmpq(rcx, rdx);
1518 __ j(not_equal, &exit);
1519 __ vsqrtsd(xmm7, xmm7, Operand(rsp, 0));
1520 __ vmovq(rcx, xmm7);
1521 __ cmpq(rcx, rdx);
1522 __ j(not_equal, &exit);
1523
1524 // Test vroundsd
1525 __ movl(rax, Immediate(16));
1526 __ movq(rdx, V8_UINT64_C(0x4002000000000000)); // 2.25
1527 __ vmovq(xmm4, rdx);
1528 __ vroundsd(xmm5, xmm4, xmm4, kRoundUp);
1529 __ movq(rcx, V8_UINT64_C(0x4008000000000000)); // 3.0
1530 __ vmovq(xmm6, rcx);
1531 __ vucomisd(xmm5, xmm6);
1532 __ j(not_equal, &exit);
1533
1534 // Test vcvtlsi2sd
1535 __ movl(rax, Immediate(17));
1536 __ movl(rdx, Immediate(6));
1537 __ movq(rcx, V8_UINT64_C(0x4018000000000000)); // 6.0
1538 __ vmovq(xmm5, rcx);
1539 __ vcvtlsi2sd(xmm6, xmm6, rdx);
1540 __ vucomisd(xmm5, xmm6);
1541 __ j(not_equal, &exit);
1542 __ movl(Operand(rsp, 0), rdx);
1543 __ vcvtlsi2sd(xmm7, xmm7, Operand(rsp, 0));
1544 __ vucomisd(xmm5, xmm6);
1545 __ j(not_equal, &exit);
1546
1547 // Test vcvtqsi2sd
1548 __ movl(rax, Immediate(18));
1549 __ movq(rdx, V8_UINT64_C(0x2000000000000000)); // 2 << 0x3c
1550 __ movq(rcx, V8_UINT64_C(0x43c0000000000000));
1551 __ vmovq(xmm5, rcx);
1552 __ vcvtqsi2sd(xmm6, xmm6, rdx);
1553 __ vucomisd(xmm5, xmm6);
1554 __ j(not_equal, &exit);
1555
1556 // Test vcvtsd2si
1557 __ movl(rax, Immediate(19));
1558 __ movq(rdx, V8_UINT64_C(0x4018000000000000)); // 6.0
1559 __ vmovq(xmm5, rdx);
1560 __ vcvtsd2si(rcx, xmm5);
1561 __ cmpl(rcx, Immediate(6));
1562 __ j(not_equal, &exit);
1563
1564 __ movq(rdx, V8_INT64_C(0x3ff0000000000000)); // 1.0
1565 __ vmovq(xmm7, rdx);
1566 __ vmulsd(xmm1, xmm1, xmm7);
1567 __ movq(Operand(rsp, 0), rdx);
1568 __ vmovq(xmm6, Operand(rsp, 0));
1569 __ vmulsd(xmm1, xmm1, xmm6);
1570
1571 __ vucomisd(xmm3, xmm1);
1572 __ j(parity_even, &exit);
1573 __ j(not_equal, &exit);
1574 __ movl(rax, Immediate(1));
1575
1576 __ vminsd(xmm3, xmm1, xmm2);
1577 __ vucomisd(xmm3, xmm1);
1578 __ j(parity_even, &exit);
1579 __ j(not_equal, &exit);
1580 __ movl(rax, Immediate(2));
1581
1582 __ vsubsd(xmm3, xmm2, xmm1);
1583 __ vucomisd(xmm3, xmm0);
1584 __ j(parity_even, &exit);
1585 __ j(not_equal, &exit);
1586 __ movl(rax, Immediate(3));
1587
1588 __ vaddsd(xmm3, xmm0, xmm1);
1589 __ vucomisd(xmm3, xmm2);
1590 __ j(parity_even, &exit);
1591 __ j(not_equal, &exit);
1592 __ movl(rax, Immediate(4));
1593
1594 __ vmulsd(xmm3, xmm0, xmm1);
1595 __ vucomisd(xmm3, xmm1);
1596 __ j(parity_even, &exit);
1597 __ j(not_equal, &exit);
1598 __ movl(rax, Immediate(5));
1599
1600 __ vdivsd(xmm3, xmm0, xmm1);
1601 __ vmulsd(xmm3, xmm3, xmm2);
1602 __ vmulsd(xmm3, xmm3, xmm1);
1603 __ vucomisd(xmm3, xmm2);
1604 __ j(parity_even, &exit);
1605 __ j(not_equal, &exit);
1606 __ movl(rax, Immediate(6));
1607
1608 // result in eax
1609 __ bind(&exit);
1610 __ addq(rsp, Immediate(kDoubleSize * 2));
1611 __ ret(0);
1612 }
1613
1614 CodeDesc desc;
1615 assm.GetCode(&desc);
1616 Handle<Code> code = isolate->factory()->NewCode(
1617 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
1618#ifdef OBJECT_PRINT
1619 OFStream os(stdout);
1620 code->Print(os);
1621#endif
1622
1623 F7 f = FUNCTION_CAST<F7>(code->entry());
1624 int res = f(1.0, 2.0, 3.0);
1625 PrintF("f(1,2,3) = %d\n", res);
1626 CHECK_EQ(6, res);
1627}
1628
1629
1630TEST(AssemblerX64BMI1) {
1631 CcTest::InitializeVM();
1632 if (!CpuFeatures::IsSupported(BMI1)) return;
1633
1634 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
1635 HandleScope scope(isolate);
1636 v8::internal::byte buffer[1024];
1637 MacroAssembler assm(isolate, buffer, sizeof(buffer),
1638 v8::internal::CodeObjectRequired::kYes);
1639 {
1640 CpuFeatureScope fscope(&assm, BMI1);
1641 Label exit;
1642
1643 __ movq(rcx, V8_UINT64_C(0x1122334455667788)); // source operand
1644 __ pushq(rcx); // For memory operand
1645
1646 // andn
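    // ANDN computes ~first_source & second_source:
    // ~0x1000000020000000 & 0x1122334455667788 = 0x0122334455667788.
    // The 32-bit form operates on (and zero-extends) the low halves.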
1647 __ movq(rdx, V8_UINT64_C(0x1000000020000000));
1648
1649 __ movl(rax, Immediate(1)); // Test number
1650 __ andnq(r8, rdx, rcx);
1651 __ movq(r9, V8_UINT64_C(0x0122334455667788)); // expected result
1652 __ cmpq(r8, r9);
1653 __ j(not_equal, &exit);
1654
1655 __ incq(rax);
1656 __ andnq(r8, rdx, Operand(rsp, 0));
1657 __ movq(r9, V8_UINT64_C(0x0122334455667788)); // expected result
1658 __ cmpq(r8, r9);
1659 __ j(not_equal, &exit);
1660
1661 __ incq(rax);
1662 __ andnl(r8, rdx, rcx);
1663 __ movq(r9, V8_UINT64_C(0x0000000055667788)); // expected result
1664 __ cmpq(r8, r9);
1665 __ j(not_equal, &exit);
1666
1667 __ incq(rax);
1668 __ andnl(r8, rdx, Operand(rsp, 0));
1669 __ movq(r9, V8_UINT64_C(0x0000000055667788)); // expected result
1670 __ cmpq(r8, r9);
1671 __ j(not_equal, &exit);
1672
1673 // bextr
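    // BEXTR extracts the bit field described by the control operand:
    // start = 0x08, length = 0x28 (40 bits), i.e. bits 8..47 of the source.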
1674 __ movq(rdx, V8_UINT64_C(0x0000000000002808));
1675
1676 __ incq(rax);
1677 __ bextrq(r8, rcx, rdx);
1678 __ movq(r9, V8_UINT64_C(0x0000003344556677)); // expected result
1679 __ cmpq(r8, r9);
1680 __ j(not_equal, &exit);
1681
1682 __ incq(rax);
1683 __ bextrq(r8, Operand(rsp, 0), rdx);
1684 __ movq(r9, V8_UINT64_C(0x0000003344556677)); // expected result
1685 __ cmpq(r8, r9);
1686 __ j(not_equal, &exit);
1687
1688 __ incq(rax);
1689 __ bextrl(r8, rcx, rdx);
1690 __ movq(r9, V8_UINT64_C(0x0000000000556677)); // expected result
1691 __ cmpq(r8, r9);
1692 __ j(not_equal, &exit);
1693
1694 __ incq(rax);
1695 __ bextrl(r8, Operand(rsp, 0), rdx);
1696 __ movq(r9, V8_UINT64_C(0x0000000000556677)); // expected result
1697 __ cmpq(r8, r9);
1698 __ j(not_equal, &exit);
1699
1700 // blsi
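    // BLSI isolates the lowest set bit (src & -src); for a source ending in
    // 0x88 that is bit 3, i.e. 0x8.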
1701 __ incq(rax);
1702 __ blsiq(r8, rcx);
1703 __ movq(r9, V8_UINT64_C(0x0000000000000008)); // expected result
1704 __ cmpq(r8, r9);
1705 __ j(not_equal, &exit);
1706
1707 __ incq(rax);
1708 __ blsiq(r8, Operand(rsp, 0));
1709 __ movq(r9, V8_UINT64_C(0x0000000000000008)); // expected result
1710 __ cmpq(r8, r9);
1711 __ j(not_equal, &exit);
1712
1713 __ incq(rax);
1714 __ blsil(r8, rcx);
1715 __ movq(r9, V8_UINT64_C(0x0000000000000008)); // expected result
1716 __ cmpq(r8, r9);
1717 __ j(not_equal, &exit);
1718
1719 __ incq(rax);
1720 __ blsil(r8, Operand(rsp, 0));
1721 __ movq(r9, V8_UINT64_C(0x0000000000000008)); // expected result
1722 __ cmpq(r8, r9);
1723 __ j(not_equal, &exit);
1724
1725 // blsmsk
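    // BLSMSK sets every bit up to and including the lowest set bit
    // (src ^ (src - 1)), giving 0xf here.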
1726 __ incq(rax);
1727 __ blsmskq(r8, rcx);
1728 __ movq(r9, V8_UINT64_C(0x000000000000000f)); // expected result
1729 __ cmpq(r8, r9);
1730 __ j(not_equal, &exit);
1731
1732 __ incq(rax);
1733 __ blsmskq(r8, Operand(rsp, 0));
1734 __ movq(r9, V8_UINT64_C(0x000000000000000f)); // expected result
1735 __ cmpq(r8, r9);
1736 __ j(not_equal, &exit);
1737
1738 __ incq(rax);
1739 __ blsmskl(r8, rcx);
1740 __ movq(r9, V8_UINT64_C(0x000000000000000f)); // expected result
1741 __ cmpq(r8, r9);
1742 __ j(not_equal, &exit);
1743
1744 __ incq(rax);
1745 __ blsmskl(r8, Operand(rsp, 0));
1746 __ movq(r9, V8_UINT64_C(0x000000000000000f)); // expected result
1747 __ cmpq(r8, r9);
1748 __ j(not_equal, &exit);
1749
1750 // blsr
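    // BLSR clears the lowest set bit (src & (src - 1)).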
1751 __ incq(rax);
1752 __ blsrq(r8, rcx);
1753 __ movq(r9, V8_UINT64_C(0x1122334455667780)); // expected result
1754 __ cmpq(r8, r9);
1755 __ j(not_equal, &exit);
1756
1757 __ incq(rax);
1758 __ blsrq(r8, Operand(rsp, 0));
1759 __ movq(r9, V8_UINT64_C(0x1122334455667780)); // expected result
1760 __ cmpq(r8, r9);
1761 __ j(not_equal, &exit);
1762
1763 __ incq(rax);
1764 __ blsrl(r8, rcx);
1765 __ movq(r9, V8_UINT64_C(0x0000000055667780)); // expected result
1766 __ cmpq(r8, r9);
1767 __ j(not_equal, &exit);
1768
1769 __ incq(rax);
1770 __ blsrl(r8, Operand(rsp, 0));
1771 __ movq(r9, V8_UINT64_C(0x0000000055667780)); // expected result
1772 __ cmpq(r8, r9);
1773 __ j(not_equal, &exit);
1774
1775 // tzcnt
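    // TZCNT counts trailing zero bits; the lowest byte 0x88 has three of them.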
1776 __ incq(rax);
1777 __ tzcntq(r8, rcx);
1778 __ movq(r9, V8_UINT64_C(0x0000000000000003)); // expected result
1779 __ cmpq(r8, r9);
1780 __ j(not_equal, &exit);
1781
1782 __ incq(rax);
1783 __ tzcntq(r8, Operand(rsp, 0));
1784 __ movq(r9, V8_UINT64_C(0x0000000000000003)); // expected result
1785 __ cmpq(r8, r9);
1786 __ j(not_equal, &exit);
1787
1788 __ incq(rax);
1789 __ tzcntl(r8, rcx);
1790 __ movq(r9, V8_UINT64_C(0x0000000000000003)); // expected result
1791 __ cmpq(r8, r9);
1792 __ j(not_equal, &exit);
1793
1794 __ incq(rax);
1795 __ tzcntl(r8, Operand(rsp, 0));
1796 __ movq(r9, V8_UINT64_C(0x0000000000000003)); // expected result
1797 __ cmpq(r8, r9);
1798 __ j(not_equal, &exit);
1799
1800 __ xorl(rax, rax);
1801 __ bind(&exit);
1802 __ popq(rcx);
1803 __ ret(0);
1804 }
1805
1806 CodeDesc desc;
1807 assm.GetCode(&desc);
1808 Handle<Code> code = isolate->factory()->NewCode(
1809 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
1810#ifdef OBJECT_PRINT
1811 OFStream os(stdout);
1812 code->Print(os);
1813#endif
1814
1815 F0 f = FUNCTION_CAST<F0>(code->entry());
1816 CHECK_EQ(0, f());
1817}
1818
1819
1820TEST(AssemblerX64LZCNT) {
1821 CcTest::InitializeVM();
1822 if (!CpuFeatures::IsSupported(LZCNT)) return;
1823
1824 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
1825 HandleScope scope(isolate);
1826 v8::internal::byte buffer[256];
1827 MacroAssembler assm(isolate, buffer, sizeof(buffer),
1828 v8::internal::CodeObjectRequired::kYes);
1829 {
1830 CpuFeatureScope fscope(&assm, LZCNT);
1831 Label exit;
1832
1833 __ movq(rcx, V8_UINT64_C(0x1122334455667788)); // source operand
1834 __ pushq(rcx); // For memory operand
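    // LZCNT counts leading zero bits: 3 for the full 64-bit value (top nibble
    // 0x1) and 1 for its low 32 bits 0x55667788 (top nibble 0x5).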
1835
1836 __ movl(rax, Immediate(1)); // Test number
1837 __ lzcntq(r8, rcx);
1838 __ movq(r9, V8_UINT64_C(0x0000000000000003)); // expected result
1839 __ cmpq(r8, r9);
1840 __ j(not_equal, &exit);
1841
1842 __ incq(rax);
1843 __ lzcntq(r8, Operand(rsp, 0));
1844 __ movq(r9, V8_UINT64_C(0x0000000000000003)); // expected result
1845 __ cmpq(r8, r9);
1846 __ j(not_equal, &exit);
1847
1848 __ incq(rax);
1849 __ lzcntl(r8, rcx);
1850 __ movq(r9, V8_UINT64_C(0x0000000000000001)); // expected result
1851 __ cmpq(r8, r9);
1852 __ j(not_equal, &exit);
1853
1854 __ incq(rax);
1855 __ lzcntl(r8, Operand(rsp, 0));
1856 __ movq(r9, V8_UINT64_C(0x0000000000000001)); // expected result
1857 __ cmpq(r8, r9);
1858 __ j(not_equal, &exit);
1859
1860 __ xorl(rax, rax);
1861 __ bind(&exit);
1862 __ popq(rcx);
1863 __ ret(0);
1864 }
1865
1866 CodeDesc desc;
1867 assm.GetCode(&desc);
1868 Handle<Code> code = isolate->factory()->NewCode(
1869 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
1870#ifdef OBJECT_PRINT
1871 OFStream os(stdout);
1872 code->Print(os);
1873#endif
1874
1875 F0 f = FUNCTION_CAST<F0>(code->entry());
1876 CHECK_EQ(0, f());
1877}
1878
1879
1880TEST(AssemblerX64POPCNT) {
1881 CcTest::InitializeVM();
1882 if (!CpuFeatures::IsSupported(POPCNT)) return;
1883
1884 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
1885 HandleScope scope(isolate);
1886 v8::internal::byte buffer[256];
1887 MacroAssembler assm(isolate, buffer, sizeof(buffer),
1888 v8::internal::CodeObjectRequired::kYes);
1889 {
1890 CpuFeatureScope fscope(&assm, POPCNT);
1891 Label exit;
1892
1893 __ movq(rcx, V8_UINT64_C(0x1111111111111100)); // source operand
1894 __ pushq(rcx); // For memory operand
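    // POPCNT counts set bits: 14 ones in 0x1111111111111100 and 6 in its low
    // 32 bits 0x11111100.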
1895
1896 __ movl(rax, Immediate(1)); // Test number
1897 __ popcntq(r8, rcx);
1898 __ movq(r9, V8_UINT64_C(0x000000000000000e)); // expected result
1899 __ cmpq(r8, r9);
1900 __ j(not_equal, &exit);
1901
1902 __ incq(rax);
1903 __ popcntq(r8, Operand(rsp, 0));
1904 __ movq(r9, V8_UINT64_C(0x000000000000000e)); // expected result
1905 __ cmpq(r8, r9);
1906 __ j(not_equal, &exit);
1907
1908 __ incq(rax);
1909 __ popcntl(r8, rcx);
1910 __ movq(r9, V8_UINT64_C(0x0000000000000006)); // expected result
1911 __ cmpq(r8, r9);
1912 __ j(not_equal, &exit);
1913
1914 __ incq(rax);
1915 __ popcntl(r8, Operand(rsp, 0));
1916 __ movq(r9, V8_UINT64_C(0x0000000000000006)); // expected result
1917 __ cmpq(r8, r9);
1918 __ j(not_equal, &exit);
1919
1920 __ xorl(rax, rax);
1921 __ bind(&exit);
1922 __ popq(rcx);
1923 __ ret(0);
1924 }
1925
1926 CodeDesc desc;
1927 assm.GetCode(&desc);
1928 Handle<Code> code = isolate->factory()->NewCode(
1929 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
1930#ifdef OBJECT_PRINT
1931 OFStream os(stdout);
1932 code->Print(os);
1933#endif
1934
1935 F0 f = FUNCTION_CAST<F0>(code->entry());
1936 CHECK_EQ(0, f());
1937}
1938
1939
1940TEST(AssemblerX64BMI2) {
1941 CcTest::InitializeVM();
1942 if (!CpuFeatures::IsSupported(BMI2)) return;
1943
1944 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
1945 HandleScope scope(isolate);
1946 v8::internal::byte buffer[2048];
1947 MacroAssembler assm(isolate, buffer, sizeof(buffer),
1948 v8::internal::CodeObjectRequired::kYes);
1949 {
1950 CpuFeatureScope fscope(&assm, BMI2);
1951 Label exit;
1952 __ pushq(rbx); // save rbx
1953 __ movq(rcx, V8_UINT64_C(0x1122334455667788)); // source operand
1954 __ pushq(rcx); // For memory operand
1955
1956 // bzhi
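    // BZHI zeroes all bits at and above the index in rdx (9), leaving
    // 0x788 & 0x1ff = 0x188.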
1957 __ movq(rdx, V8_UINT64_C(0x0000000000000009));
1958
1959 __ movl(rax, Immediate(1)); // Test number
1960 __ bzhiq(r8, rcx, rdx);
1961 __ movq(r9, V8_UINT64_C(0x0000000000000188)); // expected result
1962 __ cmpq(r8, r9);
1963 __ j(not_equal, &exit);
1964
1965 __ incq(rax);
1966 __ bzhiq(r8, Operand(rsp, 0), rdx);
1967 __ movq(r9, V8_UINT64_C(0x0000000000000188)); // expected result
1968 __ cmpq(r8, r9);
1969 __ j(not_equal, &exit);
1970
1971 __ incq(rax);
1972 __ bzhil(r8, rcx, rdx);
1973 __ movq(r9, V8_UINT64_C(0x0000000000000188)); // expected result
1974 __ cmpq(r8, r9);
1975 __ j(not_equal, &exit);
1976
1977 __ incq(rax);
1978 __ bzhil(r8, Operand(rsp, 0), rdx);
1979 __ movq(r9, V8_UINT64_C(0x0000000000000188)); // expected result
1980 __ cmpq(r8, r9);
1981 __ j(not_equal, &exit);
1982
1983 // mulx
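    // MULX multiplies the implicit rdx/edx by the source without touching the
    // flags; the first destination receives the high half and the second the
    // low half of 0x1122334455667788 * 0x1000.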
1984 __ movq(rdx, V8_UINT64_C(0x0000000000001000));
1985
1986 __ incq(rax);
1987 __ mulxq(r8, r9, rcx);
1988 __ movq(rbx, V8_UINT64_C(0x0000000000000112)); // expected result
1989 __ cmpq(r8, rbx);
1990 __ j(not_equal, &exit);
1991 __ movq(rbx, V8_UINT64_C(0x2334455667788000)); // expected result
1992 __ cmpq(r9, rbx);
1993 __ j(not_equal, &exit);
1994
1995 __ incq(rax);
1996 __ mulxq(r8, r9, Operand(rsp, 0));
1997 __ movq(rbx, V8_UINT64_C(0x0000000000000112)); // expected result
1998 __ cmpq(r8, rbx);
1999 __ j(not_equal, &exit);
2000 __ movq(rbx, V8_UINT64_C(0x2334455667788000)); // expected result
2001 __ cmpq(r9, rbx);
2002 __ j(not_equal, &exit);
2003
2004 __ incq(rax);
2005 __ mulxl(r8, r9, rcx);
2006 __ movq(rbx, V8_UINT64_C(0x0000000000000556)); // expected result
2007 __ cmpq(r8, rbx);
2008 __ j(not_equal, &exit);
2009 __ movq(rbx, V8_UINT64_C(0x0000000067788000)); // expected result
2010 __ cmpq(r9, rbx);
2011 __ j(not_equal, &exit);
2012
2013 __ incq(rax);
2014 __ mulxl(r8, r9, Operand(rsp, 0));
2015 __ movq(rbx, V8_UINT64_C(0x0000000000000556)); // expected result
2016 __ cmpq(r8, rbx);
2017 __ j(not_equal, &exit);
2018 __ movq(rbx, V8_UINT64_C(0x0000000067788000)); // expected result
2019 __ cmpq(r9, rbx);
2020 __ j(not_equal, &exit);
2021
2022 // pdep
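    // PDEP scatters the low bits of the first source into the positions of the
    // mask's set bits; the four zero bits at the bottom of 0x...f0 clear the
    // mask's four lowest set bits, giving 0x1122334455667400.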
2023 __ movq(rdx, V8_UINT64_C(0xfffffffffffffff0));
2024
2025 __ incq(rax);
2026 __ pdepq(r8, rdx, rcx);
2027 __ movq(r9, V8_UINT64_C(0x1122334455667400)); // expected result
2028 __ cmpq(r8, r9);
2029 __ j(not_equal, &exit);
2030
2031 __ incq(rax);
2032 __ pdepq(r8, rdx, Operand(rsp, 0));
2033 __ movq(r9, V8_UINT64_C(0x1122334455667400)); // expected result
2034 __ cmpq(r8, r9);
2035 __ j(not_equal, &exit);
2036
2037 __ incq(rax);
2038 __ pdepl(r8, rdx, rcx);
2039 __ movq(r9, V8_UINT64_C(0x0000000055667400)); // expected result
2040 __ cmpq(r8, r9);
2041 __ j(not_equal, &exit);
2042
2043 __ incq(rax);
2044 __ pdepl(r8, rdx, Operand(rsp, 0));
2045 __ movq(r9, V8_UINT64_C(0x0000000055667400)); // expected result
2046 __ cmpq(r8, r9);
2047 __ j(not_equal, &exit);
2048
2049 // pext
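    // PEXT gathers the source bits selected by the mask into the low bits of
    // the destination: of the 26 selected bits, only the lowest (bit 3 of
    // 0x...f0) is zero, giving 0x3fffffe.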
2050 __ movq(rdx, V8_UINT64_C(0xfffffffffffffff0));
2051
2052 __ incq(rax);
2053 __ pextq(r8, rdx, rcx);
2054 __ movq(r9, V8_UINT64_C(0x0000000003fffffe)); // expected result
2055 __ cmpq(r8, r9);
2056 __ j(not_equal, &exit);
2057
2058 __ incq(rax);
2059 __ pextq(r8, rdx, Operand(rsp, 0));
2060 __ movq(r9, V8_UINT64_C(0x0000000003fffffe)); // expected result
2061 __ cmpq(r8, r9);
2062 __ j(not_equal, &exit);
2063
2064 __ incq(rax);
2065 __ pextl(r8, rdx, rcx);
2066 __ movq(r9, V8_UINT64_C(0x000000000000fffe)); // expected result
2067 __ cmpq(r8, r9);
2068 __ j(not_equal, &exit);
2069
2070 __ incq(rax);
2071 __ pextl(r8, rdx, Operand(rsp, 0));
2072 __ movq(r9, V8_UINT64_C(0x000000000000fffe)); // expected result
2073 __ cmpq(r8, r9);
2074 __ j(not_equal, &exit);
2075
2076 // sarx
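    // SARX (and SHLX/SHRX below) shift by the count in the second source
    // register without modifying the flags; the count here is 4.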
2077 __ movq(rdx, V8_UINT64_C(0x0000000000000004));
2078
2079 __ incq(rax);
2080 __ sarxq(r8, rcx, rdx);
2081 __ movq(r9, V8_UINT64_C(0x0112233445566778)); // expected result
2082 __ cmpq(r8, r9);
2083 __ j(not_equal, &exit);
2084
2085 __ incq(rax);
2086 __ sarxq(r8, Operand(rsp, 0), rdx);
2087 __ movq(r9, V8_UINT64_C(0x0112233445566778)); // expected result
2088 __ cmpq(r8, r9);
2089 __ j(not_equal, &exit);
2090
2091 __ incq(rax);
2092 __ sarxl(r8, rcx, rdx);
2093 __ movq(r9, V8_UINT64_C(0x0000000005566778)); // expected result
2094 __ cmpq(r8, r9);
2095 __ j(not_equal, &exit);
2096
2097 __ incq(rax);
2098 __ sarxl(r8, Operand(rsp, 0), rdx);
2099 __ movq(r9, V8_UINT64_C(0x0000000005566778)); // expected result
2100 __ cmpq(r8, r9);
2101 __ j(not_equal, &exit);
2102
2103 // shlx
2104 __ movq(rdx, V8_UINT64_C(0x0000000000000004));
2105
2106 __ incq(rax);
2107 __ shlxq(r8, rcx, rdx);
2108 __ movq(r9, V8_UINT64_C(0x1223344556677880)); // expected result
2109 __ cmpq(r8, r9);
2110 __ j(not_equal, &exit);
2111
2112 __ incq(rax);
2113 __ shlxq(r8, Operand(rsp, 0), rdx);
2114 __ movq(r9, V8_UINT64_C(0x1223344556677880)); // expected result
2115 __ cmpq(r8, r9);
2116 __ j(not_equal, &exit);
2117
2118 __ incq(rax);
2119 __ shlxl(r8, rcx, rdx);
2120 __ movq(r9, V8_UINT64_C(0x0000000056677880)); // expected result
2121 __ cmpq(r8, r9);
2122 __ j(not_equal, &exit);
2123
2124 __ incq(rax);
2125 __ shlxl(r8, Operand(rsp, 0), rdx);
2126 __ movq(r9, V8_UINT64_C(0x0000000056677880)); // expected result
2127 __ cmpq(r8, r9);
2128 __ j(not_equal, &exit);
2129
2130 // shrx
2131 __ movq(rdx, V8_UINT64_C(0x0000000000000004));
2132
2133 __ incq(rax);
2134 __ shrxq(r8, rcx, rdx);
2135 __ movq(r9, V8_UINT64_C(0x0112233445566778)); // expected result
2136 __ cmpq(r8, r9);
2137 __ j(not_equal, &exit);
2138
2139 __ incq(rax);
2140 __ shrxq(r8, Operand(rsp, 0), rdx);
2141 __ movq(r9, V8_UINT64_C(0x0112233445566778)); // expected result
2142 __ cmpq(r8, r9);
2143 __ j(not_equal, &exit);
2144
2145 __ incq(rax);
2146 __ shrxl(r8, rcx, rdx);
2147 __ movq(r9, V8_UINT64_C(0x0000000005566778)); // expected result
2148 __ cmpq(r8, r9);
2149 __ j(not_equal, &exit);
2150
2151 __ incq(rax);
2152 __ shrxl(r8, Operand(rsp, 0), rdx);
2153 __ movq(r9, V8_UINT64_C(0x0000000005566778)); // expected result
2154 __ cmpq(r8, r9);
2155 __ j(not_equal, &exit);
2156
2157 // rorx
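    // RORX rotates right by an immediate count without modifying the flags;
    // rotating by 4 moves the low nibble 0x8 to the top of the result.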
2158 __ incq(rax);
2159 __ rorxq(r8, rcx, 0x4);
2160 __ movq(r9, V8_UINT64_C(0x8112233445566778)); // expected result
2161 __ cmpq(r8, r9);
2162 __ j(not_equal, &exit);
2163
2164 __ incq(rax);
2165 __ rorxq(r8, Operand(rsp, 0), 0x4);
2166 __ movq(r9, V8_UINT64_C(0x8112233445566778)); // expected result
2167 __ cmpq(r8, r9);
2168 __ j(not_equal, &exit);
2169
2170 __ incq(rax);
2171 __ rorxl(r8, rcx, 0x4);
2172 __ movq(r9, V8_UINT64_C(0x0000000085566778)); // expected result
2173 __ cmpq(r8, r9);
2174 __ j(not_equal, &exit);
2175
2176 __ incq(rax);
2177 __ rorxl(r8, Operand(rsp, 0), 0x4);
2178 __ movq(r9, V8_UINT64_C(0x0000000085566778)); // expected result
2179 __ cmpq(r8, r9);
2180 __ j(not_equal, &exit);
2181
2182 __ xorl(rax, rax);
2183 __ bind(&exit);
2184 __ popq(rcx);
2185 __ popq(rbx);
2186 __ ret(0);
2187 }
2188
2189 CodeDesc desc;
2190 assm.GetCode(&desc);
2191 Handle<Code> code = isolate->factory()->NewCode(
2192 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
2193#ifdef OBJECT_PRINT
2194 OFStream os(stdout);
2195 code->Print(os);
2196#endif
2197
2198 F0 f = FUNCTION_CAST<F0>(code->entry());
2199 CHECK_EQ(0, f());
2200}
2201
2202
2203TEST(AssemblerX64JumpTables1) {
2204 // Test jump tables with forward jumps.
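  // The table holds 512 absolute 64-bit label addresses emitted with dq, and
  // dispatch is an indirect jmp through table[arg1]; each case loads a random
  // value that the caller checks. The table precedes the case bodies, so the
  // dq entries are forward references.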
2205 CcTest::InitializeVM();
2206 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
2207 HandleScope scope(isolate);
2208 MacroAssembler assm(isolate, nullptr, 0,
2209 v8::internal::CodeObjectRequired::kYes);
2210
2211 const int kNumCases = 512;
2212 int values[kNumCases];
2213 isolate->random_number_generator()->NextBytes(values, sizeof(values));
2214 Label labels[kNumCases];
2215
2216 Label done, table;
2217 __ leaq(arg2, Operand(&table));
2218 __ jmp(Operand(arg2, arg1, times_8, 0));
2219 __ ud2();
2220 __ bind(&table);
2221 for (int i = 0; i < kNumCases; ++i) {
2222 __ dq(&labels[i]);
2223 }
2224
2225 for (int i = 0; i < kNumCases; ++i) {
2226 __ bind(&labels[i]);
2227 __ movq(rax, Immediate(values[i]));
2228 __ jmp(&done);
2229 }
2230
2231 __ bind(&done);
2232 __ ret(0);
2233
2234 CodeDesc desc;
2235 assm.GetCode(&desc);
2236 Handle<Code> code = isolate->factory()->NewCode(
2237 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
2238#ifdef OBJECT_PRINT
2239 code->Print(std::cout);
2240#endif
2241
2242 F1 f = FUNCTION_CAST<F1>(code->entry());
2243 for (int i = 0; i < kNumCases; ++i) {
2244 int res = f(i);
2245 PrintF("f(%d) = %d\n", i, res);
2246 CHECK_EQ(values[i], res);
2247 }
2248}
2249
2250
2251TEST(AssemblerX64JumpTables2) {
2252 // Test jump tables with backwards jumps.
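  // Same dispatch as above, but the address table is emitted after the case
  // bodies, so every dq entry is a backward reference to an already-bound
  // label.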
2253 CcTest::InitializeVM();
2254 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
2255 HandleScope scope(isolate);
2256 MacroAssembler assm(isolate, nullptr, 0,
2257 v8::internal::CodeObjectRequired::kYes);
2258
2259 const int kNumCases = 512;
2260 int values[kNumCases];
2261 isolate->random_number_generator()->NextBytes(values, sizeof(values));
2262 Label labels[kNumCases];
2263
2264 Label done, table;
2265 __ leaq(arg2, Operand(&table));
2266 __ jmp(Operand(arg2, arg1, times_8, 0));
2267 __ ud2();
2268
2269 for (int i = 0; i < kNumCases; ++i) {
2270 __ bind(&labels[i]);
2271 __ movq(rax, Immediate(values[i]));
2272 __ jmp(&done);
2273 }
2274
2275 __ bind(&done);
2276 __ ret(0);
2277
2278 __ bind(&table);
2279 for (int i = 0; i < kNumCases; ++i) {
2280 __ dq(&labels[i]);
2281 }
2282
2283 CodeDesc desc;
2284 assm.GetCode(&desc);
2285 Handle<Code> code = isolate->factory()->NewCode(
2286 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
2287#ifdef OBJECT_PRINT
2288 code->Print(std::cout);
2289#endif
2290
2291 F1 f = FUNCTION_CAST<F1>(code->entry());
2292 for (int i = 0; i < kNumCases; ++i) {
2293 int res = f(i);
2294 PrintF("f(%d) = %d\n", i, res);
2295 CHECK_EQ(values[i], res);
2296 }
2297}
2298
2299TEST(AssemblerX64PslldWithXmm15) {
2300 CcTest::InitializeVM();
2301 // Allocate an executable page of memory.
2302 size_t actual_size;
2303 byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
2304 Assembler::kMinimalBufferSize, &actual_size, true));
2305 CHECK(buffer);
2306 Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
2307
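  // pslld shifts each 32-bit lane left by the immediate (1 here); using xmm15
  // appears intended to exercise the encoding of an extended XMM register:
  // 0x11223344 -> 0x22446688 and 0x55667788 -> 0xaaccef10.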
2308 __ movq(xmm15, arg1);
2309 __ pslld(xmm15, 1);
2310 __ movq(rax, xmm15);
2311 __ ret(0);
2312
2313 CodeDesc desc;
2314 assm.GetCode(&desc);
2315 uint64_t result = FUNCTION_CAST<F5>(buffer)(V8_UINT64_C(0x1122334455667788));
2316 CHECK_EQ(V8_UINT64_C(0x22446688aaccef10), result);
2317}
2318
2319#undef __