// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include <cstdlib>
#include <iostream>

#include "src/v8.h"

#include "src/base/platform/platform.h"
#include "src/base/utils/random-number-generator.h"
#include "src/factory.h"
#include "src/macro-assembler.h"
#include "src/ostreams.h"
#include "test/cctest/cctest.h"

using namespace v8::internal;

// Test the x64 assembler by compiling some simple functions into
// a buffer and executing them.  These tests do not initialize the
// V8 library, create a context, or use any V8 objects.
// The AMD64 calling convention is used, with the first six integer
// arguments in RDI, RSI, RDX, RCX, R8, and R9, and floating-point
// arguments in the XMM registers.  The return value is in RAX.
// This calling convention is used on Linux and on Mac OS, both with GCC.
// A different convention is used on 64-bit Windows, where the first four
// integer arguments are passed in RCX, RDX, R8 and R9.

typedef int (*F0)();
typedef int (*F1)(int64_t x);
typedef int (*F2)(int64_t x, int64_t y);
typedef unsigned (*F3)(double x);
typedef uint64_t (*F4)(uint64_t* x, uint64_t* y);
typedef uint64_t (*F5)(uint64_t x);

#ifdef _WIN64
static const Register arg1 = rcx;
static const Register arg2 = rdx;
#else
static const Register arg1 = rdi;
static const Register arg2 = rsi;
#endif

#define __ assm.

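// Each test below follows the same pattern: assemble a tiny function into an
// executable buffer (or a local byte buffer turned into a Code object),
// finalize it with GetCode(), and call it from C++ through FUNCTION_CAST
// using one of the typedefs above, then check the value returned in RAX.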
TEST(AssemblerX64ReturnOperation) {
  CcTest::InitializeVM();
  // Allocate an executable page of memory.
  size_t actual_size;
  byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
      Assembler::kMinimalBufferSize, &actual_size, true));
  CHECK(buffer);
  Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));

  // Assemble a simple function that copies argument 2 and returns it.
  __ movq(rax, arg2);
  __ nop();
  __ ret(0);

  CodeDesc desc;
  assm.GetCode(&desc);
  // Call the function from C++.
  int result = FUNCTION_CAST<F2>(buffer)(3, 2);
  CHECK_EQ(2, result);
}

TEST(AssemblerX64StackOperations) {
  CcTest::InitializeVM();
  // Allocate an executable page of memory.
  size_t actual_size;
  byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
      Assembler::kMinimalBufferSize, &actual_size, true));
  CHECK(buffer);
  Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));

  // Assemble a simple function that copies argument 2 and returns it.
  // We compile without stack frame pointers, so the gdb debugger shows
  // incorrect stack frames when debugging this function (which has them).
  __ pushq(rbp);
  __ movq(rbp, rsp);
  __ pushq(arg2);  // Value at (rbp - 8)
  __ pushq(arg2);  // Value at (rbp - 16)
  __ pushq(arg1);  // Value at (rbp - 24)
  __ popq(rax);
  __ popq(rax);
  __ popq(rax);
  __ popq(rbp);
  __ nop();
  __ ret(0);

  CodeDesc desc;
  assm.GetCode(&desc);
  // Call the function from C++.
  int result = FUNCTION_CAST<F2>(buffer)(3, 2);
  CHECK_EQ(2, result);
}

TEST(AssemblerX64ArithmeticOperations) {
  CcTest::InitializeVM();
  // Allocate an executable page of memory.
  size_t actual_size;
  byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
      Assembler::kMinimalBufferSize, &actual_size, true));
  CHECK(buffer);
  Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));

  // Assemble a simple function that adds its arguments and returns the sum.
  __ movq(rax, arg2);
  __ addq(rax, arg1);
  __ ret(0);

  CodeDesc desc;
  assm.GetCode(&desc);
  // Call the function from C++.
  int result = FUNCTION_CAST<F2>(buffer)(3, 2);
  CHECK_EQ(5, result);
}

TEST(AssemblerX64CmpbOperation) {
  CcTest::InitializeVM();
  // Allocate an executable page of memory.
  size_t actual_size;
  byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
      Assembler::kMinimalBufferSize, &actual_size, true));
  CHECK(buffer);
  Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));

  // Assemble a function that compares the low bytes of its arguments and
  // returns 1 if they are equal, 0 otherwise.
  // On Windows it compares rcx with rdx, which does not require a REX prefix;
  // on Linux it compares rdi with rsi, which does.

  Label done;
  __ movq(rax, Immediate(1));
  __ cmpb(arg1, arg2);
  __ j(equal, &done);
  __ movq(rax, Immediate(0));
  __ bind(&done);
  __ ret(0);

  CodeDesc desc;
  assm.GetCode(&desc);
  // Call the function from C++.
  int result = FUNCTION_CAST<F2>(buffer)(0x1002, 0x2002);
  CHECK_EQ(1, result);
  result = FUNCTION_CAST<F2>(buffer)(0x1002, 0x2003);
  CHECK_EQ(0, result);
}

TEST(AssemblerX64ImulOperation) {
  CcTest::InitializeVM();
  // Allocate an executable page of memory.
  size_t actual_size;
  byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
      Assembler::kMinimalBufferSize, &actual_size, true));
  CHECK(buffer);
  Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));

  // Assemble a simple function that multiplies its arguments and returns
  // the high half of the 128-bit product.
  __ movq(rax, arg2);
  __ imulq(arg1);
  __ movq(rax, rdx);
  __ ret(0);

  CodeDesc desc;
  assm.GetCode(&desc);
  // Call the function from C++.
  int result = FUNCTION_CAST<F2>(buffer)(3, 2);
  CHECK_EQ(0, result);
  result = FUNCTION_CAST<F2>(buffer)(0x100000000l, 0x100000000l);
  CHECK_EQ(1, result);
  result = FUNCTION_CAST<F2>(buffer)(-0x100000000l, 0x100000000l);
  CHECK_EQ(-1, result);
}

TEST(AssemblerX64XchglOperations) {
  CcTest::InitializeVM();
  // Allocate an executable page of memory.
  size_t actual_size;
  byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
      Assembler::kMinimalBufferSize, &actual_size, true));
  CHECK(buffer);
  Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));

  __ movq(rax, Operand(arg1, 0));
  __ movq(r11, Operand(arg2, 0));
  __ xchgl(rax, r11);
  __ movq(Operand(arg1, 0), rax);
  __ movq(Operand(arg2, 0), r11);
  __ ret(0);

  CodeDesc desc;
  assm.GetCode(&desc);
  // Call the function from C++.
  uint64_t left = V8_2PART_UINT64_C(0x10000000, 20000000);
  uint64_t right = V8_2PART_UINT64_C(0x30000000, 40000000);
  uint64_t result = FUNCTION_CAST<F4>(buffer)(&left, &right);
  CHECK_EQ(V8_2PART_UINT64_C(0x00000000, 40000000), left);
  CHECK_EQ(V8_2PART_UINT64_C(0x00000000, 20000000), right);
  USE(result);
}

TEST(AssemblerX64OrlOperations) {
  CcTest::InitializeVM();
  // Allocate an executable page of memory.
  size_t actual_size;
  byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
      Assembler::kMinimalBufferSize, &actual_size, true));
  CHECK(buffer);
  Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));

  __ movq(rax, Operand(arg2, 0));
  __ orl(Operand(arg1, 0), rax);
  __ ret(0);

  CodeDesc desc;
  assm.GetCode(&desc);
  // Call the function from C++.
  uint64_t left = V8_2PART_UINT64_C(0x10000000, 20000000);
  uint64_t right = V8_2PART_UINT64_C(0x30000000, 40000000);
  uint64_t result = FUNCTION_CAST<F4>(buffer)(&left, &right);
  CHECK_EQ(V8_2PART_UINT64_C(0x10000000, 60000000), left);
  USE(result);
}

TEST(AssemblerX64RollOperations) {
  CcTest::InitializeVM();
  // Allocate an executable page of memory.
  size_t actual_size;
  byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
      Assembler::kMinimalBufferSize, &actual_size, true));
  CHECK(buffer);
  Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));

  __ movq(rax, arg1);
  __ roll(rax, Immediate(1));
  __ ret(0);

  CodeDesc desc;
  assm.GetCode(&desc);
  // Call the function from C++.
  uint64_t src = V8_2PART_UINT64_C(0x10000000, C0000000);
  uint64_t result = FUNCTION_CAST<F5>(buffer)(src);
  CHECK_EQ(V8_2PART_UINT64_C(0x00000000, 80000001), result);
}

TEST(AssemblerX64SublOperations) {
  CcTest::InitializeVM();
  // Allocate an executable page of memory.
  size_t actual_size;
  byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
      Assembler::kMinimalBufferSize, &actual_size, true));
  CHECK(buffer);
  Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));

  __ movq(rax, Operand(arg2, 0));
  __ subl(Operand(arg1, 0), rax);
  __ ret(0);

  CodeDesc desc;
  assm.GetCode(&desc);
  // Call the function from C++.
  uint64_t left = V8_2PART_UINT64_C(0x10000000, 20000000);
  uint64_t right = V8_2PART_UINT64_C(0x30000000, 40000000);
  uint64_t result = FUNCTION_CAST<F4>(buffer)(&left, &right);
  CHECK_EQ(V8_2PART_UINT64_C(0x10000000, e0000000), left);
  USE(result);
}

TEST(AssemblerX64TestlOperations) {
  CcTest::InitializeVM();
  // Allocate an executable page of memory.
  size_t actual_size;
  byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
      Assembler::kMinimalBufferSize, &actual_size, true));
  CHECK(buffer);
  Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));

  // Set rax with the ZF flag of the testl instruction.
  Label done;
  __ movq(rax, Immediate(1));
  __ movq(r11, Operand(arg2, 0));
  __ testl(Operand(arg1, 0), r11);
  __ j(zero, &done, Label::kNear);
  __ movq(rax, Immediate(0));
  __ bind(&done);
  __ ret(0);

  CodeDesc desc;
  assm.GetCode(&desc);
  // Call the function from C++.
  uint64_t left = V8_2PART_UINT64_C(0x10000000, 20000000);
  uint64_t right = V8_2PART_UINT64_C(0x30000000, 00000000);
  uint64_t result = FUNCTION_CAST<F4>(buffer)(&left, &right);
  CHECK_EQ(1u, result);
}

TEST(AssemblerX64XorlOperations) {
  CcTest::InitializeVM();
  // Allocate an executable page of memory.
  size_t actual_size;
  byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
      Assembler::kMinimalBufferSize, &actual_size, true));
  CHECK(buffer);
  Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));

  __ movq(rax, Operand(arg2, 0));
  __ xorl(Operand(arg1, 0), rax);
  __ ret(0);

  CodeDesc desc;
  assm.GetCode(&desc);
  // Call the function from C++.
  uint64_t left = V8_2PART_UINT64_C(0x10000000, 20000000);
  uint64_t right = V8_2PART_UINT64_C(0x30000000, 60000000);
  uint64_t result = FUNCTION_CAST<F4>(buffer)(&left, &right);
  CHECK_EQ(V8_2PART_UINT64_C(0x10000000, 40000000), left);
  USE(result);
}

TEST(AssemblerX64MemoryOperands) {
  CcTest::InitializeVM();
  // Allocate an executable page of memory.
  size_t actual_size;
  byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
      Assembler::kMinimalBufferSize, &actual_size, true));
  CHECK(buffer);
  Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));

  // Assemble a simple function that loads argument 1 back from the stack
  // and returns it.
  __ pushq(rbp);
  __ movq(rbp, rsp);

  __ pushq(arg2);  // Value at (rbp - 8)
  __ pushq(arg2);  // Value at (rbp - 16)
  __ pushq(arg1);  // Value at (rbp - 24)

  const int kStackElementSize = 8;
  __ movq(rax, Operand(rbp, -3 * kStackElementSize));
  __ popq(arg2);
  __ popq(arg2);
  __ popq(arg2);
  __ popq(rbp);
  __ nop();
  __ ret(0);

  CodeDesc desc;
  assm.GetCode(&desc);
  // Call the function from C++.
  int result = FUNCTION_CAST<F2>(buffer)(3, 2);
  CHECK_EQ(3, result);
}

TEST(AssemblerX64ControlFlow) {
  CcTest::InitializeVM();
  // Allocate an executable page of memory.
  size_t actual_size;
  byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
      Assembler::kMinimalBufferSize, &actual_size, true));
  CHECK(buffer);
  Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));

  // Assemble a simple function that copies argument 1 and returns it.
  __ pushq(rbp);

  __ movq(rbp, rsp);
  __ movq(rax, arg1);
  Label target;
  __ jmp(&target);
  __ movq(rax, arg2);
  __ bind(&target);
  __ popq(rbp);
  __ ret(0);

  CodeDesc desc;
  assm.GetCode(&desc);
  // Call the function from C++.
  int result = FUNCTION_CAST<F2>(buffer)(3, 2);
  CHECK_EQ(3, result);
}

TEST(AssemblerX64LoopImmediates) {
  CcTest::InitializeVM();
  // Allocate an executable page of memory.
  size_t actual_size;
  byte* buffer = static_cast<byte*>(v8::base::OS::Allocate(
      Assembler::kMinimalBufferSize, &actual_size, true));
  CHECK(buffer);
  Assembler assm(CcTest::i_isolate(), buffer, static_cast<int>(actual_size));
  // Assemble two loops using rax as counter, and verify the ending counts.
  Label Fail;
  __ movq(rax, Immediate(-3));
  Label Loop1_test;
  Label Loop1_body;
  __ jmp(&Loop1_test);
  __ bind(&Loop1_body);
  __ addq(rax, Immediate(7));
  __ bind(&Loop1_test);
  __ cmpq(rax, Immediate(20));
  __ j(less_equal, &Loop1_body);
  // Did the loop terminate with the expected value?
  __ cmpq(rax, Immediate(25));
  __ j(not_equal, &Fail);

  Label Loop2_test;
  Label Loop2_body;
  __ movq(rax, Immediate(0x11FEED00));
  __ jmp(&Loop2_test);
  __ bind(&Loop2_body);
  __ addq(rax, Immediate(-0x1100));
  __ bind(&Loop2_test);
  __ cmpq(rax, Immediate(0x11FE8000));
  __ j(greater, &Loop2_body);
  // Did the loop terminate with the expected value?
  __ cmpq(rax, Immediate(0x11FE7600));
  __ j(not_equal, &Fail);

  __ movq(rax, Immediate(1));
  __ ret(0);
  __ bind(&Fail);
  __ movq(rax, Immediate(0));
  __ ret(0);

  CodeDesc desc;
  assm.GetCode(&desc);
  // Call the function from C++.
  int result = FUNCTION_CAST<F0>(buffer)();
  CHECK_EQ(1, result);
}

TEST(OperandRegisterDependency) {
  int offsets[4] = {0, 1, 0xfed, 0xbeefcad};
  for (int i = 0; i < 4; i++) {
    int offset = offsets[i];
    CHECK(Operand(rax, offset).AddressUsesRegister(rax));
    CHECK(!Operand(rax, offset).AddressUsesRegister(r8));
    CHECK(!Operand(rax, offset).AddressUsesRegister(rcx));

    CHECK(Operand(rax, rax, times_1, offset).AddressUsesRegister(rax));
    CHECK(!Operand(rax, rax, times_1, offset).AddressUsesRegister(r8));
    CHECK(!Operand(rax, rax, times_1, offset).AddressUsesRegister(rcx));

    CHECK(Operand(rax, rcx, times_1, offset).AddressUsesRegister(rax));
    CHECK(Operand(rax, rcx, times_1, offset).AddressUsesRegister(rcx));
    CHECK(!Operand(rax, rcx, times_1, offset).AddressUsesRegister(r8));
    CHECK(!Operand(rax, rcx, times_1, offset).AddressUsesRegister(r9));
    CHECK(!Operand(rax, rcx, times_1, offset).AddressUsesRegister(rdx));
    CHECK(!Operand(rax, rcx, times_1, offset).AddressUsesRegister(rsp));

    CHECK(Operand(rsp, offset).AddressUsesRegister(rsp));
    CHECK(!Operand(rsp, offset).AddressUsesRegister(rax));
    CHECK(!Operand(rsp, offset).AddressUsesRegister(r15));

    CHECK(Operand(rbp, offset).AddressUsesRegister(rbp));
    CHECK(!Operand(rbp, offset).AddressUsesRegister(rax));
    CHECK(!Operand(rbp, offset).AddressUsesRegister(r13));

    CHECK(Operand(rbp, rax, times_1, offset).AddressUsesRegister(rbp));
    CHECK(Operand(rbp, rax, times_1, offset).AddressUsesRegister(rax));
    CHECK(!Operand(rbp, rax, times_1, offset).AddressUsesRegister(rcx));
    CHECK(!Operand(rbp, rax, times_1, offset).AddressUsesRegister(r13));
    CHECK(!Operand(rbp, rax, times_1, offset).AddressUsesRegister(r8));
    CHECK(!Operand(rbp, rax, times_1, offset).AddressUsesRegister(rsp));

    CHECK(Operand(rsp, rbp, times_1, offset).AddressUsesRegister(rsp));
    CHECK(Operand(rsp, rbp, times_1, offset).AddressUsesRegister(rbp));
    CHECK(!Operand(rsp, rbp, times_1, offset).AddressUsesRegister(rax));
    CHECK(!Operand(rsp, rbp, times_1, offset).AddressUsesRegister(r15));
    CHECK(!Operand(rsp, rbp, times_1, offset).AddressUsesRegister(r13));
  }
}

TEST(AssemblerX64LabelChaining) {
  // Test chaining of label usages within instructions (issue 1644).
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  Assembler assm(CcTest::i_isolate(), NULL, 0);

  Label target;
  __ j(equal, &target);
  __ j(not_equal, &target);
  __ bind(&target);
  __ nop();
}

TEST(AssemblerMultiByteNop) {
  CcTest::InitializeVM();
  v8::HandleScope scope(CcTest::isolate());
  byte buffer[1024];
  Isolate* isolate = CcTest::i_isolate();
  Assembler assm(isolate, buffer, sizeof(buffer));
  __ pushq(rbx);
  __ pushq(rcx);
  __ pushq(rdx);
  __ pushq(rdi);
  __ pushq(rsi);
  __ movq(rax, Immediate(1));
  __ movq(rbx, Immediate(2));
  __ movq(rcx, Immediate(3));
  __ movq(rdx, Immediate(4));
  __ movq(rdi, Immediate(5));
  __ movq(rsi, Immediate(6));
  for (int i = 0; i < 16; i++) {
    int before = assm.pc_offset();
    __ Nop(i);
    CHECK_EQ(assm.pc_offset() - before, i);
  }

  Label fail;
  __ cmpq(rax, Immediate(1));
  __ j(not_equal, &fail);
  __ cmpq(rbx, Immediate(2));
  __ j(not_equal, &fail);
  __ cmpq(rcx, Immediate(3));
  __ j(not_equal, &fail);
  __ cmpq(rdx, Immediate(4));
  __ j(not_equal, &fail);
  __ cmpq(rdi, Immediate(5));
  __ j(not_equal, &fail);
  __ cmpq(rsi, Immediate(6));
  __ j(not_equal, &fail);
  __ movq(rax, Immediate(42));
  __ popq(rsi);
  __ popq(rdi);
  __ popq(rdx);
  __ popq(rcx);
  __ popq(rbx);
  __ ret(0);
  __ bind(&fail);
  __ movq(rax, Immediate(13));
  __ popq(rsi);
  __ popq(rdi);
  __ popq(rdx);
  __ popq(rcx);
  __ popq(rbx);
  __ ret(0);

  CodeDesc desc;
  assm.GetCode(&desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());

  F0 f = FUNCTION_CAST<F0>(code->entry());
  int res = f();
  CHECK_EQ(42, res);
}

#ifdef __GNUC__
#define ELEMENT_COUNT 4u

void DoSSE2(const v8::FunctionCallbackInfo<v8::Value>& args) {
  v8::HandleScope scope(CcTest::isolate());
  v8::Local<v8::Context> context = CcTest::isolate()->GetCurrentContext();
  byte buffer[1024];

  CHECK(args[0]->IsArray());
  v8::Local<v8::Array> vec = v8::Local<v8::Array>::Cast(args[0]);
  CHECK_EQ(ELEMENT_COUNT, vec->Length());

  Isolate* isolate = CcTest::i_isolate();
  Assembler assm(isolate, buffer, sizeof(buffer));

  // Remove the return address from the stack to fix the stack frame alignment.
  __ popq(rcx);

  // Store the input vector on the stack.
  for (unsigned i = 0; i < ELEMENT_COUNT; i++) {
    __ movl(rax, Immediate(vec->Get(context, i)
                               .ToLocalChecked()
                               ->Int32Value(context)
                               .FromJust()));
    __ shlq(rax, Immediate(0x20));
    __ orq(rax, Immediate(vec->Get(context, ++i)
                              .ToLocalChecked()
                              ->Int32Value(context)
                              .FromJust()));
    __ pushq(rax);
  }

  // Read the vector into an xmm register.
  __ xorps(xmm0, xmm0);
  __ movdqa(xmm0, Operand(rsp, 0));
  // Create the sign mask and store it in the return register.
  __ movmskps(rax, xmm0);

  // Remove the unused data from the stack.
  __ addq(rsp, Immediate(ELEMENT_COUNT * sizeof(int32_t)));
  // Restore the return address.
  __ pushq(rcx);

  __ ret(0);

  CodeDesc desc;
  assm.GetCode(&desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());

  F0 f = FUNCTION_CAST<F0>(code->entry());
  int res = f();
  args.GetReturnValue().Set(v8::Integer::New(CcTest::isolate(), res));
}

TEST(StackAlignmentForSSE2) {
  CcTest::InitializeVM();
  CHECK_EQ(0, v8::base::OS::ActivationFrameAlignment() % 16);

  v8::Isolate* isolate = CcTest::isolate();
  v8::HandleScope handle_scope(isolate);
  v8::Local<v8::ObjectTemplate> global_template =
      v8::ObjectTemplate::New(isolate);
  global_template->Set(v8_str("do_sse2"),
                       v8::FunctionTemplate::New(isolate, DoSSE2));

  LocalContext env(NULL, global_template);
  CompileRun(
      "function foo(vec) {"
      "  return do_sse2(vec);"
      "}");

  v8::Local<v8::Object> global_object = env->Global();
  v8::Local<v8::Function> foo = v8::Local<v8::Function>::Cast(
      global_object->Get(env.local(), v8_str("foo")).ToLocalChecked());

  int32_t vec[ELEMENT_COUNT] = { -1, 1, 1, 1 };
  v8::Local<v8::Array> v8_vec = v8::Array::New(isolate, ELEMENT_COUNT);
  for (unsigned i = 0; i < ELEMENT_COUNT; i++) {
    v8_vec->Set(env.local(), i, v8_num(vec[i])).FromJust();
  }

  v8::Local<v8::Value> args[] = { v8_vec };
  v8::Local<v8::Value> result =
      foo->Call(env.local(), global_object, 1, args).ToLocalChecked();

  // The mask should be 0b1000.
  CHECK_EQ(8, result->Int32Value(env.local()).FromJust());
}

#undef ELEMENT_COUNT
#endif  // __GNUC__

TEST(AssemblerX64Extractps) {
  CcTest::InitializeVM();
  if (!CpuFeatures::IsSupported(SSE4_1)) return;

  v8::HandleScope scope(CcTest::isolate());
  byte buffer[256];
  Isolate* isolate = CcTest::i_isolate();
  Assembler assm(isolate, buffer, sizeof(buffer));
  { CpuFeatureScope fscope2(&assm, SSE4_1);
    __ extractps(rax, xmm0, 0x1);
    __ ret(0);
  }

  CodeDesc desc;
  assm.GetCode(&desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
#ifdef OBJECT_PRINT
  OFStream os(stdout);
  code->Print(os);
#endif

  F3 f = FUNCTION_CAST<F3>(code->entry());
  uint64_t value1 = V8_2PART_UINT64_C(0x12345678, 87654321);
  CHECK_EQ(0x12345678u, f(uint64_to_double(value1)));
  uint64_t value2 = V8_2PART_UINT64_C(0x87654321, 12345678);
  CHECK_EQ(0x87654321u, f(uint64_to_double(value2)));
}

typedef int (*F6)(float x, float y);
TEST(AssemblerX64SSE) {
  CcTest::InitializeVM();

  Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
  HandleScope scope(isolate);
  v8::internal::byte buffer[256];
  MacroAssembler assm(isolate, buffer, sizeof(buffer),
                      v8::internal::CodeObjectRequired::kYes);
  {
    __ shufps(xmm0, xmm0, 0x0);  // broadcast first argument
    __ shufps(xmm1, xmm1, 0x0);  // broadcast second argument
    __ movaps(xmm2, xmm1);
    __ addps(xmm2, xmm0);
    __ mulps(xmm2, xmm1);
    __ subps(xmm2, xmm0);
    __ divps(xmm2, xmm1);
    __ cvttss2si(rax, xmm2);
    __ ret(0);
  }

  CodeDesc desc;
  assm.GetCode(&desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc,
      Code::ComputeFlags(Code::STUB),
      Handle<Code>());
#ifdef OBJECT_PRINT
  OFStream os(stdout);
  code->Print(os);
#endif

  F6 f = FUNCTION_CAST<F6>(code->entry());
  CHECK_EQ(2, f(1.0, 2.0));
}

typedef int (*F7)(double x, double y, double z);
TEST(AssemblerX64FMA_sd) {
  CcTest::InitializeVM();
  if (!CpuFeatures::IsSupported(FMA3)) return;

  Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
  HandleScope scope(isolate);
  v8::internal::byte buffer[1024];
  MacroAssembler assm(isolate, buffer, sizeof(buffer),
                      v8::internal::CodeObjectRequired::kYes);
  {
    CpuFeatureScope fscope(&assm, FMA3);
    Label exit;
    // arguments in xmm0, xmm1 and xmm2
    // xmm0 * xmm1 + xmm2
    __ movaps(xmm3, xmm0);
    __ mulsd(xmm3, xmm1);
    __ addsd(xmm3, xmm2);  // Expected result in xmm3

    __ subq(rsp, Immediate(kDoubleSize));  // For memory operand
    // vfmadd132sd
    __ movl(rax, Immediate(1));  // Test number
    __ movaps(xmm8, xmm0);
    __ vfmadd132sd(xmm8, xmm2, xmm1);
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfmadd213sd
    __ incq(rax);
    __ movaps(xmm8, xmm1);
    __ vfmadd213sd(xmm8, xmm0, xmm2);
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfmadd231sd
    __ incq(rax);
    __ movaps(xmm8, xmm2);
    __ vfmadd231sd(xmm8, xmm0, xmm1);
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);

    // vfmadd132sd
    __ incq(rax);
    __ movaps(xmm8, xmm0);
    __ movsd(Operand(rsp, 0), xmm1);
    __ vfmadd132sd(xmm8, xmm2, Operand(rsp, 0));
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfmadd213sd
    __ incq(rax);
    __ movaps(xmm8, xmm1);
    __ movsd(Operand(rsp, 0), xmm2);
    __ vfmadd213sd(xmm8, xmm0, Operand(rsp, 0));
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfmadd231sd
    __ incq(rax);
    __ movaps(xmm8, xmm2);
    __ movsd(Operand(rsp, 0), xmm1);
    __ vfmadd231sd(xmm8, xmm0, Operand(rsp, 0));
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);

    // xmm0 * xmm1 - xmm2
    __ movaps(xmm3, xmm0);
    __ mulsd(xmm3, xmm1);
    __ subsd(xmm3, xmm2);  // Expected result in xmm3

    // vfmsub132sd
    __ incq(rax);
    __ movaps(xmm8, xmm0);
    __ vfmsub132sd(xmm8, xmm2, xmm1);
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfmsub213sd
    __ incq(rax);
    __ movaps(xmm8, xmm1);
    __ vfmsub213sd(xmm8, xmm0, xmm2);
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfmsub231sd
    __ incq(rax);
    __ movaps(xmm8, xmm2);
    __ vfmsub231sd(xmm8, xmm0, xmm1);
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);

    // vfmsub132sd
    __ incq(rax);
    __ movaps(xmm8, xmm0);
    __ movsd(Operand(rsp, 0), xmm1);
    __ vfmsub132sd(xmm8, xmm2, Operand(rsp, 0));
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfmsub213sd
    __ incq(rax);
    __ movaps(xmm8, xmm1);
    __ movsd(Operand(rsp, 0), xmm2);
    __ vfmsub213sd(xmm8, xmm0, Operand(rsp, 0));
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfmsub231sd
    __ incq(rax);
    __ movaps(xmm8, xmm2);
    __ movsd(Operand(rsp, 0), xmm1);
    __ vfmsub231sd(xmm8, xmm0, Operand(rsp, 0));
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);


    // - xmm0 * xmm1 + xmm2
    __ movaps(xmm3, xmm0);
    __ mulsd(xmm3, xmm1);
    __ Move(xmm4, (uint64_t)1 << 63);
    __ xorpd(xmm3, xmm4);
    __ addsd(xmm3, xmm2);  // Expected result in xmm3

    // vfnmadd132sd
    __ incq(rax);
    __ movaps(xmm8, xmm0);
    __ vfnmadd132sd(xmm8, xmm2, xmm1);
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfnmadd213sd
    __ incq(rax);
    __ movaps(xmm8, xmm1);
    __ vfnmadd213sd(xmm8, xmm0, xmm2);
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfnmadd231sd
    __ incq(rax);
    __ movaps(xmm8, xmm2);
    __ vfnmadd231sd(xmm8, xmm0, xmm1);
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);

    // vfnmadd132sd
    __ incq(rax);
    __ movaps(xmm8, xmm0);
    __ movsd(Operand(rsp, 0), xmm1);
    __ vfnmadd132sd(xmm8, xmm2, Operand(rsp, 0));
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfnmadd213sd
    __ incq(rax);
    __ movaps(xmm8, xmm1);
    __ movsd(Operand(rsp, 0), xmm2);
    __ vfnmadd213sd(xmm8, xmm0, Operand(rsp, 0));
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfnmadd231sd
    __ incq(rax);
    __ movaps(xmm8, xmm2);
    __ movsd(Operand(rsp, 0), xmm1);
    __ vfnmadd231sd(xmm8, xmm0, Operand(rsp, 0));
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);


    // - xmm0 * xmm1 - xmm2
    __ movaps(xmm3, xmm0);
    __ mulsd(xmm3, xmm1);
    __ Move(xmm4, (uint64_t)1 << 63);
    __ xorpd(xmm3, xmm4);
    __ subsd(xmm3, xmm2);  // Expected result in xmm3

    // vfnmsub132sd
    __ incq(rax);
    __ movaps(xmm8, xmm0);
    __ vfnmsub132sd(xmm8, xmm2, xmm1);
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfnmsub213sd
    __ incq(rax);
    __ movaps(xmm8, xmm1);
    __ vfnmsub213sd(xmm8, xmm0, xmm2);
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfnmsub231sd
    __ incq(rax);
    __ movaps(xmm8, xmm2);
    __ vfnmsub231sd(xmm8, xmm0, xmm1);
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);

    // vfnmsub132sd
    __ incq(rax);
    __ movaps(xmm8, xmm0);
    __ movsd(Operand(rsp, 0), xmm1);
    __ vfnmsub132sd(xmm8, xmm2, Operand(rsp, 0));
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfnmsub213sd
    __ incq(rax);
    __ movaps(xmm8, xmm1);
    __ movsd(Operand(rsp, 0), xmm2);
    __ vfnmsub213sd(xmm8, xmm0, Operand(rsp, 0));
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfnmsub231sd
    __ incq(rax);
    __ movaps(xmm8, xmm2);
    __ movsd(Operand(rsp, 0), xmm1);
    __ vfnmsub231sd(xmm8, xmm0, Operand(rsp, 0));
    __ ucomisd(xmm8, xmm3);
    __ j(not_equal, &exit);


    __ xorl(rax, rax);
    __ bind(&exit);
    __ addq(rsp, Immediate(kDoubleSize));
    __ ret(0);
  }

  CodeDesc desc;
  assm.GetCode(&desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
#ifdef OBJECT_PRINT
  OFStream os(stdout);
  code->Print(os);
#endif

  F7 f = FUNCTION_CAST<F7>(code->entry());
  CHECK_EQ(0, f(0.000092662107262076, -2.460774966188315, -1.0958787393627414));
}

typedef int (*F8)(float x, float y, float z);
TEST(AssemblerX64FMA_ss) {
  CcTest::InitializeVM();
  if (!CpuFeatures::IsSupported(FMA3)) return;

  Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
  HandleScope scope(isolate);
  v8::internal::byte buffer[1024];
  MacroAssembler assm(isolate, buffer, sizeof(buffer),
                      v8::internal::CodeObjectRequired::kYes);
  {
    CpuFeatureScope fscope(&assm, FMA3);
    Label exit;
    // arguments in xmm0, xmm1 and xmm2
    // xmm0 * xmm1 + xmm2
    __ movaps(xmm3, xmm0);
    __ mulss(xmm3, xmm1);
    __ addss(xmm3, xmm2);  // Expected result in xmm3

    __ subq(rsp, Immediate(kDoubleSize));  // For memory operand
    // vfmadd132ss
    __ movl(rax, Immediate(1));  // Test number
    __ movaps(xmm8, xmm0);
    __ vfmadd132ss(xmm8, xmm2, xmm1);
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfmadd213ss
    __ incq(rax);
    __ movaps(xmm8, xmm1);
    __ vfmadd213ss(xmm8, xmm0, xmm2);
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfmadd231ss
    __ incq(rax);
    __ movaps(xmm8, xmm2);
    __ vfmadd231ss(xmm8, xmm0, xmm1);
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);

    // vfmadd132ss
    __ incq(rax);
    __ movaps(xmm8, xmm0);
    __ movss(Operand(rsp, 0), xmm1);
    __ vfmadd132ss(xmm8, xmm2, Operand(rsp, 0));
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfmadd213ss
    __ incq(rax);
    __ movaps(xmm8, xmm1);
    __ movss(Operand(rsp, 0), xmm2);
    __ vfmadd213ss(xmm8, xmm0, Operand(rsp, 0));
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfmadd231ss
    __ incq(rax);
    __ movaps(xmm8, xmm2);
    __ movss(Operand(rsp, 0), xmm1);
    __ vfmadd231ss(xmm8, xmm0, Operand(rsp, 0));
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);

    // xmm0 * xmm1 - xmm2
    __ movaps(xmm3, xmm0);
    __ mulss(xmm3, xmm1);
    __ subss(xmm3, xmm2);  // Expected result in xmm3

    // vfmsub132ss
    __ incq(rax);
    __ movaps(xmm8, xmm0);
    __ vfmsub132ss(xmm8, xmm2, xmm1);
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfmsub213ss
    __ incq(rax);
    __ movaps(xmm8, xmm1);
    __ vfmsub213ss(xmm8, xmm0, xmm2);
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfmsub231ss
    __ incq(rax);
    __ movaps(xmm8, xmm2);
    __ vfmsub231ss(xmm8, xmm0, xmm1);
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);

    // vfmsub132ss
    __ incq(rax);
    __ movaps(xmm8, xmm0);
    __ movss(Operand(rsp, 0), xmm1);
    __ vfmsub132ss(xmm8, xmm2, Operand(rsp, 0));
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfmsub213ss
    __ incq(rax);
    __ movaps(xmm8, xmm1);
    __ movss(Operand(rsp, 0), xmm2);
    __ vfmsub213ss(xmm8, xmm0, Operand(rsp, 0));
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfmsub231ss
    __ incq(rax);
    __ movaps(xmm8, xmm2);
    __ movss(Operand(rsp, 0), xmm1);
    __ vfmsub231ss(xmm8, xmm0, Operand(rsp, 0));
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);


    // - xmm0 * xmm1 + xmm2
    __ movaps(xmm3, xmm0);
    __ mulss(xmm3, xmm1);
    __ Move(xmm4, (uint32_t)1 << 31);
    __ xorps(xmm3, xmm4);
    __ addss(xmm3, xmm2);  // Expected result in xmm3

    // vfnmadd132ss
    __ incq(rax);
    __ movaps(xmm8, xmm0);
    __ vfnmadd132ss(xmm8, xmm2, xmm1);
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfnmadd213ss
    __ incq(rax);
    __ movaps(xmm8, xmm1);
    __ vfnmadd213ss(xmm8, xmm0, xmm2);
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfnmadd231ss
    __ incq(rax);
    __ movaps(xmm8, xmm2);
    __ vfnmadd231ss(xmm8, xmm0, xmm1);
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);

    // vfnmadd132ss
    __ incq(rax);
    __ movaps(xmm8, xmm0);
    __ movss(Operand(rsp, 0), xmm1);
    __ vfnmadd132ss(xmm8, xmm2, Operand(rsp, 0));
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfnmadd213ss
    __ incq(rax);
    __ movaps(xmm8, xmm1);
    __ movss(Operand(rsp, 0), xmm2);
    __ vfnmadd213ss(xmm8, xmm0, Operand(rsp, 0));
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfnmadd231ss
    __ incq(rax);
    __ movaps(xmm8, xmm2);
    __ movss(Operand(rsp, 0), xmm1);
    __ vfnmadd231ss(xmm8, xmm0, Operand(rsp, 0));
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);


    // - xmm0 * xmm1 - xmm2
    __ movaps(xmm3, xmm0);
    __ mulss(xmm3, xmm1);
    __ Move(xmm4, (uint32_t)1 << 31);
    __ xorps(xmm3, xmm4);
    __ subss(xmm3, xmm2);  // Expected result in xmm3

    // vfnmsub132ss
    __ incq(rax);
    __ movaps(xmm8, xmm0);
    __ vfnmsub132ss(xmm8, xmm2, xmm1);
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfnmsub213ss
    __ incq(rax);
    __ movaps(xmm8, xmm1);
    __ vfnmsub213ss(xmm8, xmm0, xmm2);
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfnmsub231ss
    __ incq(rax);
    __ movaps(xmm8, xmm2);
    __ vfnmsub231ss(xmm8, xmm0, xmm1);
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);

    // vfnmsub132ss
    __ incq(rax);
    __ movaps(xmm8, xmm0);
    __ movss(Operand(rsp, 0), xmm1);
    __ vfnmsub132ss(xmm8, xmm2, Operand(rsp, 0));
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfnmsub213ss
    __ incq(rax);
    __ movaps(xmm8, xmm1);
    __ movss(Operand(rsp, 0), xmm2);
    __ vfnmsub213ss(xmm8, xmm0, Operand(rsp, 0));
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);
    // vfnmsub231ss
    __ incq(rax);
    __ movaps(xmm8, xmm2);
    __ movss(Operand(rsp, 0), xmm1);
    __ vfnmsub231ss(xmm8, xmm0, Operand(rsp, 0));
    __ ucomiss(xmm8, xmm3);
    __ j(not_equal, &exit);


    __ xorl(rax, rax);
    __ bind(&exit);
    __ addq(rsp, Immediate(kDoubleSize));
    __ ret(0);
  }

  CodeDesc desc;
  assm.GetCode(&desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
#ifdef OBJECT_PRINT
  OFStream os(stdout);
  code->Print(os);
#endif

  F8 f = FUNCTION_CAST<F8>(code->entry());
  CHECK_EQ(0, f(9.26621069e-05f, -2.4607749f, -1.09587872f));
}

TEST(AssemblerX64SSE_ss) {
  CcTest::InitializeVM();

  Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
  HandleScope scope(isolate);
  v8::internal::byte buffer[1024];
  Assembler assm(isolate, buffer, sizeof(buffer));
  {
    Label exit;
    // arguments in xmm0, xmm1 and xmm2
    __ movl(rax, Immediate(0));

    __ movaps(xmm3, xmm0);
    __ maxss(xmm3, xmm1);
    __ ucomiss(xmm3, xmm1);
    __ j(parity_even, &exit);
    __ j(not_equal, &exit);
    __ movl(rax, Immediate(1));

    __ movaps(xmm3, xmm1);
    __ minss(xmm3, xmm2);
    __ ucomiss(xmm3, xmm1);
    __ j(parity_even, &exit);
    __ j(not_equal, &exit);
    __ movl(rax, Immediate(2));

    __ movaps(xmm3, xmm2);
    __ subss(xmm3, xmm1);
    __ ucomiss(xmm3, xmm0);
    __ j(parity_even, &exit);
    __ j(not_equal, &exit);
    __ movl(rax, Immediate(3));

    __ movaps(xmm3, xmm0);
    __ addss(xmm3, xmm1);
    __ ucomiss(xmm3, xmm2);
    __ j(parity_even, &exit);
    __ j(not_equal, &exit);
    __ movl(rax, Immediate(4));

    __ movaps(xmm3, xmm0);
    __ mulss(xmm3, xmm1);
    __ ucomiss(xmm3, xmm1);
    __ j(parity_even, &exit);
    __ j(not_equal, &exit);
    __ movl(rax, Immediate(5));

    __ movaps(xmm3, xmm0);
    __ divss(xmm3, xmm1);
    __ mulss(xmm3, xmm2);
    __ mulss(xmm3, xmm1);
    __ ucomiss(xmm3, xmm2);
    __ j(parity_even, &exit);
    __ j(not_equal, &exit);
    __ movl(rax, Immediate(6));

    // result in eax
    __ bind(&exit);
    __ ret(0);
  }

  CodeDesc desc;
  assm.GetCode(&desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
#ifdef OBJECT_PRINT
  OFStream os(stdout);
  code->Print(os);
#endif

  F8 f = FUNCTION_CAST<F8>(code->entry());
  int res = f(1.0f, 2.0f, 3.0f);
  PrintF("f(1,2,3) = %d\n", res);
  CHECK_EQ(6, res);
}

TEST(AssemblerX64AVX_ss) {
  CcTest::InitializeVM();
  if (!CpuFeatures::IsSupported(AVX)) return;

  Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
  HandleScope scope(isolate);
  v8::internal::byte buffer[1024];
  Assembler assm(isolate, buffer, sizeof(buffer));
  {
    CpuFeatureScope avx_scope(&assm, AVX);
    Label exit;
    // arguments in xmm0, xmm1 and xmm2
    __ subq(rsp, Immediate(kDoubleSize * 2));  // For memory operand

    __ movl(rdx, Immediate(0xc2f64000));  // -123.125
    __ vmovd(xmm4, rdx);
    __ vmovss(Operand(rsp, 0), xmm4);
    __ vmovss(xmm5, Operand(rsp, 0));
    __ vmovaps(xmm6, xmm5);
    __ vmovd(rcx, xmm6);
    __ cmpl(rcx, rdx);
    __ movl(rax, Immediate(9));
    __ j(not_equal, &exit);

    __ movl(rax, Immediate(0));
    __ vmaxss(xmm3, xmm0, xmm1);
    __ vucomiss(xmm3, xmm1);
    __ j(parity_even, &exit);
    __ j(not_equal, &exit);
    __ movl(rax, Immediate(1));

    __ vminss(xmm3, xmm1, xmm2);
    __ vucomiss(xmm3, xmm1);
    __ j(parity_even, &exit);
    __ j(not_equal, &exit);
    __ movl(rax, Immediate(2));

    __ vsubss(xmm3, xmm2, xmm1);
    __ vucomiss(xmm3, xmm0);
    __ j(parity_even, &exit);
    __ j(not_equal, &exit);
    __ movl(rax, Immediate(3));

    __ vaddss(xmm3, xmm0, xmm1);
    __ vucomiss(xmm3, xmm2);
    __ j(parity_even, &exit);
    __ j(not_equal, &exit);
    __ movl(rax, Immediate(4));

    __ vmulss(xmm3, xmm0, xmm1);
    __ vucomiss(xmm3, xmm1);
    __ j(parity_even, &exit);
    __ j(not_equal, &exit);
    __ movl(rax, Immediate(5));

    __ vdivss(xmm3, xmm0, xmm1);
    __ vmulss(xmm3, xmm3, xmm2);
    __ vmulss(xmm3, xmm3, xmm1);
    __ vucomiss(xmm3, xmm2);
    __ j(parity_even, &exit);
    __ j(not_equal, &exit);
    __ movl(rax, Immediate(6));

    // result in eax
    __ bind(&exit);
    __ addq(rsp, Immediate(kDoubleSize * 2));
    __ ret(0);
  }

  CodeDesc desc;
  assm.GetCode(&desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
#ifdef OBJECT_PRINT
  OFStream os(stdout);
  code->Print(os);
#endif

  F8 f = FUNCTION_CAST<F8>(code->entry());
  int res = f(1.0f, 2.0f, 3.0f);
  PrintF("f(1,2,3) = %d\n", res);
  CHECK_EQ(6, res);
}

TEST(AssemblerX64AVX_sd) {
  CcTest::InitializeVM();
  if (!CpuFeatures::IsSupported(AVX)) return;

  Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
  HandleScope scope(isolate);
  v8::internal::byte buffer[1024];
  Assembler assm(isolate, buffer, sizeof(buffer));
  {
    CpuFeatureScope avx_scope(&assm, AVX);
    Label exit;
    // arguments in xmm0, xmm1 and xmm2
    __ subq(rsp, Immediate(kDoubleSize * 2));  // For memory operand
    __ movl(rax, Immediate(0));

    __ vmaxsd(xmm4, xmm0, xmm1);
    __ vmovsd(Operand(rsp, kDoubleSize), xmm4);
    __ vmovsd(xmm5, Operand(rsp, kDoubleSize));
    __ vmovsd(xmm6, xmm6, xmm5);
    __ vmovapd(xmm3, xmm6);

    // Test vcvtss2sd & vcvtsd2ss
    __ movl(rax, Immediate(9));
    __ movq(rdx, V8_INT64_C(0x426D1A0000000000));
    __ movq(Operand(rsp, 0), rdx);
    __ vcvtsd2ss(xmm6, xmm6, Operand(rsp, 0));
    __ vcvtss2sd(xmm7, xmm6, xmm6);
    __ vcvtsd2ss(xmm8, xmm7, xmm7);
    __ vmovss(Operand(rsp, 0), xmm8);
    __ vcvtss2sd(xmm9, xmm8, Operand(rsp, 0));
    __ vmovq(rcx, xmm9);
    __ cmpq(rcx, rdx);
    __ j(not_equal, &exit);

    // Test vcvttsd2si
    __ movl(rax, Immediate(10));
    __ movl(rdx, Immediate(123));
    __ vcvtlsi2sd(xmm6, xmm6, rdx);
    __ vcvttsd2si(rcx, xmm6);
    __ cmpl(rcx, rdx);
    __ j(not_equal, &exit);
    __ xorl(rcx, rcx);
    __ vmovsd(Operand(rsp, 0), xmm6);
    __ vcvttsd2si(rcx, Operand(rsp, 0));
    __ cmpl(rcx, rdx);
    __ j(not_equal, &exit);

    // Test vcvttsd2siq
    __ movl(rax, Immediate(11));
    __ movq(rdx, V8_INT64_C(0x426D1A94A2000000));  // 1.0e12
    __ vmovq(xmm6, rdx);
    __ vcvttsd2siq(rcx, xmm6);
    __ movq(rdx, V8_INT64_C(1000000000000));
    __ cmpq(rcx, rdx);
    __ j(not_equal, &exit);
    __ xorq(rcx, rcx);
    __ vmovsd(Operand(rsp, 0), xmm6);
    __ vcvttsd2siq(rcx, Operand(rsp, 0));
    __ cmpq(rcx, rdx);
    __ j(not_equal, &exit);

    // Test vmovmskpd
    __ movl(rax, Immediate(12));
    __ movq(rdx, V8_INT64_C(0x426D1A94A2000000));  // 1.0e12
    __ vmovq(xmm6, rdx);
    __ movq(rdx, V8_INT64_C(0xC26D1A94A2000000));  // -1.0e12
    __ vmovq(xmm7, rdx);
    __ shufps(xmm6, xmm7, 0x44);
    __ vmovmskpd(rdx, xmm6);
    __ cmpl(rdx, Immediate(2));
    __ j(not_equal, &exit);

    // Test vpcmpeqd
    __ movq(rdx, V8_UINT64_C(0x0123456789abcdef));
    __ movq(rcx, V8_UINT64_C(0x0123456788888888));
    __ vmovq(xmm6, rdx);
    __ vmovq(xmm7, rcx);
    __ vpcmpeqd(xmm8, xmm6, xmm7);
    __ vmovq(rdx, xmm8);
    __ movq(rcx, V8_UINT64_C(0xffffffff00000000));
    __ cmpq(rcx, rdx);
    __ movl(rax, Immediate(13));
    __ j(not_equal, &exit);

    // Test vpsllq, vpsrlq
    __ movl(rax, Immediate(13));
    __ movq(rdx, V8_UINT64_C(0x0123456789abcdef));
    __ vmovq(xmm6, rdx);
    __ vpsrlq(xmm7, xmm6, 4);
    __ vmovq(rdx, xmm7);
    __ movq(rcx, V8_UINT64_C(0x00123456789abcde));
    __ cmpq(rdx, rcx);
    __ j(not_equal, &exit);
    __ vpsllq(xmm7, xmm6, 12);
    __ vmovq(rdx, xmm7);
    __ movq(rcx, V8_UINT64_C(0x3456789abcdef000));
    __ cmpq(rdx, rcx);
    __ j(not_equal, &exit);

    // Test vandpd, vorpd, vxorpd
    __ movl(rax, Immediate(14));
    __ movl(rdx, Immediate(0x00ff00ff));
    __ movl(rcx, Immediate(0x0f0f0f0f));
    __ vmovd(xmm4, rdx);
    __ vmovd(xmm5, rcx);
    __ vandpd(xmm6, xmm4, xmm5);
    __ vmovd(rdx, xmm6);
    __ cmpl(rdx, Immediate(0x000f000f));
    __ j(not_equal, &exit);
    __ vorpd(xmm6, xmm4, xmm5);
    __ vmovd(rdx, xmm6);
    __ cmpl(rdx, Immediate(0x0fff0fff));
    __ j(not_equal, &exit);
    __ vxorpd(xmm6, xmm4, xmm5);
    __ vmovd(rdx, xmm6);
    __ cmpl(rdx, Immediate(0x0ff00ff0));
    __ j(not_equal, &exit);

    // Test vsqrtsd
    __ movl(rax, Immediate(15));
    __ movq(rdx, V8_UINT64_C(0x4004000000000000));  // 2.5
    __ vmovq(xmm4, rdx);
    __ vmulsd(xmm5, xmm4, xmm4);
    __ vmovsd(Operand(rsp, 0), xmm5);
    __ vsqrtsd(xmm6, xmm5, xmm5);
    __ vmovq(rcx, xmm6);
    __ cmpq(rcx, rdx);
    __ j(not_equal, &exit);
    __ vsqrtsd(xmm7, xmm7, Operand(rsp, 0));
    __ vmovq(rcx, xmm7);
    __ cmpq(rcx, rdx);
    __ j(not_equal, &exit);

    // Test vroundsd
    __ movl(rax, Immediate(16));
    __ movq(rdx, V8_UINT64_C(0x4002000000000000));  // 2.25
    __ vmovq(xmm4, rdx);
    __ vroundsd(xmm5, xmm4, xmm4, kRoundUp);
    __ movq(rcx, V8_UINT64_C(0x4008000000000000));  // 3.0
    __ vmovq(xmm6, rcx);
    __ vucomisd(xmm5, xmm6);
    __ j(not_equal, &exit);

    // Test vcvtlsi2sd
    __ movl(rax, Immediate(17));
    __ movl(rdx, Immediate(6));
    __ movq(rcx, V8_UINT64_C(0x4018000000000000));  // 6.0
    __ vmovq(xmm5, rcx);
    __ vcvtlsi2sd(xmm6, xmm6, rdx);
    __ vucomisd(xmm5, xmm6);
    __ j(not_equal, &exit);
    __ movl(Operand(rsp, 0), rdx);
    __ vcvtlsi2sd(xmm7, xmm7, Operand(rsp, 0));
    __ vucomisd(xmm5, xmm6);
    __ j(not_equal, &exit);

    // Test vcvtqsi2sd
    __ movl(rax, Immediate(18));
    __ movq(rdx, V8_UINT64_C(0x2000000000000000));  // 2 << 0x3c
    __ movq(rcx, V8_UINT64_C(0x43c0000000000000));
    __ vmovq(xmm5, rcx);
    __ vcvtqsi2sd(xmm6, xmm6, rdx);
    __ vucomisd(xmm5, xmm6);
    __ j(not_equal, &exit);

    // Test vcvtsd2si
    __ movl(rax, Immediate(19));
    __ movq(rdx, V8_UINT64_C(0x4018000000000000));  // 6.0
    __ vmovq(xmm5, rdx);
    __ vcvtsd2si(rcx, xmm5);
    __ cmpl(rcx, Immediate(6));
    __ j(not_equal, &exit);

    __ movq(rdx, V8_INT64_C(0x3ff0000000000000));  // 1.0
    __ vmovq(xmm7, rdx);
    __ vmulsd(xmm1, xmm1, xmm7);
    __ movq(Operand(rsp, 0), rdx);
    __ vmovq(xmm6, Operand(rsp, 0));
    __ vmulsd(xmm1, xmm1, xmm6);

    __ vucomisd(xmm3, xmm1);
    __ j(parity_even, &exit);
    __ j(not_equal, &exit);
    __ movl(rax, Immediate(1));

    __ vminsd(xmm3, xmm1, xmm2);
    __ vucomisd(xmm3, xmm1);
    __ j(parity_even, &exit);
    __ j(not_equal, &exit);
    __ movl(rax, Immediate(2));

    __ vsubsd(xmm3, xmm2, xmm1);
    __ vucomisd(xmm3, xmm0);
    __ j(parity_even, &exit);
    __ j(not_equal, &exit);
    __ movl(rax, Immediate(3));

    __ vaddsd(xmm3, xmm0, xmm1);
    __ vucomisd(xmm3, xmm2);
    __ j(parity_even, &exit);
    __ j(not_equal, &exit);
    __ movl(rax, Immediate(4));

    __ vmulsd(xmm3, xmm0, xmm1);
    __ vucomisd(xmm3, xmm1);
    __ j(parity_even, &exit);
    __ j(not_equal, &exit);
    __ movl(rax, Immediate(5));

    __ vdivsd(xmm3, xmm0, xmm1);
    __ vmulsd(xmm3, xmm3, xmm2);
    __ vmulsd(xmm3, xmm3, xmm1);
    __ vucomisd(xmm3, xmm2);
    __ j(parity_even, &exit);
    __ j(not_equal, &exit);
    __ movl(rax, Immediate(6));

    // result in eax
    __ bind(&exit);
    __ addq(rsp, Immediate(kDoubleSize * 2));
    __ ret(0);
  }

  CodeDesc desc;
  assm.GetCode(&desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
#ifdef OBJECT_PRINT
  OFStream os(stdout);
  code->Print(os);
#endif

  F7 f = FUNCTION_CAST<F7>(code->entry());
  int res = f(1.0, 2.0, 3.0);
  PrintF("f(1,2,3) = %d\n", res);
  CHECK_EQ(6, res);
}

1604TEST(AssemblerX64BMI1) {
1605 CcTest::InitializeVM();
1606 if (!CpuFeatures::IsSupported(BMI1)) return;
1607
1608 Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
1609 HandleScope scope(isolate);
1610 v8::internal::byte buffer[1024];
1611 MacroAssembler assm(isolate, buffer, sizeof(buffer),
1612 v8::internal::CodeObjectRequired::kYes);
1613 {
1614 CpuFeatureScope fscope(&assm, BMI1);
1615 Label exit;
1616
1617 __ movq(rcx, V8_UINT64_C(0x1122334455667788)); // source operand
1618 __ pushq(rcx); // For memory operand
1619
1620 // andn
    __ movq(rdx, V8_UINT64_C(0x1000000020000000));

    __ movl(rax, Immediate(1));  // Test number
    __ andnq(r8, rdx, rcx);
    __ movq(r9, V8_UINT64_C(0x0122334455667788));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ andnq(r8, rdx, Operand(rsp, 0));
    __ movq(r9, V8_UINT64_C(0x0122334455667788));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ andnl(r8, rdx, rcx);
    __ movq(r9, V8_UINT64_C(0x0000000055667788));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ andnl(r8, rdx, Operand(rsp, 0));
    __ movq(r9, V8_UINT64_C(0x0000000055667788));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    // bextr
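    // bextr extracts a bit field described by the control value loaded next:
    // bits 7:0 give the start position (0x08) and bits 15:8 the length
    // (0x28 = 40), i.e. 40 bits of the source starting at bit 8.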
    __ movq(rdx, V8_UINT64_C(0x0000000000002808));

    __ incq(rax);
    __ bextrq(r8, rcx, rdx);
    __ movq(r9, V8_UINT64_C(0x0000003344556677));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ bextrq(r8, Operand(rsp, 0), rdx);
    __ movq(r9, V8_UINT64_C(0x0000003344556677));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ bextrl(r8, rcx, rdx);
    __ movq(r9, V8_UINT64_C(0x0000000000556677));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ bextrl(r8, Operand(rsp, 0), rdx);
    __ movq(r9, V8_UINT64_C(0x0000000000556677));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    // blsi
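    // blsi isolates the lowest set bit; for 0x...788 that is bit 3 (0x8).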
    __ incq(rax);
    __ blsiq(r8, rcx);
    __ movq(r9, V8_UINT64_C(0x0000000000000008));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ blsiq(r8, Operand(rsp, 0));
    __ movq(r9, V8_UINT64_C(0x0000000000000008));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ blsil(r8, rcx);
    __ movq(r9, V8_UINT64_C(0x0000000000000008));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ blsil(r8, Operand(rsp, 0));
    __ movq(r9, V8_UINT64_C(0x0000000000000008));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    // blsmsk
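    // blsmsk builds a mask of ones up to and including the lowest set bit,
    // so bit 3 in the source yields 0xf.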
    __ incq(rax);
    __ blsmskq(r8, rcx);
    __ movq(r9, V8_UINT64_C(0x000000000000000f));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ blsmskq(r8, Operand(rsp, 0));
    __ movq(r9, V8_UINT64_C(0x000000000000000f));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ blsmskl(r8, rcx);
    __ movq(r9, V8_UINT64_C(0x000000000000000f));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ blsmskl(r8, Operand(rsp, 0));
    __ movq(r9, V8_UINT64_C(0x000000000000000f));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    // blsr
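    // blsr clears the lowest set bit, turning the trailing ...788 into ...780.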
    __ incq(rax);
    __ blsrq(r8, rcx);
    __ movq(r9, V8_UINT64_C(0x1122334455667780));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ blsrq(r8, Operand(rsp, 0));
    __ movq(r9, V8_UINT64_C(0x1122334455667780));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ blsrl(r8, rcx);
    __ movq(r9, V8_UINT64_C(0x0000000055667780));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ blsrl(r8, Operand(rsp, 0));
    __ movq(r9, V8_UINT64_C(0x0000000055667780));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    // tzcnt
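    // tzcnt counts trailing zero bits; 0x...788 has three (0x8 = 1000b), in
    // both the 64-bit and 32-bit views.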
    __ incq(rax);
    __ tzcntq(r8, rcx);
    __ movq(r9, V8_UINT64_C(0x0000000000000003));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ tzcntq(r8, Operand(rsp, 0));
    __ movq(r9, V8_UINT64_C(0x0000000000000003));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ tzcntl(r8, rcx);
    __ movq(r9, V8_UINT64_C(0x0000000000000003));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ tzcntl(r8, Operand(rsp, 0));
    __ movq(r9, V8_UINT64_C(0x0000000000000003));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ xorl(rax, rax);
    __ bind(&exit);
    __ popq(rcx);
    __ ret(0);
  }

  CodeDesc desc;
  assm.GetCode(&desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
#ifdef OBJECT_PRINT
  OFStream os(stdout);
  code->Print(os);
#endif

  F0 f = FUNCTION_CAST<F0>(code->entry());
  CHECK_EQ(0, f());
}


TEST(AssemblerX64LZCNT) {
  CcTest::InitializeVM();
  if (!CpuFeatures::IsSupported(LZCNT)) return;

  Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
  HandleScope scope(isolate);
  v8::internal::byte buffer[256];
  MacroAssembler assm(isolate, buffer, sizeof(buffer),
                      v8::internal::CodeObjectRequired::kYes);
  {
    CpuFeatureScope fscope(&assm, LZCNT);
    Label exit;

    __ movq(rcx, V8_UINT64_C(0x1122334455667788));  // source operand
    __ pushq(rcx);  // For memory operand

    __ movl(rax, Immediate(1));  // Test number
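    // 0x1122334455667788 has three leading zero bits, while its low 32 bits
    // (0x55667788) have only one, which is what the "l" forms should report.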
    __ lzcntq(r8, rcx);
    __ movq(r9, V8_UINT64_C(0x0000000000000003));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ lzcntq(r8, Operand(rsp, 0));
    __ movq(r9, V8_UINT64_C(0x0000000000000003));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ lzcntl(r8, rcx);
    __ movq(r9, V8_UINT64_C(0x0000000000000001));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ lzcntl(r8, Operand(rsp, 0));
    __ movq(r9, V8_UINT64_C(0x0000000000000001));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ xorl(rax, rax);
    __ bind(&exit);
    __ popq(rcx);
    __ ret(0);
  }

  CodeDesc desc;
  assm.GetCode(&desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
#ifdef OBJECT_PRINT
  OFStream os(stdout);
  code->Print(os);
#endif

  F0 f = FUNCTION_CAST<F0>(code->entry());
  CHECK_EQ(0, f());
}


TEST(AssemblerX64POPCNT) {
  CcTest::InitializeVM();
  if (!CpuFeatures::IsSupported(POPCNT)) return;

  Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
  HandleScope scope(isolate);
  v8::internal::byte buffer[256];
  MacroAssembler assm(isolate, buffer, sizeof(buffer),
                      v8::internal::CodeObjectRequired::kYes);
  {
    CpuFeatureScope fscope(&assm, POPCNT);
    Label exit;

    __ movq(rcx, V8_UINT64_C(0x1111111111111100));  // source operand
    __ pushq(rcx);  // For memory operand

    __ movl(rax, Immediate(1));  // Test number
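    // 0x1111111111111100 has fourteen set bits in total and six in its low
    // 32 bits; those are the values expected from popcntq and popcntl.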
    __ popcntq(r8, rcx);
    __ movq(r9, V8_UINT64_C(0x000000000000000e));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ popcntq(r8, Operand(rsp, 0));
    __ movq(r9, V8_UINT64_C(0x000000000000000e));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ popcntl(r8, rcx);
    __ movq(r9, V8_UINT64_C(0x0000000000000006));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ popcntl(r8, Operand(rsp, 0));
    __ movq(r9, V8_UINT64_C(0x0000000000000006));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ xorl(rax, rax);
    __ bind(&exit);
    __ popq(rcx);
    __ ret(0);
  }

  CodeDesc desc;
  assm.GetCode(&desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
#ifdef OBJECT_PRINT
  OFStream os(stdout);
  code->Print(os);
#endif

  F0 f = FUNCTION_CAST<F0>(code->entry());
  CHECK_EQ(0, f());
}


TEST(AssemblerX64BMI2) {
  CcTest::InitializeVM();
  if (!CpuFeatures::IsSupported(BMI2)) return;

  Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
  HandleScope scope(isolate);
  v8::internal::byte buffer[2048];
  MacroAssembler assm(isolate, buffer, sizeof(buffer),
                      v8::internal::CodeObjectRequired::kYes);
  {
    CpuFeatureScope fscope(&assm, BMI2);
    Label exit;
    __ pushq(rbx);  // save rbx
    __ movq(rcx, V8_UINT64_C(0x1122334455667788));  // source operand
    __ pushq(rcx);  // For memory operand

    // bzhi
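    // bzhi zeroes every bit at and above the index given in rdx (9 below),
    // leaving only the low nine bits of 0x...788, i.e. 0x188.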
    __ movq(rdx, V8_UINT64_C(0x0000000000000009));

    __ movl(rax, Immediate(1));  // Test number
    __ bzhiq(r8, rcx, rdx);
    __ movq(r9, V8_UINT64_C(0x0000000000000188));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ bzhiq(r8, Operand(rsp, 0), rdx);
    __ movq(r9, V8_UINT64_C(0x0000000000000188));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ bzhil(r8, rcx, rdx);
    __ movq(r9, V8_UINT64_C(0x0000000000000188));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ bzhil(r8, Operand(rsp, 0), rdx);
    __ movq(r9, V8_UINT64_C(0x0000000000000188));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    // mulx
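    // mulx multiplies the implicit rdx operand by the source without touching
    // the flags, writing the high half to the first destination and the low
    // half to the second. Multiplying by 0x1000 is a 12-bit left shift, hence
    // 0x112 : 0x2334455667788000 for the 64-bit form.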
    __ movq(rdx, V8_UINT64_C(0x0000000000001000));

    __ incq(rax);
    __ mulxq(r8, r9, rcx);
    __ movq(rbx, V8_UINT64_C(0x0000000000000112));  // expected result
    __ cmpq(r8, rbx);
    __ j(not_equal, &exit);
    __ movq(rbx, V8_UINT64_C(0x2334455667788000));  // expected result
    __ cmpq(r9, rbx);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ mulxq(r8, r9, Operand(rsp, 0));
    __ movq(rbx, V8_UINT64_C(0x0000000000000112));  // expected result
    __ cmpq(r8, rbx);
    __ j(not_equal, &exit);
    __ movq(rbx, V8_UINT64_C(0x2334455667788000));  // expected result
    __ cmpq(r9, rbx);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ mulxl(r8, r9, rcx);
    __ movq(rbx, V8_UINT64_C(0x0000000000000556));  // expected result
    __ cmpq(r8, rbx);
    __ j(not_equal, &exit);
    __ movq(rbx, V8_UINT64_C(0x0000000067788000));  // expected result
    __ cmpq(r9, rbx);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ mulxl(r8, r9, Operand(rsp, 0));
    __ movq(rbx, V8_UINT64_C(0x0000000000000556));  // expected result
    __ cmpq(r8, rbx);
    __ j(not_equal, &exit);
    __ movq(rbx, V8_UINT64_C(0x0000000067788000));  // expected result
    __ cmpq(r9, rbx);
    __ j(not_equal, &exit);

    // pdep
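    // pdep scatters the low bits of rdx into the set-bit positions of the
    // mask. The four lowest source bits are zero, so the four lowest set bits
    // of 0x1122334455667788 get cleared, giving 0x1122334455667400.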
    __ movq(rdx, V8_UINT64_C(0xfffffffffffffff0));

    __ incq(rax);
    __ pdepq(r8, rdx, rcx);
    __ movq(r9, V8_UINT64_C(0x1122334455667400));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ pdepq(r8, rdx, Operand(rsp, 0));
    __ movq(r9, V8_UINT64_C(0x1122334455667400));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ pdepl(r8, rdx, rcx);
    __ movq(r9, V8_UINT64_C(0x0000000055667400));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ pdepl(r8, rdx, Operand(rsp, 0));
    __ movq(r9, V8_UINT64_C(0x0000000055667400));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    // pext
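    // pext gathers the mask-selected bits of rdx into the low bits of the
    // destination. Only mask bit 3 selects a zero, so the result is a run of
    // ones with bit 0 clear: 0x3fffffe for the 26 set bits of the 64-bit mask
    // and 0xfffe for the 16 set bits of its low half.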
    __ movq(rdx, V8_UINT64_C(0xfffffffffffffff0));

    __ incq(rax);
    __ pextq(r8, rdx, rcx);
    __ movq(r9, V8_UINT64_C(0x0000000003fffffe));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ pextq(r8, rdx, Operand(rsp, 0));
    __ movq(r9, V8_UINT64_C(0x0000000003fffffe));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ pextl(r8, rdx, rcx);
    __ movq(r9, V8_UINT64_C(0x000000000000fffe));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ pextl(r8, rdx, Operand(rsp, 0));
    __ movq(r9, V8_UINT64_C(0x000000000000fffe));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    // sarx
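    // sarx, shlx and shrx in this group take their shift count (4 here) from
    // a register and, unlike the legacy shift instructions, leave the flags
    // untouched.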
    __ movq(rdx, V8_UINT64_C(0x0000000000000004));

    __ incq(rax);
    __ sarxq(r8, rcx, rdx);
    __ movq(r9, V8_UINT64_C(0x0112233445566778));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ sarxq(r8, Operand(rsp, 0), rdx);
    __ movq(r9, V8_UINT64_C(0x0112233445566778));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ sarxl(r8, rcx, rdx);
    __ movq(r9, V8_UINT64_C(0x0000000005566778));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ sarxl(r8, Operand(rsp, 0), rdx);
    __ movq(r9, V8_UINT64_C(0x0000000005566778));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    // shlx
    __ movq(rdx, V8_UINT64_C(0x0000000000000004));

    __ incq(rax);
    __ shlxq(r8, rcx, rdx);
    __ movq(r9, V8_UINT64_C(0x1223344556677880));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ shlxq(r8, Operand(rsp, 0), rdx);
    __ movq(r9, V8_UINT64_C(0x1223344556677880));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ shlxl(r8, rcx, rdx);
    __ movq(r9, V8_UINT64_C(0x0000000056677880));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ shlxl(r8, Operand(rsp, 0), rdx);
    __ movq(r9, V8_UINT64_C(0x0000000056677880));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    // shrx
    __ movq(rdx, V8_UINT64_C(0x0000000000000004));

    __ incq(rax);
    __ shrxq(r8, rcx, rdx);
    __ movq(r9, V8_UINT64_C(0x0112233445566778));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ shrxq(r8, Operand(rsp, 0), rdx);
    __ movq(r9, V8_UINT64_C(0x0112233445566778));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ shrxl(r8, rcx, rdx);
    __ movq(r9, V8_UINT64_C(0x0000000005566778));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ shrxl(r8, Operand(rsp, 0), rdx);
    __ movq(r9, V8_UINT64_C(0x0000000005566778));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    // rorx
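    // rorx rotates right by an immediate count without affecting the flags;
    // rotating 0x1122334455667788 right by 4 moves the low nibble to the top.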
    __ incq(rax);
    __ rorxq(r8, rcx, 0x4);
    __ movq(r9, V8_UINT64_C(0x8112233445566778));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ rorxq(r8, Operand(rsp, 0), 0x4);
    __ movq(r9, V8_UINT64_C(0x8112233445566778));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ rorxl(r8, rcx, 0x4);
    __ movq(r9, V8_UINT64_C(0x0000000085566778));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ incq(rax);
    __ rorxl(r8, Operand(rsp, 0), 0x4);
    __ movq(r9, V8_UINT64_C(0x0000000085566778));  // expected result
    __ cmpq(r8, r9);
    __ j(not_equal, &exit);

    __ xorl(rax, rax);
    __ bind(&exit);
    __ popq(rcx);
    __ popq(rbx);
    __ ret(0);
  }

  CodeDesc desc;
  assm.GetCode(&desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
#ifdef OBJECT_PRINT
  OFStream os(stdout);
  code->Print(os);
#endif

  F0 f = FUNCTION_CAST<F0>(code->entry());
  CHECK_EQ(0, f());
}


TEST(AssemblerX64JumpTables1) {
  // Test jump tables with forward jumps.
  CcTest::InitializeVM();
  Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
  HandleScope scope(isolate);
  MacroAssembler assm(isolate, nullptr, 0,
                      v8::internal::CodeObjectRequired::kYes);

  const int kNumCases = 512;
  int values[kNumCases];
  isolate->random_number_generator()->NextBytes(values, sizeof(values));
  Label labels[kNumCases];

  Label done, table;
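  // Dispatch: arg2 gets the table address and the indirect jmp reads the
  // 8-byte entry selected by arg1, so each case index lands on its label.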
  __ leaq(arg2, Operand(&table));
  __ jmp(Operand(arg2, arg1, times_8, 0));
  __ ud2();
  __ bind(&table);
  for (int i = 0; i < kNumCases; ++i) {
    __ dq(&labels[i]);
  }

  for (int i = 0; i < kNumCases; ++i) {
    __ bind(&labels[i]);
    __ movq(rax, Immediate(values[i]));
    __ jmp(&done);
  }

  __ bind(&done);
  __ ret(0);

  CodeDesc desc;
  assm.GetCode(&desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
#ifdef OBJECT_PRINT
  code->Print(std::cout);
#endif

  F1 f = FUNCTION_CAST<F1>(code->entry());
  for (int i = 0; i < kNumCases; ++i) {
    int res = f(i);
    PrintF("f(%d) = %d\n", i, res);
    CHECK_EQ(values[i], res);
  }
}


TEST(AssemblerX64JumpTables2) {
  // Test jump tables with backwards jumps.
  CcTest::InitializeVM();
  Isolate* isolate = reinterpret_cast<Isolate*>(CcTest::isolate());
  HandleScope scope(isolate);
  MacroAssembler assm(isolate, nullptr, 0,
                      v8::internal::CodeObjectRequired::kYes);

  const int kNumCases = 512;
  int values[kNumCases];
  isolate->random_number_generator()->NextBytes(values, sizeof(values));
  Label labels[kNumCases];

  Label done, table;
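  // Same dispatch as above, but the table is emitted after the case bodies,
  // so its entries are written for labels that are already bound.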
  __ leaq(arg2, Operand(&table));
  __ jmp(Operand(arg2, arg1, times_8, 0));
  __ ud2();

  for (int i = 0; i < kNumCases; ++i) {
    __ bind(&labels[i]);
    __ movq(rax, Immediate(values[i]));
    __ jmp(&done);
  }

  __ bind(&done);
  __ ret(0);

  __ bind(&table);
  for (int i = 0; i < kNumCases; ++i) {
    __ dq(&labels[i]);
  }

  CodeDesc desc;
  assm.GetCode(&desc);
  Handle<Code> code = isolate->factory()->NewCode(
      desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
#ifdef OBJECT_PRINT
  code->Print(std::cout);
#endif

  F1 f = FUNCTION_CAST<F1>(code->entry());
  for (int i = 0; i < kNumCases; ++i) {
    int res = f(i);
    PrintF("f(%d) = %d\n", i, res);
    CHECK_EQ(values[i], res);
  }
}

#undef __