// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <cstdint>

#include "src/base/platform/elapsed-timer.h"
#include "src/signature.h"

#include "src/bit-vector.h"
#include "src/flags.h"
#include "src/handles.h"
#include "src/zone-containers.h"

#include "src/wasm/ast-decoder.h"
#include "src/wasm/decoder.h"
#include "src/wasm/wasm-module.h"
#include "src/wasm/wasm-opcodes.h"

#include "src/compiler/wasm-compiler.h"
19
20namespace v8 {
21namespace internal {
22namespace wasm {
23
// Tracing macro: in debug builds, prints via PrintF when the
// --trace-wasm-decoder flag is set; compiles away entirely in release builds.
#if DEBUG
#define TRACE(...)                                    \
  do {                                                \
    if (FLAG_trace_wasm_decoder) PrintF(__VA_ARGS__); \
  } while (false)
#else
#define TRACE(...)
#endif
32
33// The root of a decoded tree.
34struct Tree {
35 LocalType type; // tree type.
36 uint32_t count; // number of children.
37 const byte* pc; // start of the syntax tree.
38 TFNode* node; // node in the TurboFan graph.
39 Tree* children[1]; // pointers to children.
40
41 WasmOpcode opcode() const { return static_cast<WasmOpcode>(*pc); }
42};
43
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000044// A production represents an incomplete decoded tree in the LR decoder.
45struct Production {
46 Tree* tree; // the root of the syntax tree.
47 int index; // the current index into the children of the tree.
48
49 WasmOpcode opcode() const { return static_cast<WasmOpcode>(*pc()); }
50 const byte* pc() const { return tree->pc; }
51 bool done() const { return index >= static_cast<int>(tree->count); }
52 Tree* last() const { return index > 0 ? tree->children[index - 1] : nullptr; }
53};
54
55
56// An SsaEnv environment carries the current local variable renaming
57// as well as the current effect and control dependency in the TF graph.
58// It maintains a control state that tracks whether the environment
59// is reachable, has reached a control end, or has been merged.
60struct SsaEnv {
61 enum State { kControlEnd, kUnreachable, kReached, kMerged };
62
63 State state;
64 TFNode* control;
65 TFNode* effect;
66 TFNode** locals;
67
68 bool go() { return state >= kReached; }
69 void Kill(State new_state = kControlEnd) {
70 state = new_state;
71 locals = nullptr;
72 control = nullptr;
73 effect = nullptr;
74 }
75};
76
77
78// An entry in the stack of blocks during decoding.
79struct Block {
80 SsaEnv* ssa_env; // SSA renaming environment.
81 int stack_depth; // production stack depth.
82};
83
84
85// An entry in the stack of ifs during decoding.
86struct IfEnv {
87 SsaEnv* false_env;
88 SsaEnv* merge_env;
89 SsaEnv** case_envs;
90};
91
92
// Macros that build nodes only if there is a graph and the current SSA
// environment is reachable from start. This avoids problems with malformed
// TF graphs when decoding inputs that have unreachable code.
#define BUILD(func, ...) (build() ? builder_->func(__VA_ARGS__) : nullptr)
#define BUILD0(func) (build() ? builder_->func() : nullptr)
98
99
Ben Murdoch097c5b22016-05-18 11:27:45 +0100100// Generic Wasm bytecode decoder with utilities for decoding operands,
101// lengths, etc.
102class WasmDecoder : public Decoder {
103 public:
104 WasmDecoder() : Decoder(nullptr, nullptr), function_env_(nullptr) {}
105 WasmDecoder(FunctionEnv* env, const byte* start, const byte* end)
106 : Decoder(start, end), function_env_(env) {}
107 FunctionEnv* function_env_;
108
109 void Reset(FunctionEnv* function_env, const byte* start, const byte* end) {
110 Decoder::Reset(start, end);
111 function_env_ = function_env;
112 }
113
114 byte ByteOperand(const byte* pc, const char* msg = "missing 1-byte operand") {
115 if ((pc + sizeof(byte)) >= limit_) {
116 error(pc, msg);
117 return 0;
118 }
119 return pc[1];
120 }
121
122 uint32_t Uint32Operand(const byte* pc) {
123 if ((pc + sizeof(uint32_t)) >= limit_) {
124 error(pc, "missing 4-byte operand");
125 return 0;
126 }
127 return read_u32(pc + 1);
128 }
129
130 uint64_t Uint64Operand(const byte* pc) {
131 if ((pc + sizeof(uint64_t)) >= limit_) {
132 error(pc, "missing 8-byte operand");
133 return 0;
134 }
135 return read_u64(pc + 1);
136 }
137
138 inline bool Validate(const byte* pc, LocalIndexOperand& operand) {
139 if (operand.index < function_env_->total_locals) {
140 operand.type = function_env_->GetLocalType(operand.index);
141 return true;
142 }
143 error(pc, pc + 1, "invalid local index");
144 return false;
145 }
146
147 inline bool Validate(const byte* pc, GlobalIndexOperand& operand) {
148 ModuleEnv* m = function_env_->module;
149 if (m && m->module && operand.index < m->module->globals->size()) {
150 operand.machine_type = m->module->globals->at(operand.index).type;
151 operand.type = WasmOpcodes::LocalTypeFor(operand.machine_type);
152 return true;
153 }
154 error(pc, pc + 1, "invalid global index");
155 return false;
156 }
157
158 inline bool Validate(const byte* pc, FunctionIndexOperand& operand) {
159 ModuleEnv* m = function_env_->module;
160 if (m && m->module && operand.index < m->module->functions->size()) {
161 operand.sig = m->module->functions->at(operand.index).sig;
162 return true;
163 }
164 error(pc, pc + 1, "invalid function index");
165 return false;
166 }
167
168 inline bool Validate(const byte* pc, SignatureIndexOperand& operand) {
169 ModuleEnv* m = function_env_->module;
170 if (m && m->module && operand.index < m->module->signatures->size()) {
171 operand.sig = m->module->signatures->at(operand.index);
172 return true;
173 }
174 error(pc, pc + 1, "invalid signature index");
175 return false;
176 }
177
178 inline bool Validate(const byte* pc, ImportIndexOperand& operand) {
179 ModuleEnv* m = function_env_->module;
180 if (m && m->module && operand.index < m->module->import_table->size()) {
181 operand.sig = m->module->import_table->at(operand.index).sig;
182 return true;
183 }
184 error(pc, pc + 1, "invalid signature index");
185 return false;
186 }
187
188 inline bool Validate(const byte* pc, BreakDepthOperand& operand,
189 ZoneVector<Block>& blocks) {
190 if (operand.depth < blocks.size()) {
191 operand.target = &blocks[blocks.size() - operand.depth - 1];
192 return true;
193 }
194 error(pc, pc + 1, "invalid break depth");
195 return false;
196 }
197
198 bool Validate(const byte* pc, TableSwitchOperand& operand,
199 size_t block_depth) {
200 if (operand.table_count == 0) {
201 error(pc, "tableswitch with 0 entries");
202 return false;
203 }
204 // Verify table.
205 for (uint32_t i = 0; i < operand.table_count; i++) {
206 uint16_t target = operand.read_entry(this, i);
207 if (target >= 0x8000) {
208 size_t depth = target - 0x8000;
209 if (depth > block_depth) {
210 error(operand.table + i * 2, "improper branch in tableswitch");
211 return false;
212 }
213 } else {
214 if (target >= operand.case_count) {
215 error(operand.table + i * 2, "invalid case target in tableswitch");
216 return false;
217 }
218 }
219 }
220 return true;
221 }
222
223 int OpcodeArity(const byte* pc) {
224#define DECLARE_ARITY(name, ...) \
225 static const LocalType kTypes_##name[] = {__VA_ARGS__}; \
226 static const int kArity_##name = \
227 static_cast<int>(arraysize(kTypes_##name) - 1);
228
229 FOREACH_SIGNATURE(DECLARE_ARITY);
230#undef DECLARE_ARITY
231
232 switch (static_cast<WasmOpcode>(*pc)) {
233 case kExprI8Const:
234 case kExprI32Const:
235 case kExprI64Const:
236 case kExprF64Const:
237 case kExprF32Const:
238 case kExprGetLocal:
239 case kExprLoadGlobal:
240 case kExprNop:
241 case kExprUnreachable:
242 return 0;
243
244 case kExprBr:
245 case kExprStoreGlobal:
246 case kExprSetLocal:
247 return 1;
248
249 case kExprIf:
250 case kExprBrIf:
251 return 2;
252 case kExprIfElse:
253 case kExprSelect:
254 return 3;
255
256 case kExprBlock:
257 case kExprLoop: {
258 BlockCountOperand operand(this, pc);
259 return operand.count;
260 }
261
262 case kExprCallFunction: {
263 FunctionIndexOperand operand(this, pc);
264 return static_cast<int>(
265 function_env_->module->GetFunctionSignature(operand.index)
266 ->parameter_count());
267 }
268 case kExprCallIndirect: {
269 SignatureIndexOperand operand(this, pc);
270 return 1 + static_cast<int>(
271 function_env_->module->GetSignature(operand.index)
272 ->parameter_count());
273 }
274 case kExprCallImport: {
275 ImportIndexOperand operand(this, pc);
276 return static_cast<int>(
277 function_env_->module->GetImportSignature(operand.index)
278 ->parameter_count());
279 }
280 case kExprReturn: {
281 return static_cast<int>(function_env_->sig->return_count());
282 }
283 case kExprTableSwitch: {
284 TableSwitchOperand operand(this, pc);
285 return 1 + operand.case_count;
286 }
287
288#define DECLARE_OPCODE_CASE(name, opcode, sig) \
289 case kExpr##name: \
290 return kArity_##sig;
291
292 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE)
293 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE)
294 FOREACH_MISC_MEM_OPCODE(DECLARE_OPCODE_CASE)
295 FOREACH_SIMPLE_OPCODE(DECLARE_OPCODE_CASE)
296#undef DECLARE_OPCODE_CASE
297 }
298 UNREACHABLE();
299 return 0;
300 }
301
302 int OpcodeLength(const byte* pc) {
303 switch (static_cast<WasmOpcode>(*pc)) {
304#define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
305 FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE)
306 FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE)
307#undef DECLARE_OPCODE_CASE
308 {
309 MemoryAccessOperand operand(this, pc);
310 return 1 + operand.length;
311 }
312 case kExprBlock:
313 case kExprLoop: {
314 BlockCountOperand operand(this, pc);
315 return 1 + operand.length;
316 }
317 case kExprBr:
318 case kExprBrIf: {
319 BreakDepthOperand operand(this, pc);
320 return 1 + operand.length;
321 }
322 case kExprStoreGlobal:
323 case kExprLoadGlobal: {
324 GlobalIndexOperand operand(this, pc);
325 return 1 + operand.length;
326 }
327
328 case kExprCallFunction: {
329 FunctionIndexOperand operand(this, pc);
330 return 1 + operand.length;
331 }
332 case kExprCallIndirect: {
333 SignatureIndexOperand operand(this, pc);
334 return 1 + operand.length;
335 }
336 case kExprCallImport: {
337 ImportIndexOperand operand(this, pc);
338 return 1 + operand.length;
339 }
340
341 case kExprSetLocal:
342 case kExprGetLocal: {
343 LocalIndexOperand operand(this, pc);
344 return 1 + operand.length;
345 }
346 case kExprTableSwitch: {
347 TableSwitchOperand operand(this, pc);
348 return 1 + operand.length;
349 }
350 case kExprI8Const:
351 return 2;
352 case kExprI32Const:
353 case kExprF32Const:
354 return 5;
355 case kExprI64Const:
356 case kExprF64Const:
357 return 9;
358
359 default:
360 return 1;
361 }
362 }
363};
364
365
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000366// A shift-reduce-parser strategy for decoding Wasm code that uses an explicit
367// shift-reduce strategy with multiple internal stacks.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100368class LR_WasmDecoder : public WasmDecoder {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000369 public:
370 LR_WasmDecoder(Zone* zone, TFBuilder* builder)
Ben Murdoch097c5b22016-05-18 11:27:45 +0100371 : zone_(zone),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000372 builder_(builder),
373 trees_(zone),
374 stack_(zone),
375 blocks_(zone),
376 ifs_(zone) {}
377
378 TreeResult Decode(FunctionEnv* function_env, const byte* base, const byte* pc,
379 const byte* end) {
380 base::ElapsedTimer decode_timer;
381 if (FLAG_trace_wasm_decode_time) {
382 decode_timer.Start();
383 }
384 trees_.clear();
385 stack_.clear();
386 blocks_.clear();
387 ifs_.clear();
388
389 if (end < pc) {
390 error(pc, "function body end < start");
391 return result_;
392 }
393
394 base_ = base;
Ben Murdoch097c5b22016-05-18 11:27:45 +0100395 Reset(function_env, pc, end);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000396
397 InitSsaEnv();
398 DecodeFunctionBody();
399
400 Tree* tree = nullptr;
401 if (ok()) {
402 if (ssa_env_->go()) {
403 if (stack_.size() > 0) {
404 error(stack_.back().pc(), end, "fell off end of code");
405 }
406 AddImplicitReturnAtEnd();
407 }
408 if (trees_.size() == 0) {
409 if (function_env_->sig->return_count() > 0) {
410 error(start_, "no trees created");
411 }
412 } else {
413 tree = trees_[0];
414 }
415 }
416
417 if (ok()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100418 if (FLAG_trace_wasm_ast) {
419 PrintAst(function_env, pc, end);
420 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000421 if (FLAG_trace_wasm_decode_time) {
422 double ms = decode_timer.Elapsed().InMillisecondsF();
Ben Murdoch097c5b22016-05-18 11:27:45 +0100423 PrintF("wasm-decode ok (%0.3f ms)\n\n", ms);
424 } else {
425 TRACE("wasm-decode ok\n\n");
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000426 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000427 } else {
428 TRACE("wasm-error module+%-6d func+%d: %s\n\n", baserel(error_pc_),
429 startrel(error_pc_), error_msg_.get());
430 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100431
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000432 return toResult(tree);
433 }
434
435 private:
436 static const size_t kErrorMsgSize = 128;
437
438 Zone* zone_;
439 TFBuilder* builder_;
440 const byte* base_;
441 TreeResult result_;
442
443 SsaEnv* ssa_env_;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000444
445 ZoneVector<Tree*> trees_;
446 ZoneVector<Production> stack_;
447 ZoneVector<Block> blocks_;
448 ZoneVector<IfEnv> ifs_;
449
450 inline bool build() { return builder_ && ssa_env_->go(); }
451
452 void InitSsaEnv() {
453 FunctionSig* sig = function_env_->sig;
454 int param_count = static_cast<int>(sig->parameter_count());
455 TFNode* start = nullptr;
456 SsaEnv* ssa_env = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv)));
457 size_t size = sizeof(TFNode*) * EnvironmentCount();
458 ssa_env->state = SsaEnv::kReached;
459 ssa_env->locals =
460 size > 0 ? reinterpret_cast<TFNode**>(zone_->New(size)) : nullptr;
461
462 int pos = 0;
463 if (builder_) {
464 start = builder_->Start(param_count + 1);
465 // Initialize parameters.
466 for (int i = 0; i < param_count; i++) {
467 ssa_env->locals[pos++] = builder_->Param(i, sig->GetParam(i));
468 }
469 // Initialize int32 locals.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100470 if (function_env_->local_i32_count > 0) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000471 TFNode* zero = builder_->Int32Constant(0);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100472 for (uint32_t i = 0; i < function_env_->local_i32_count; i++) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000473 ssa_env->locals[pos++] = zero;
474 }
475 }
476 // Initialize int64 locals.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100477 if (function_env_->local_i64_count > 0) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000478 TFNode* zero = builder_->Int64Constant(0);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100479 for (uint32_t i = 0; i < function_env_->local_i64_count; i++) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000480 ssa_env->locals[pos++] = zero;
481 }
482 }
483 // Initialize float32 locals.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100484 if (function_env_->local_f32_count > 0) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000485 TFNode* zero = builder_->Float32Constant(0);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100486 for (uint32_t i = 0; i < function_env_->local_f32_count; i++) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000487 ssa_env->locals[pos++] = zero;
488 }
489 }
490 // Initialize float64 locals.
Ben Murdoch097c5b22016-05-18 11:27:45 +0100491 if (function_env_->local_f64_count > 0) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000492 TFNode* zero = builder_->Float64Constant(0);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100493 for (uint32_t i = 0; i < function_env_->local_f64_count; i++) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000494 ssa_env->locals[pos++] = zero;
495 }
496 }
497 DCHECK_EQ(function_env_->total_locals, pos);
498 DCHECK_EQ(EnvironmentCount(), pos);
499 builder_->set_module(function_env_->module);
500 }
501 ssa_env->control = start;
502 ssa_env->effect = start;
503 SetEnv("initial", ssa_env);
504 }
505
506 void Leaf(LocalType type, TFNode* node = nullptr) {
507 size_t size = sizeof(Tree);
508 Tree* tree = reinterpret_cast<Tree*>(zone_->New(size));
509 tree->type = type;
510 tree->count = 0;
511 tree->pc = pc_;
512 tree->node = node;
513 tree->children[0] = nullptr;
514 Reduce(tree);
515 }
516
517 void Shift(LocalType type, uint32_t count) {
518 size_t size =
519 sizeof(Tree) + (count == 0 ? 0 : ((count - 1) * sizeof(Tree*)));
520 Tree* tree = reinterpret_cast<Tree*>(zone_->New(size));
521 tree->type = type;
522 tree->count = count;
523 tree->pc = pc_;
524 tree->node = nullptr;
525 for (uint32_t i = 0; i < count; i++) tree->children[i] = nullptr;
526 if (count == 0) {
527 Production p = {tree, 0};
528 Reduce(&p);
529 Reduce(tree);
530 } else {
531 stack_.push_back({tree, 0});
532 }
533 }
534
535 void Reduce(Tree* tree) {
536 while (true) {
537 if (stack_.size() == 0) {
538 trees_.push_back(tree);
539 break;
540 }
541 Production* p = &stack_.back();
542 p->tree->children[p->index++] = tree;
543 Reduce(p);
544 if (p->done()) {
545 tree = p->tree;
546 stack_.pop_back();
547 } else {
548 break;
549 }
550 }
551 }
552
553 char* indentation() {
554 static const int kMaxIndent = 64;
555 static char bytes[kMaxIndent + 1];
556 for (int i = 0; i < kMaxIndent; i++) bytes[i] = ' ';
557 bytes[kMaxIndent] = 0;
558 if (stack_.size() < kMaxIndent / 2) {
559 bytes[stack_.size() * 2] = 0;
560 }
561 return bytes;
562 }
563
564 // Decodes the body of a function, producing reduced trees into {result}.
565 void DecodeFunctionBody() {
566 TRACE("wasm-decode %p...%p (%d bytes) %s\n",
567 reinterpret_cast<const void*>(start_),
568 reinterpret_cast<const void*>(limit_),
569 static_cast<int>(limit_ - start_), builder_ ? "graph building" : "");
570
571 if (pc_ >= limit_) return; // Nothing to do.
572
573 while (true) { // decoding loop.
574 int len = 1;
575 WasmOpcode opcode = static_cast<WasmOpcode>(*pc_);
576 TRACE("wasm-decode module+%-6d %s func+%d: 0x%02x %s\n", baserel(pc_),
577 indentation(), startrel(pc_), opcode,
578 WasmOpcodes::OpcodeName(opcode));
579
580 FunctionSig* sig = WasmOpcodes::Signature(opcode);
581 if (sig) {
582 // A simple expression with a fixed signature.
583 Shift(sig->GetReturn(), static_cast<uint32_t>(sig->parameter_count()));
584 pc_ += len;
585 if (pc_ >= limit_) {
586 // End of code reached or exceeded.
587 if (pc_ > limit_ && ok()) {
588 error("Beyond end of code");
589 }
590 return;
591 }
592 continue; // back to decoding loop.
593 }
594
595 switch (opcode) {
596 case kExprNop:
597 Leaf(kAstStmt);
598 break;
599 case kExprBlock: {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100600 BlockCountOperand operand(this, pc_);
601 if (operand.count < 1) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000602 Leaf(kAstStmt);
603 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100604 Shift(kAstEnd, operand.count);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000605 // The break environment is the outer environment.
606 SsaEnv* break_env = ssa_env_;
607 PushBlock(break_env);
608 SetEnv("block:start", Steal(break_env));
609 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100610 len = 1 + operand.length;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000611 break;
612 }
613 case kExprLoop: {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100614 BlockCountOperand operand(this, pc_);
615 if (operand.count < 1) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000616 Leaf(kAstStmt);
617 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100618 Shift(kAstEnd, operand.count);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000619 // The break environment is the outer environment.
620 SsaEnv* break_env = ssa_env_;
621 PushBlock(break_env);
622 SsaEnv* cont_env = Steal(break_env);
623 // The continue environment is the inner environment.
624 PrepareForLoop(cont_env);
625 SetEnv("loop:start", Split(cont_env));
626 if (ssa_env_->go()) ssa_env_->state = SsaEnv::kReached;
627 PushBlock(cont_env);
628 blocks_.back().stack_depth = -1; // no production for inner block.
629 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100630 len = 1 + operand.length;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000631 break;
632 }
633 case kExprIf:
634 Shift(kAstStmt, 2);
635 break;
636 case kExprIfElse:
637 Shift(kAstEnd, 3); // Result type is typeof(x) in {c ? x : y}.
638 break;
639 case kExprSelect:
640 Shift(kAstStmt, 3); // Result type is typeof(x) in {c ? x : y}.
641 break;
642 case kExprBr: {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100643 BreakDepthOperand operand(this, pc_);
644 if (Validate(pc_, operand, blocks_)) {
645 Shift(kAstEnd, 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000646 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100647 len = 1 + operand.length;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000648 break;
649 }
650 case kExprBrIf: {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100651 BreakDepthOperand operand(this, pc_);
652 if (Validate(pc_, operand, blocks_)) {
653 Shift(kAstStmt, 2);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000654 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100655 len = 1 + operand.length;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000656 break;
657 }
658 case kExprTableSwitch: {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100659 TableSwitchOperand operand(this, pc_);
660 if (Validate(pc_, operand, blocks_.size())) {
661 Shift(kAstEnd, 1 + operand.case_count);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000662 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100663 len = 1 + operand.length;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000664 break;
665 }
666 case kExprReturn: {
667 int count = static_cast<int>(function_env_->sig->return_count());
668 if (count == 0) {
669 BUILD(Return, 0, builder_->Buffer(0));
670 ssa_env_->Kill();
671 Leaf(kAstEnd);
672 } else {
673 Shift(kAstEnd, count);
674 }
675 break;
676 }
677 case kExprUnreachable: {
678 BUILD0(Unreachable);
679 ssa_env_->Kill(SsaEnv::kControlEnd);
680 Leaf(kAstEnd, nullptr);
681 break;
682 }
683 case kExprI8Const: {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100684 ImmI8Operand operand(this, pc_);
685 Leaf(kAstI32, BUILD(Int32Constant, operand.value));
686 len = 1 + operand.length;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000687 break;
688 }
689 case kExprI32Const: {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100690 ImmI32Operand operand(this, pc_);
691 Leaf(kAstI32, BUILD(Int32Constant, operand.value));
692 len = 1 + operand.length;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000693 break;
694 }
695 case kExprI64Const: {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100696 ImmI64Operand operand(this, pc_);
697 Leaf(kAstI64, BUILD(Int64Constant, operand.value));
698 len = 1 + operand.length;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000699 break;
700 }
701 case kExprF32Const: {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100702 ImmF32Operand operand(this, pc_);
703 Leaf(kAstF32, BUILD(Float32Constant, operand.value));
704 len = 1 + operand.length;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000705 break;
706 }
707 case kExprF64Const: {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100708 ImmF64Operand operand(this, pc_);
709 Leaf(kAstF64, BUILD(Float64Constant, operand.value));
710 len = 1 + operand.length;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000711 break;
712 }
713 case kExprGetLocal: {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100714 LocalIndexOperand operand(this, pc_);
715 if (Validate(pc_, operand)) {
716 TFNode* val = build() ? ssa_env_->locals[operand.index] : nullptr;
717 Leaf(operand.type, val);
718 }
719 len = 1 + operand.length;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000720 break;
721 }
722 case kExprSetLocal: {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100723 LocalIndexOperand operand(this, pc_);
724 if (Validate(pc_, operand)) {
725 Shift(operand.type, 1);
726 }
727 len = 1 + operand.length;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000728 break;
729 }
730 case kExprLoadGlobal: {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100731 GlobalIndexOperand operand(this, pc_);
732 if (Validate(pc_, operand)) {
733 Leaf(operand.type, BUILD(LoadGlobal, operand.index));
734 }
735 len = 1 + operand.length;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000736 break;
737 }
738 case kExprStoreGlobal: {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100739 GlobalIndexOperand operand(this, pc_);
740 if (Validate(pc_, operand)) {
741 Shift(operand.type, 1);
742 }
743 len = 1 + operand.length;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000744 break;
745 }
746 case kExprI32LoadMem8S:
747 case kExprI32LoadMem8U:
748 case kExprI32LoadMem16S:
749 case kExprI32LoadMem16U:
750 case kExprI32LoadMem:
751 len = DecodeLoadMem(pc_, kAstI32);
752 break;
753 case kExprI64LoadMem8S:
754 case kExprI64LoadMem8U:
755 case kExprI64LoadMem16S:
756 case kExprI64LoadMem16U:
757 case kExprI64LoadMem32S:
758 case kExprI64LoadMem32U:
759 case kExprI64LoadMem:
760 len = DecodeLoadMem(pc_, kAstI64);
761 break;
762 case kExprF32LoadMem:
763 len = DecodeLoadMem(pc_, kAstF32);
764 break;
765 case kExprF64LoadMem:
766 len = DecodeLoadMem(pc_, kAstF64);
767 break;
768 case kExprI32StoreMem8:
769 case kExprI32StoreMem16:
770 case kExprI32StoreMem:
771 len = DecodeStoreMem(pc_, kAstI32);
772 break;
773 case kExprI64StoreMem8:
774 case kExprI64StoreMem16:
775 case kExprI64StoreMem32:
776 case kExprI64StoreMem:
777 len = DecodeStoreMem(pc_, kAstI64);
778 break;
779 case kExprF32StoreMem:
780 len = DecodeStoreMem(pc_, kAstF32);
781 break;
782 case kExprF64StoreMem:
783 len = DecodeStoreMem(pc_, kAstF64);
784 break;
785 case kExprMemorySize:
786 Leaf(kAstI32, BUILD(MemSize, 0));
787 break;
788 case kExprGrowMemory:
789 Shift(kAstI32, 1);
790 break;
791 case kExprCallFunction: {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100792 FunctionIndexOperand operand(this, pc_);
793 if (Validate(pc_, operand)) {
794 LocalType type = operand.sig->return_count() == 0
795 ? kAstStmt
796 : operand.sig->GetReturn();
797 Shift(type, static_cast<int>(operand.sig->parameter_count()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000798 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100799 len = 1 + operand.length;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000800 break;
801 }
802 case kExprCallIndirect: {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100803 SignatureIndexOperand operand(this, pc_);
804 if (Validate(pc_, operand)) {
805 LocalType type = operand.sig->return_count() == 0
806 ? kAstStmt
807 : operand.sig->GetReturn();
808 Shift(type, static_cast<int>(1 + operand.sig->parameter_count()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000809 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100810 len = 1 + operand.length;
811 break;
812 }
813 case kExprCallImport: {
814 ImportIndexOperand operand(this, pc_);
815 if (Validate(pc_, operand)) {
816 LocalType type = operand.sig->return_count() == 0
817 ? kAstStmt
818 : operand.sig->GetReturn();
819 Shift(type, static_cast<int>(operand.sig->parameter_count()));
820 }
821 len = 1 + operand.length;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000822 break;
823 }
824 default:
825 error("Invalid opcode");
826 return;
827 }
828 pc_ += len;
829 if (pc_ >= limit_) {
830 // End of code reached or exceeded.
831 if (pc_ > limit_ && ok()) {
832 error("Beyond end of code");
833 }
834 return;
835 }
836 }
837 }
838
839 void PushBlock(SsaEnv* ssa_env) {
840 blocks_.push_back({ssa_env, static_cast<int>(stack_.size() - 1)});
841 }
842
843 int DecodeLoadMem(const byte* pc, LocalType type) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100844 MemoryAccessOperand operand(this, pc);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000845 Shift(type, 1);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100846 return 1 + operand.length;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000847 }
848
849 int DecodeStoreMem(const byte* pc, LocalType type) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100850 MemoryAccessOperand operand(this, pc);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000851 Shift(type, 2);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100852 return 1 + operand.length;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000853 }
854
855 void AddImplicitReturnAtEnd() {
856 int retcount = static_cast<int>(function_env_->sig->return_count());
857 if (retcount == 0) {
858 BUILD0(ReturnVoid);
859 return;
860 }
861
862 if (static_cast<int>(trees_.size()) < retcount) {
863 error(limit_, nullptr,
864 "ImplicitReturn expects %d arguments, only %d remain", retcount,
865 static_cast<int>(trees_.size()));
866 return;
867 }
868
869 TRACE("wasm-decode implicit return of %d args\n", retcount);
870
871 TFNode** buffer = BUILD(Buffer, retcount);
872 for (int index = 0; index < retcount; index++) {
873 Tree* tree = trees_[trees_.size() - 1 - index];
874 if (buffer) buffer[index] = tree->node;
875 LocalType expected = function_env_->sig->GetReturn(index);
876 if (tree->type != expected) {
877 error(limit_, tree->pc,
878 "ImplicitReturn[%d] expected type %s, found %s of type %s", index,
879 WasmOpcodes::TypeName(expected),
880 WasmOpcodes::OpcodeName(tree->opcode()),
881 WasmOpcodes::TypeName(tree->type));
882 return;
883 }
884 }
885
886 BUILD(Return, retcount, buffer);
887 }
888
889 int baserel(const byte* ptr) {
890 return base_ ? static_cast<int>(ptr - base_) : 0;
891 }
892
893 int startrel(const byte* ptr) { return static_cast<int>(ptr - start_); }
894
895 void Reduce(Production* p) {
896 WasmOpcode opcode = p->opcode();
897 TRACE("-----reduce module+%-6d %s func+%d: 0x%02x %s\n", baserel(p->pc()),
898 indentation(), startrel(p->pc()), opcode,
899 WasmOpcodes::OpcodeName(opcode));
900 FunctionSig* sig = WasmOpcodes::Signature(opcode);
901 if (sig) {
902 // A simple expression with a fixed signature.
903 TypeCheckLast(p, sig->GetParam(p->index - 1));
904 if (p->done() && build()) {
905 if (sig->parameter_count() == 2) {
906 p->tree->node = builder_->Binop(opcode, p->tree->children[0]->node,
907 p->tree->children[1]->node);
908 } else if (sig->parameter_count() == 1) {
909 p->tree->node = builder_->Unop(opcode, p->tree->children[0]->node);
910 } else {
911 UNREACHABLE();
912 }
913 }
914 return;
915 }
916
917 switch (opcode) {
918 case kExprBlock: {
919 if (p->done()) {
920 Block* last = &blocks_.back();
921 DCHECK_EQ(stack_.size() - 1, last->stack_depth);
922 // fallthrough with the last expression.
923 ReduceBreakToExprBlock(p, last);
924 SetEnv("block:end", last->ssa_env);
925 blocks_.pop_back();
926 }
927 break;
928 }
929 case kExprLoop: {
930 if (p->done()) {
931 // Pop the continue environment.
932 blocks_.pop_back();
933 // Get the break environment.
934 Block* last = &blocks_.back();
935 DCHECK_EQ(stack_.size() - 1, last->stack_depth);
936 // fallthrough with the last expression.
937 ReduceBreakToExprBlock(p, last);
938 SetEnv("loop:end", last->ssa_env);
939 blocks_.pop_back();
940 }
941 break;
942 }
943 case kExprIf: {
944 if (p->index == 1) {
945 // Condition done. Split environment for true branch.
946 TypeCheckLast(p, kAstI32);
947 SsaEnv* false_env = ssa_env_;
948 SsaEnv* true_env = Split(ssa_env_);
949 ifs_.push_back({nullptr, false_env, nullptr});
950 BUILD(Branch, p->last()->node, &true_env->control,
951 &false_env->control);
952 SetEnv("if:true", true_env);
953 } else if (p->index == 2) {
954 // True block done. Merge true and false environments.
955 IfEnv* env = &ifs_.back();
956 SsaEnv* merge = env->merge_env;
957 if (merge->go()) {
958 merge->state = SsaEnv::kReached;
959 Goto(ssa_env_, merge);
960 }
961 SetEnv("if:merge", merge);
962 ifs_.pop_back();
963 }
964 break;
965 }
966 case kExprIfElse: {
967 if (p->index == 1) {
968 // Condition done. Split environment for true and false branches.
969 TypeCheckLast(p, kAstI32);
970 SsaEnv* merge_env = ssa_env_;
971 TFNode* if_true = nullptr;
972 TFNode* if_false = nullptr;
973 BUILD(Branch, p->last()->node, &if_true, &if_false);
974 SsaEnv* false_env = Split(ssa_env_);
975 SsaEnv* true_env = Steal(ssa_env_);
976 false_env->control = if_false;
977 true_env->control = if_true;
978 ifs_.push_back({false_env, merge_env, nullptr});
979 SetEnv("if_else:true", true_env);
980 } else if (p->index == 2) {
981 // True expr done.
982 IfEnv* env = &ifs_.back();
983 MergeIntoProduction(p, env->merge_env, p->last());
984 // Switch to environment for false branch.
985 SsaEnv* false_env = ifs_.back().false_env;
986 SetEnv("if_else:false", false_env);
987 } else if (p->index == 3) {
988 // False expr done.
989 IfEnv* env = &ifs_.back();
990 MergeIntoProduction(p, env->merge_env, p->last());
991 SetEnv("if_else:merge", env->merge_env);
992 ifs_.pop_back();
993 }
994 break;
995 }
996 case kExprSelect: {
997 if (p->index == 1) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000998 // True expression done.
999 p->tree->type = p->last()->type;
1000 if (p->tree->type == kAstStmt) {
1001 error(p->pc(), p->tree->children[1]->pc,
1002 "select operand should be expression");
1003 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01001004 } else if (p->index == 2) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001005 // False expression done.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001006 TypeCheckLast(p, p->tree->type);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001007 } else {
1008 // Condition done.
1009 DCHECK(p->done());
1010 TypeCheckLast(p, kAstI32);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001011 if (build()) {
1012 TFNode* controls[2];
Ben Murdoch097c5b22016-05-18 11:27:45 +01001013 builder_->Branch(p->tree->children[2]->node, &controls[0],
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001014 &controls[1]);
1015 TFNode* merge = builder_->Merge(2, controls);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001016 TFNode* vals[2] = {p->tree->children[0]->node,
1017 p->tree->children[1]->node};
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001018 TFNode* phi = builder_->Phi(p->tree->type, 2, vals, merge);
1019 p->tree->node = phi;
1020 ssa_env_->control = merge;
1021 }
1022 }
1023 break;
1024 }
1025 case kExprBr: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001026 BreakDepthOperand operand(this, p->pc());
1027 CHECK(Validate(p->pc(), operand, blocks_));
1028 ReduceBreakToExprBlock(p, operand.target);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001029 break;
1030 }
1031 case kExprBrIf: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001032 if (p->done()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001033 TypeCheckLast(p, kAstI32);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001034 BreakDepthOperand operand(this, p->pc());
1035 CHECK(Validate(p->pc(), operand, blocks_));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001036 SsaEnv* fenv = ssa_env_;
1037 SsaEnv* tenv = Split(fenv);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001038 BUILD(Branch, p->tree->children[1]->node, &tenv->control,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001039 &fenv->control);
1040 ssa_env_ = tenv;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001041 ReduceBreakToExprBlock(p, operand.target, p->tree->children[0]);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001042 ssa_env_ = fenv;
1043 }
1044 break;
1045 }
1046 case kExprTableSwitch: {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001047 if (p->index == 1) {
1048 // Switch key finished.
1049 TypeCheckLast(p, kAstI32);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001050 if (failed()) break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001051
Ben Murdoch097c5b22016-05-18 11:27:45 +01001052 TableSwitchOperand operand(this, p->pc());
1053 DCHECK(Validate(p->pc(), operand, blocks_.size()));
1054
1055 // Build the switch only if it has more than just a default target.
1056 bool build_switch = operand.table_count > 1;
1057 TFNode* sw = nullptr;
1058 if (build_switch)
1059 sw = BUILD(Switch, operand.table_count, p->last()->node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001060
1061 // Allocate environments for each case.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001062 SsaEnv** case_envs = zone_->NewArray<SsaEnv*>(operand.case_count);
1063 for (uint32_t i = 0; i < operand.case_count; i++) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001064 case_envs[i] = UnreachableEnv();
1065 }
1066
1067 ifs_.push_back({nullptr, nullptr, case_envs});
1068 SsaEnv* break_env = ssa_env_;
1069 PushBlock(break_env);
1070 SsaEnv* copy = Steal(break_env);
1071 ssa_env_ = copy;
1072
1073 // Build the environments for each case based on the table.
Ben Murdoch097c5b22016-05-18 11:27:45 +01001074 for (uint32_t i = 0; i < operand.table_count; i++) {
1075 uint16_t target = operand.read_entry(this, i);
1076 SsaEnv* env = copy;
1077 if (build_switch) {
1078 env = Split(env);
1079 env->control = (i == operand.table_count - 1)
1080 ? BUILD(IfDefault, sw)
1081 : BUILD(IfValue, i, sw);
1082 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001083 if (target >= 0x8000) {
1084 // Targets an outer block.
1085 int depth = target - 0x8000;
1086 SsaEnv* tenv = blocks_[blocks_.size() - depth - 1].ssa_env;
1087 Goto(env, tenv);
1088 } else {
1089 // Targets a case.
1090 Goto(env, case_envs[target]);
1091 }
1092 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01001093 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001094
Ben Murdoch097c5b22016-05-18 11:27:45 +01001095 if (p->done()) {
1096 // Last case. Fall through to the end.
1097 Block* block = &blocks_.back();
1098 if (p->index > 1) ReduceBreakToExprBlock(p, block);
1099 SsaEnv* next = block->ssa_env;
1100 blocks_.pop_back();
1101 ifs_.pop_back();
1102 SetEnv("switch:end", next);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001103 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001104 // Interior case. Maybe fall through to the next case.
1105 SsaEnv* next = ifs_.back().case_envs[p->index - 1];
1106 if (p->index > 1 && ssa_env_->go()) Goto(ssa_env_, next);
1107 SetEnv("switch:case", next);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001108 }
1109 break;
1110 }
1111 case kExprReturn: {
1112 TypeCheckLast(p, function_env_->sig->GetReturn(p->index - 1));
1113 if (p->done()) {
1114 if (build()) {
1115 int count = p->tree->count;
1116 TFNode** buffer = builder_->Buffer(count);
1117 for (int i = 0; i < count; i++) {
1118 buffer[i] = p->tree->children[i]->node;
1119 }
1120 BUILD(Return, count, buffer);
1121 }
1122 ssa_env_->Kill(SsaEnv::kControlEnd);
1123 }
1124 break;
1125 }
1126 case kExprSetLocal: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001127 LocalIndexOperand operand(this, p->pc());
1128 CHECK(Validate(p->pc(), operand));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001129 Tree* val = p->last();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001130 if (operand.type == val->type) {
1131 if (build()) ssa_env_->locals[operand.index] = val->node;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001132 p->tree->node = val->node;
1133 } else {
1134 error(p->pc(), val->pc, "Typecheck failed in SetLocal");
1135 }
1136 break;
1137 }
1138 case kExprStoreGlobal: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001139 GlobalIndexOperand operand(this, p->pc());
1140 CHECK(Validate(p->pc(), operand));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001141 Tree* val = p->last();
Ben Murdoch097c5b22016-05-18 11:27:45 +01001142 if (operand.type == val->type) {
1143 BUILD(StoreGlobal, operand.index, val->node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001144 p->tree->node = val->node;
1145 } else {
1146 error(p->pc(), val->pc, "Typecheck failed in StoreGlobal");
1147 }
1148 break;
1149 }
1150
1151 case kExprI32LoadMem8S:
1152 return ReduceLoadMem(p, kAstI32, MachineType::Int8());
1153 case kExprI32LoadMem8U:
1154 return ReduceLoadMem(p, kAstI32, MachineType::Uint8());
1155 case kExprI32LoadMem16S:
1156 return ReduceLoadMem(p, kAstI32, MachineType::Int16());
1157 case kExprI32LoadMem16U:
1158 return ReduceLoadMem(p, kAstI32, MachineType::Uint16());
1159 case kExprI32LoadMem:
1160 return ReduceLoadMem(p, kAstI32, MachineType::Int32());
1161
1162 case kExprI64LoadMem8S:
1163 return ReduceLoadMem(p, kAstI64, MachineType::Int8());
1164 case kExprI64LoadMem8U:
1165 return ReduceLoadMem(p, kAstI64, MachineType::Uint8());
1166 case kExprI64LoadMem16S:
1167 return ReduceLoadMem(p, kAstI64, MachineType::Int16());
1168 case kExprI64LoadMem16U:
1169 return ReduceLoadMem(p, kAstI64, MachineType::Uint16());
1170 case kExprI64LoadMem32S:
1171 return ReduceLoadMem(p, kAstI64, MachineType::Int32());
1172 case kExprI64LoadMem32U:
1173 return ReduceLoadMem(p, kAstI64, MachineType::Uint32());
1174 case kExprI64LoadMem:
1175 return ReduceLoadMem(p, kAstI64, MachineType::Int64());
1176
1177 case kExprF32LoadMem:
1178 return ReduceLoadMem(p, kAstF32, MachineType::Float32());
1179
1180 case kExprF64LoadMem:
1181 return ReduceLoadMem(p, kAstF64, MachineType::Float64());
1182
1183 case kExprI32StoreMem8:
1184 return ReduceStoreMem(p, kAstI32, MachineType::Int8());
1185 case kExprI32StoreMem16:
1186 return ReduceStoreMem(p, kAstI32, MachineType::Int16());
1187 case kExprI32StoreMem:
1188 return ReduceStoreMem(p, kAstI32, MachineType::Int32());
1189
1190 case kExprI64StoreMem8:
1191 return ReduceStoreMem(p, kAstI64, MachineType::Int8());
1192 case kExprI64StoreMem16:
1193 return ReduceStoreMem(p, kAstI64, MachineType::Int16());
1194 case kExprI64StoreMem32:
1195 return ReduceStoreMem(p, kAstI64, MachineType::Int32());
1196 case kExprI64StoreMem:
1197 return ReduceStoreMem(p, kAstI64, MachineType::Int64());
1198
1199 case kExprF32StoreMem:
1200 return ReduceStoreMem(p, kAstF32, MachineType::Float32());
1201
1202 case kExprF64StoreMem:
1203 return ReduceStoreMem(p, kAstF64, MachineType::Float64());
1204
1205 case kExprGrowMemory:
1206 TypeCheckLast(p, kAstI32);
1207 // TODO(titzer): build node for GrowMemory
1208 p->tree->node = BUILD(Int32Constant, 0);
1209 return;
1210
1211 case kExprCallFunction: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001212 FunctionIndexOperand operand(this, p->pc());
1213 CHECK(Validate(p->pc(), operand));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001214 if (p->index > 0) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001215 TypeCheckLast(p, operand.sig->GetParam(p->index - 1));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001216 }
1217 if (p->done() && build()) {
1218 uint32_t count = p->tree->count + 1;
1219 TFNode** buffer = builder_->Buffer(count);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001220 buffer[0] = nullptr; // reserved for code object.
1221 for (uint32_t i = 1; i < count; i++) {
1222 buffer[i] = p->tree->children[i - 1]->node;
1223 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01001224 p->tree->node = builder_->CallDirect(operand.index, buffer);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001225 }
1226 break;
1227 }
1228 case kExprCallIndirect: {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001229 SignatureIndexOperand operand(this, p->pc());
1230 CHECK(Validate(p->pc(), operand));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001231 if (p->index == 1) {
1232 TypeCheckLast(p, kAstI32);
1233 } else {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001234 TypeCheckLast(p, operand.sig->GetParam(p->index - 2));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001235 }
1236 if (p->done() && build()) {
1237 uint32_t count = p->tree->count;
1238 TFNode** buffer = builder_->Buffer(count);
1239 for (uint32_t i = 0; i < count; i++) {
1240 buffer[i] = p->tree->children[i]->node;
1241 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01001242 p->tree->node = builder_->CallIndirect(operand.index, buffer);
1243 }
1244 break;
1245 }
1246 case kExprCallImport: {
1247 ImportIndexOperand operand(this, p->pc());
1248 CHECK(Validate(p->pc(), operand));
1249 if (p->index > 0) {
1250 TypeCheckLast(p, operand.sig->GetParam(p->index - 1));
1251 }
1252 if (p->done() && build()) {
1253 uint32_t count = p->tree->count + 1;
1254 TFNode** buffer = builder_->Buffer(count);
1255 buffer[0] = nullptr; // reserved for code object.
1256 for (uint32_t i = 1; i < count; i++) {
1257 buffer[i] = p->tree->children[i - 1]->node;
1258 }
1259 p->tree->node = builder_->CallImport(operand.index, buffer);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001260 }
1261 break;
1262 }
1263 default:
1264 break;
1265 }
1266 }
1267
1268 void ReduceBreakToExprBlock(Production* p, Block* block) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001269 ReduceBreakToExprBlock(p, block, p->tree->count > 0 ? p->last() : nullptr);
1270 }
1271
1272 void ReduceBreakToExprBlock(Production* p, Block* block, Tree* val) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001273 if (block->stack_depth < 0) {
1274 // This is the inner loop block, which does not have a value.
1275 Goto(ssa_env_, block->ssa_env);
1276 } else {
1277 // Merge the value into the production for the block.
1278 Production* bp = &stack_[block->stack_depth];
Ben Murdoch097c5b22016-05-18 11:27:45 +01001279 MergeIntoProduction(bp, block->ssa_env, val);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001280 }
1281 }
1282
  // Merges the current environment (and optionally the value {expr}) into
  // {target}, updating the result type and SSA value of production {p}.
  void MergeIntoProduction(Production* p, SsaEnv* target, Tree* expr) {
    if (!ssa_env_->go()) return;  // nothing flows out of dead code.

    // NOTE: {first} must be computed before Goto(), which changes the state.
    bool first = target->state == SsaEnv::kUnreachable;
    Goto(ssa_env_, target);
    // Without a value (or with an End value) only control/effect are merged.
    if (expr == nullptr || expr->type == kAstEnd) return;

    if (first) {
      // first merge to this environment; set the type and the node.
      p->tree->type = expr->type;
      p->tree->node = expr->node;
    } else {
      // merge with the existing value for this block.
      LocalType type = p->tree->type;
      if (expr->type != type) {
        // Type mismatch: the block as a whole produces no value.
        type = kAstStmt;
        p->tree->type = kAstStmt;
        p->tree->node = nullptr;
      } else if (type != kAstStmt) {
        p->tree->node = CreateOrMergeIntoPhi(type, target->control,
                                             p->tree->node, expr->node);
      }
    }
  }
1307
1308 void ReduceLoadMem(Production* p, LocalType type, MachineType mem_type) {
1309 DCHECK_EQ(1, p->index);
1310 TypeCheckLast(p, kAstI32); // index
1311 if (build()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001312 MemoryAccessOperand operand(this, p->pc());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001313 p->tree->node =
Ben Murdoch097c5b22016-05-18 11:27:45 +01001314 builder_->LoadMem(type, mem_type, p->last()->node, operand.offset);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001315 }
1316 }
1317
1318 void ReduceStoreMem(Production* p, LocalType type, MachineType mem_type) {
1319 if (p->index == 1) {
1320 TypeCheckLast(p, kAstI32); // index
1321 } else {
1322 DCHECK_EQ(2, p->index);
1323 TypeCheckLast(p, type);
1324 if (build()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001325 MemoryAccessOperand operand(this, p->pc());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001326 TFNode* val = p->tree->children[1]->node;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001327 builder_->StoreMem(mem_type, p->tree->children[0]->node, operand.offset,
1328 val);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001329 p->tree->node = val;
1330 }
1331 }
1332 }
1333
1334 void TypeCheckLast(Production* p, LocalType expected) {
1335 LocalType result = p->last()->type;
1336 if (result == expected) return;
1337 if (result == kAstEnd) return;
1338 if (expected != kAstStmt) {
1339 error(p->pc(), p->last()->pc,
1340 "%s[%d] expected type %s, found %s of type %s",
1341 WasmOpcodes::OpcodeName(p->opcode()), p->index - 1,
1342 WasmOpcodes::TypeName(expected),
1343 WasmOpcodes::OpcodeName(p->last()->opcode()),
1344 WasmOpcodes::TypeName(p->last()->type));
1345 }
1346 }
1347
  // Switch the current SSA environment to {env} and repoint the graph
  // builder's control/effect pointers at it. {reason} is used only for
  // decoder tracing.
  void SetEnv(const char* reason, SsaEnv* env) {
    TRACE("  env = %p, block depth = %d, reason = %s", static_cast<void*>(env),
          static_cast<int>(blocks_.size()), reason);
    if (FLAG_trace_wasm_decoder && env && env->control) {
      TRACE(", control = ");
      compiler::WasmGraphBuilder::PrintDebugName(env->control);
    }
    TRACE("\n");
    ssa_env_ = env;
    // When building a graph, subsequently-created nodes read and update
    // control/effect directly through these pointers.
    if (builder_) {
      builder_->set_control_ptr(&env->control);
      builder_->set_effect_ptr(&env->effect);
    }
  }
1362
  // Merge the environment {from} into {to}, creating control/effect merges
  // and value phis as needed, then kill {from}. The merge strategy depends
  // on how many predecessors {to} already has, tracked by its state.
  void Goto(SsaEnv* from, SsaEnv* to) {
    DCHECK_NOT_NULL(to);
    if (!from->go()) return;  // nothing flows out of dead code.
    switch (to->state) {
      case SsaEnv::kUnreachable: {  // Overwrite destination.
        // First predecessor: {to} simply adopts {from}'s state wholesale.
        to->state = SsaEnv::kReached;
        to->locals = from->locals;
        to->control = from->control;
        to->effect = from->effect;
        break;
      }
      case SsaEnv::kReached: {  // Create a new merge.
        to->state = SsaEnv::kMerged;
        if (!builder_) break;  // validation only: no graph nodes to merge.
        // Merge control.
        TFNode* controls[] = {to->control, from->control};
        TFNode* merge = builder_->Merge(2, controls);
        to->control = merge;
        // Merge effects.
        if (from->effect != to->effect) {
          TFNode* effects[] = {to->effect, from->effect, merge};
          to->effect = builder_->EffectPhi(2, effects, merge);
        }
        // Merge SSA values.
        for (int i = EnvironmentCount() - 1; i >= 0; i--) {
          TFNode* a = to->locals[i];
          TFNode* b = from->locals[i];
          if (a != b) {
            TFNode* vals[] = {a, b};
            to->locals[i] =
                builder_->Phi(function_env_->GetLocalType(i), 2, vals, merge);
          }
        }
        break;
      }
      case SsaEnv::kMerged: {
        if (!builder_) break;
        TFNode* merge = to->control;
        // Extend the existing merge.
        builder_->AppendToMerge(merge, from->control);
        // Merge effects.
        if (builder_->IsPhiWithMerge(to->effect, merge)) {
          builder_->AppendToPhi(merge, to->effect, from->effect);
        } else if (to->effect != from->effect) {
          // Build a fresh effect phi: the old effect for every existing
          // predecessor, the incoming effect for the newly-added one.
          uint32_t count = builder_->InputCount(merge);
          TFNode** effects = builder_->Buffer(count);
          for (uint32_t j = 0; j < count - 1; j++) {
            effects[j] = to->effect;
          }
          effects[count - 1] = from->effect;
          to->effect = builder_->EffectPhi(count, effects, merge);
        }
        // Merge locals.
        for (int i = EnvironmentCount() - 1; i >= 0; i--) {
          TFNode* tnode = to->locals[i];
          TFNode* fnode = from->locals[i];
          if (builder_->IsPhiWithMerge(tnode, merge)) {
            builder_->AppendToPhi(merge, tnode, fnode);
          } else if (tnode != fnode) {
            // Same pattern as effects: old value for all existing inputs,
            // incoming value for the new input.
            uint32_t count = builder_->InputCount(merge);
            TFNode** vals = builder_->Buffer(count);
            for (uint32_t j = 0; j < count - 1; j++) {
              vals[j] = tnode;
            }
            vals[count - 1] = fnode;
            to->locals[i] = builder_->Phi(function_env_->GetLocalType(i), count,
                                          vals, merge);
          }
        }
        break;
      }
      default:
        UNREACHABLE();
    }
    return from->Kill();  // {from} is dead after the merge.
  }
1439
1440 TFNode* CreateOrMergeIntoPhi(LocalType type, TFNode* merge, TFNode* tnode,
1441 TFNode* fnode) {
1442 if (builder_->IsPhiWithMerge(tnode, merge)) {
1443 builder_->AppendToPhi(merge, tnode, fnode);
1444 } else if (tnode != fnode) {
1445 uint32_t count = builder_->InputCount(merge);
1446 TFNode** vals = builder_->Buffer(count);
1447 for (uint32_t j = 0; j < count - 1; j++) vals[j] = tnode;
1448 vals[count - 1] = fnode;
1449 return builder_->Phi(type, count, vals, merge);
1450 }
1451 return tnode;
1452 }
1453
1454 void BuildInfiniteLoop() {
1455 if (ssa_env_->go()) {
1456 PrepareForLoop(ssa_env_);
1457 SsaEnv* cont_env = ssa_env_;
1458 ssa_env_ = Split(ssa_env_);
1459 ssa_env_->state = SsaEnv::kReached;
1460 Goto(ssa_env_, cont_env);
1461 }
1462 }
1463
  // Turn {env} into a loop header: wrap its control in a loop node and
  // introduce single-input phis for the effect and for every local, so that
  // back edges can later be merged in via Goto().
  void PrepareForLoop(SsaEnv* env) {
    if (env->go()) {
      env->state = SsaEnv::kMerged;  // loop headers always accept merges.
      if (builder_) {
        env->control = builder_->Loop(env->control);
        env->effect = builder_->EffectPhi(1, &env->effect, env->control);
        // Terminate keeps (possibly infinite) loops connected to the graph
        // end so they are not dead-code-eliminated.
        builder_->Terminate(env->effect, env->control);
        for (int i = EnvironmentCount() - 1; i >= 0; i--) {
          env->locals[i] = builder_->Phi(function_env_->GetLocalType(i), 1,
                                         &env->locals[i], env->control);
        }
      }
    }
  }
1478
1479 // Create a complete copy of the {from}.
1480 SsaEnv* Split(SsaEnv* from) {
1481 DCHECK_NOT_NULL(from);
1482 SsaEnv* result = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv)));
1483 size_t size = sizeof(TFNode*) * EnvironmentCount();
1484 result->control = from->control;
1485 result->effect = from->effect;
1486 result->state = from->state == SsaEnv::kUnreachable ? SsaEnv::kUnreachable
1487 : SsaEnv::kReached;
1488
1489 if (from->go()) {
1490 result->state = SsaEnv::kReached;
1491 result->locals =
1492 size > 0 ? reinterpret_cast<TFNode**>(zone_->New(size)) : nullptr;
1493 memcpy(result->locals, from->locals, size);
1494 } else {
1495 result->state = SsaEnv::kUnreachable;
1496 result->locals = nullptr;
1497 }
1498
1499 return result;
1500 }
1501
1502 // Create a copy of {from} that steals its state and leaves {from}
1503 // unreachable.
1504 SsaEnv* Steal(SsaEnv* from) {
1505 DCHECK_NOT_NULL(from);
1506 if (!from->go()) return UnreachableEnv();
1507 SsaEnv* result = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv)));
1508 result->state = SsaEnv::kReached;
1509 result->locals = from->locals;
1510 result->control = from->control;
1511 result->effect = from->effect;
1512 from->Kill(SsaEnv::kUnreachable);
1513 return result;
1514 }
1515
1516 // Create an unreachable environment.
1517 SsaEnv* UnreachableEnv() {
1518 SsaEnv* result = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv)));
1519 result->state = SsaEnv::kUnreachable;
1520 result->control = nullptr;
1521 result->effect = nullptr;
1522 result->locals = nullptr;
1523 return result;
1524 }
1525
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001526 int EnvironmentCount() {
1527 if (builder_) return static_cast<int>(function_env_->GetLocalCount());
1528 return 0; // if we aren't building a graph, don't bother with SSA renaming.
1529 }
1530
  // Called on the first decode error: stop the decoding loop, disable any
  // further graph building, and (in debug builds) dump the production stack.
  virtual void onFirstError() {
    limit_ = start_;     // Terminate decoding loop.
    builder_ = nullptr;  // Don't build any more nodes.
#if DEBUG
    PrintStackForDebugging();
#endif
  }
1538
#if DEBUG
  // Dump the whole production stack, outermost production first.
  void PrintStackForDebugging() { PrintProduction(0); }

  // Print the production at {depth} (indented by depth) together with the
  // children decoded so far, annotating children that already have TurboFan
  // nodes, then recurse to the next deeper production.
  void PrintProduction(size_t depth) {
    if (depth >= stack_.size()) return;
    Production* p = &stack_[depth];
    for (size_t d = 0; d < depth; d++) PrintF("  ");

    PrintF("@%d %s [%d]\n", static_cast<int>(p->tree->pc - start_),
           WasmOpcodes::OpcodeName(p->opcode()), p->tree->count);
    for (int i = 0; i < p->index; i++) {
      Tree* child = p->tree->children[i];
      for (size_t d = 0; d <= depth; d++) PrintF("  ");
      PrintF("@%d %s [%d]", static_cast<int>(child->pc - start_),
             WasmOpcodes::OpcodeName(child->opcode()), child->count);
      if (child->node) {
        PrintF(" => TF");
        compiler::WasmGraphBuilder::PrintDebugName(child->node);
      }
      PrintF("\n");
    }
    PrintProduction(depth + 1);
  }
#endif
1563};
1564
1565
1566TreeResult VerifyWasmCode(FunctionEnv* env, const byte* base, const byte* start,
1567 const byte* end) {
1568 Zone zone;
1569 LR_WasmDecoder decoder(&zone, nullptr);
1570 TreeResult result = decoder.Decode(env, base, start, end);
1571 return result;
1572}
1573
1574
1575TreeResult BuildTFGraph(TFBuilder* builder, FunctionEnv* env, const byte* base,
1576 const byte* start, const byte* end) {
1577 Zone zone;
1578 LR_WasmDecoder decoder(&zone, builder);
1579 TreeResult result = decoder.Decode(env, base, start, end);
1580 return result;
1581}
1582
1583
1584std::ostream& operator<<(std::ostream& os, const Tree& tree) {
1585 if (tree.pc == nullptr) {
1586 os << "null";
1587 return os;
1588 }
1589 PrintF("%s", WasmOpcodes::OpcodeName(tree.opcode()));
1590 if (tree.count > 0) os << "(";
1591 for (uint32_t i = 0; i < tree.count; i++) {
1592 if (i > 0) os << ", ";
1593 os << *tree.children[i];
1594 }
1595 if (tree.count > 0) os << ")";
1596 return os;
1597}
1598
1599
1600ReadUnsignedLEB128ErrorCode ReadUnsignedLEB128Operand(const byte* pc,
1601 const byte* limit,
1602 int* length,
1603 uint32_t* result) {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001604 Decoder decoder(pc, limit);
1605 *result = decoder.checked_read_u32v(pc, 0, length);
1606 if (decoder.ok()) return kNoError;
1607 return (limit - pc) > 1 ? kInvalidLEB128 : kMissingLEB128;
1608}
1609
1610int OpcodeLength(const byte* pc, const byte* end) {
1611 WasmDecoder decoder(nullptr, pc, end);
1612 return decoder.OpcodeLength(pc);
1613}
1614
1615int OpcodeArity(FunctionEnv* env, const byte* pc, const byte* end) {
1616 WasmDecoder decoder(env, pc, end);
1617 return decoder.OpcodeArity(pc);
1618}
1619
1620void PrintAst(FunctionEnv* env, const byte* start, const byte* end) {
1621 WasmDecoder decoder(env, start, end);
1622 const byte* pc = start;
1623 std::vector<int> arity_stack;
1624 while (pc < end) {
1625 int arity = decoder.OpcodeArity(pc);
1626 size_t length = decoder.OpcodeLength(pc);
1627
1628 for (auto arity : arity_stack) {
1629 printf(" ");
1630 USE(arity);
1631 }
1632
1633 WasmOpcode opcode = static_cast<WasmOpcode>(*pc);
1634 printf("k%s,", WasmOpcodes::OpcodeName(opcode));
1635
1636 for (size_t i = 1; i < length; i++) {
1637 printf(" 0x%02x,", pc[i]);
1638 }
1639 pc += length;
1640 printf("\n");
1641
1642 arity_stack.push_back(arity);
1643 while (arity_stack.back() == 0) {
1644 arity_stack.pop_back();
1645 if (arity_stack.empty()) break;
1646 arity_stack.back()--;
1647 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001648 }
1649}
1650
Ben Murdoch097c5b22016-05-18 11:27:45 +01001651// Analyzes loop bodies for static assignments to locals, which helps in
1652// reducing the number of phis introduced at loop headers.
class LoopAssignmentAnalyzer : public WasmDecoder {
 public:
  LoopAssignmentAnalyzer(Zone* zone, FunctionEnv* function_env) : zone_(zone) {
    function_env_ = function_env;  // member inherited from WasmDecoder.
  }

  // Scan the loop starting at {pc} and return a bitvector with one bit per
  // local, set iff the loop body contains a SetLocal to that local.
  // Returns nullptr if {pc} does not point at a loop opcode.
  BitVector* Analyze(const byte* pc, const byte* limit) {
    Decoder::Reset(pc, limit);
    if (pc_ >= limit_) return nullptr;
    if (*pc_ != kExprLoop) return nullptr;

    BitVector* assigned =
        new (zone_) BitVector(function_env_->total_locals, zone_);
    // Keep a stack to model the nesting of expressions.
    std::vector<int> arity_stack;
    arity_stack.push_back(OpcodeArity(pc_));
    pc_ += OpcodeLength(pc_);

    // Iteratively process all AST nodes nested inside the loop.
    while (pc_ < limit_) {
      WasmOpcode opcode = static_cast<WasmOpcode>(*pc_);
      int arity = 0;
      int length = 1;
      if (opcode == kExprSetLocal) {
        LocalIndexOperand operand(this, pc_);
        if (assigned->length() > 0 &&
            static_cast<int>(operand.index) < assigned->length()) {
          // Unverified code might have an out-of-bounds index.
          assigned->Add(operand.index);
        }
        arity = 1;
        length = 1 + operand.length;
      } else {
        arity = OpcodeArity(pc_);
        length = OpcodeLength(pc_);
      }

      pc_ += length;
      // Arity bookkeeping: a completed expression consumes one pending
      // operand of its parent; an empty stack means the loop itself ended.
      arity_stack.push_back(arity);
      while (arity_stack.back() == 0) {
        arity_stack.pop_back();
        if (arity_stack.empty()) return assigned;  // reached end of loop
        arity_stack.back()--;
      }
    }
    // Ran off the end of the buffer before the loop closed; return what we
    // have gathered so far.
    return assigned;
  }

 private:
  Zone* zone_;
};
1704
1705
1706BitVector* AnalyzeLoopAssignmentForTesting(Zone* zone, FunctionEnv* env,
1707 const byte* start, const byte* end) {
1708 LoopAssignmentAnalyzer analyzer(zone, env);
1709 return analyzer.Analyze(start, end);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001710}
1711
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001712} // namespace wasm
1713} // namespace internal
1714} // namespace v8