//=- WebAssemblyInstrAtomics.td-WebAssembly Atomic codegen support -*- tablegen -*-
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
///
/// \file
/// WebAssembly Atomic operand code-gen constructs.
///
//===----------------------------------------------------------------------===//

//===----------------------------------------------------------------------===//
// Atomic loads
//===----------------------------------------------------------------------===//

// Full-width (32- and 64-bit) atomic load instructions. These take the same
// operand shape as regular loads (offset, p2align, address) via the shared
// WebAssemblyLoad multiclass; opcodes 0xfe10/0xfe11 are the threads-proposal
// encodings.
let Defs = [ARGUMENTS] in {
def ATOMIC_LOAD_I32 : WebAssemblyLoad<I32, "i32.atomic.load", 0xfe10>;
def ATOMIC_LOAD_I64 : WebAssemblyLoad<I64, "i64.atomic.load", 0xfe11>;
} // Defs = [ARGUMENTS]

// Selection patterns for the full-width atomic loads above. These mirror the
// regular (non-atomic) load patterns: each addressing form gets its own
// anonymous pattern, guarded by the atomics feature predicate.

// Select loads with no constant offset.
let Predicates = [HasAtomics] in {
def : LoadPatNoOffset<i32, atomic_load_32, ATOMIC_LOAD_I32>;
def : LoadPatNoOffset<i64, atomic_load_64, ATOMIC_LOAD_I64>;

// Select loads with a constant offset.

// Pattern with address + immediate offset
def : LoadPatImmOff<i32, atomic_load_32, regPlusImm, ATOMIC_LOAD_I32>;
def : LoadPatImmOff<i64, atomic_load_64, regPlusImm, ATOMIC_LOAD_I64>;
def : LoadPatImmOff<i32, atomic_load_32, or_is_add, ATOMIC_LOAD_I32>;
def : LoadPatImmOff<i64, atomic_load_64, or_is_add, ATOMIC_LOAD_I64>;

// Pattern with address + global address as the constant offset.
def : LoadPatGlobalAddr<i32, atomic_load_32, ATOMIC_LOAD_I32>;
def : LoadPatGlobalAddr<i64, atomic_load_64, ATOMIC_LOAD_I64>;

// Pattern with address + external symbol as the constant offset.
def : LoadPatExternalSym<i32, atomic_load_32, ATOMIC_LOAD_I32>;
def : LoadPatExternalSym<i64, atomic_load_64, ATOMIC_LOAD_I64>;


// Select loads with just a constant offset.
def : LoadPatOffsetOnly<i32, atomic_load_32, ATOMIC_LOAD_I32>;
def : LoadPatOffsetOnly<i64, atomic_load_64, ATOMIC_LOAD_I64>;

def : LoadPatGlobalAddrOffOnly<i32, atomic_load_32, ATOMIC_LOAD_I32>;
def : LoadPatGlobalAddrOffOnly<i64, atomic_load_64, ATOMIC_LOAD_I64>;

def : LoadPatExternSymOffOnly<i32, atomic_load_32, ATOMIC_LOAD_I32>;
def : LoadPatExternSymOffOnly<i64, atomic_load_64, ATOMIC_LOAD_I64>;

} // Predicates = [HasAtomics]

// Extending loads. Note that there are only zero-extending atomic loads, no
// sign-extending loads (sign extension is done with a separate instruction
// after the load; see the pattern comments below).
let Defs = [ARGUMENTS] in {
def ATOMIC_LOAD8_U_I32 : WebAssemblyLoad<I32, "i32.atomic.load8_u", 0xfe12>;
def ATOMIC_LOAD16_U_I32 : WebAssemblyLoad<I32, "i32.atomic.load16_u", 0xfe13>;
def ATOMIC_LOAD8_U_I64 : WebAssemblyLoad<I64, "i64.atomic.load8_u", 0xfe14>;
def ATOMIC_LOAD16_U_I64 : WebAssemblyLoad<I64, "i64.atomic.load16_u", 0xfe15>;
def ATOMIC_LOAD32_U_I64 : WebAssemblyLoad<I64, "i64.atomic.load32_u", 0xfe16>;
} // Defs = [ARGUMENTS]

// Fragments for extending loads. These are different from regular loads because
// the SDNodes are derived from AtomicSDNode rather than LoadSDNode and
// therefore don't have the extension type field. So instead of matching that,
// we match the patterns that the type legalizer expands them to.

// We directly match zext patterns and select the zext atomic loads.
// i32 (zext (i8 (atomic_load_8))) gets legalized to
// i32 (and (i32 (atomic_load_8)), 255)
// These can be selected to a single zero-extending atomic load instruction.
def zext_aload_8 : PatFrag<(ops node:$addr),
                           (and (i32 (atomic_load_8 node:$addr)), 255)>;
def zext_aload_16 : PatFrag<(ops node:$addr),
                            (and (i32 (atomic_load_16 node:$addr)), 65535)>;
// Unlike regular loads, extension to i64 is handled differently than i32.
// i64 (zext (i8 (atomic_load_8))) gets legalized to
// i64 (and (i64 (anyext (i32 (atomic_load_8)))), 255)
def zext_aload_8_64 :
  PatFrag<(ops node:$addr),
          (and (i64 (anyext (i32 (atomic_load_8 node:$addr)))), 255)>;
def zext_aload_16_64 :
  PatFrag<(ops node:$addr),
          (and (i64 (anyext (i32 (atomic_load_16 node:$addr)))), 65535)>;
def zext_aload_32_64 :
  PatFrag<(ops node:$addr),
          (zext (i32 (atomic_load node:$addr)))>;

// We don't have single sext atomic load instructions. So for sext loads, we
// match bare subword loads (for 32-bit results) and anyext loads (for 64-bit
// results) and select a zext load; the next instruction will be sext_inreg
// which is selected by itself.
def anyext_aload_8_64 :
  PatFrag<(ops node:$addr), (anyext (i32 (atomic_load_8 node:$addr)))>;
def anyext_aload_16_64 :
  PatFrag<(ops node:$addr), (anyext (i32 (atomic_load_16 node:$addr)))>;

// Selection patterns for the extending atomic loads, covering the same
// addressing forms as the full-width patterns above, for both the
// explicitly-zero-extended fragments and the bare/anyext (sext-lowering)
// fragments.
let Predicates = [HasAtomics] in {
// Select zero-extending loads with no constant offset.
def : LoadPatNoOffset<i32, zext_aload_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatNoOffset<i32, zext_aload_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatNoOffset<i64, zext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatNoOffset<i64, zext_aload_16_64, ATOMIC_LOAD16_U_I64>;
def : LoadPatNoOffset<i64, zext_aload_32_64, ATOMIC_LOAD32_U_I64>;

// Select sign-extending loads with no constant offset
def : LoadPatNoOffset<i32, atomic_load_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatNoOffset<i32, atomic_load_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatNoOffset<i64, anyext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatNoOffset<i64, anyext_aload_16_64, ATOMIC_LOAD16_U_I64>;
// 32->64 sext load gets selected as i32.atomic.load, i64.extend_s/i64


// Zero-extending loads with constant offset
def : LoadPatImmOff<i32, zext_aload_8, regPlusImm, ATOMIC_LOAD8_U_I32>;
def : LoadPatImmOff<i32, zext_aload_16, regPlusImm, ATOMIC_LOAD16_U_I32>;
def : LoadPatImmOff<i32, zext_aload_8, or_is_add, ATOMIC_LOAD8_U_I32>;
def : LoadPatImmOff<i32, zext_aload_16, or_is_add, ATOMIC_LOAD16_U_I32>;
def : LoadPatImmOff<i64, zext_aload_8_64, regPlusImm, ATOMIC_LOAD8_U_I64>;
def : LoadPatImmOff<i64, zext_aload_16_64, regPlusImm, ATOMIC_LOAD16_U_I64>;
def : LoadPatImmOff<i64, zext_aload_32_64, regPlusImm, ATOMIC_LOAD32_U_I64>;
def : LoadPatImmOff<i64, zext_aload_8_64, or_is_add, ATOMIC_LOAD8_U_I64>;
def : LoadPatImmOff<i64, zext_aload_16_64, or_is_add, ATOMIC_LOAD16_U_I64>;
def : LoadPatImmOff<i64, zext_aload_32_64, or_is_add, ATOMIC_LOAD32_U_I64>;

// Sign-extending loads with constant offset
def : LoadPatImmOff<i32, atomic_load_8, regPlusImm, ATOMIC_LOAD8_U_I32>;
def : LoadPatImmOff<i32, atomic_load_16, regPlusImm, ATOMIC_LOAD16_U_I32>;
def : LoadPatImmOff<i32, atomic_load_8, or_is_add, ATOMIC_LOAD8_U_I32>;
def : LoadPatImmOff<i32, atomic_load_16, or_is_add, ATOMIC_LOAD16_U_I32>;
def : LoadPatImmOff<i64, anyext_aload_8_64, regPlusImm, ATOMIC_LOAD8_U_I64>;
def : LoadPatImmOff<i64, anyext_aload_16_64, regPlusImm, ATOMIC_LOAD16_U_I64>;
def : LoadPatImmOff<i64, anyext_aload_8_64, or_is_add, ATOMIC_LOAD8_U_I64>;
def : LoadPatImmOff<i64, anyext_aload_16_64, or_is_add, ATOMIC_LOAD16_U_I64>;
// No 32->64 patterns, just use i32.atomic.load and i64.extend_s/i64

def : LoadPatGlobalAddr<i32, zext_aload_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatGlobalAddr<i32, zext_aload_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatGlobalAddr<i64, zext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatGlobalAddr<i64, zext_aload_16_64, ATOMIC_LOAD16_U_I64>;
def : LoadPatGlobalAddr<i64, zext_aload_32_64, ATOMIC_LOAD32_U_I64>;
def : LoadPatGlobalAddr<i32, atomic_load_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatGlobalAddr<i32, atomic_load_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatGlobalAddr<i64, anyext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatGlobalAddr<i64, anyext_aload_16_64, ATOMIC_LOAD16_U_I64>;

def : LoadPatExternalSym<i32, zext_aload_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatExternalSym<i32, zext_aload_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatExternalSym<i64, zext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatExternalSym<i64, zext_aload_16_64, ATOMIC_LOAD16_U_I64>;
def : LoadPatExternalSym<i64, zext_aload_32_64, ATOMIC_LOAD32_U_I64>;
def : LoadPatExternalSym<i32, atomic_load_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatExternalSym<i32, atomic_load_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatExternalSym<i64, anyext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatExternalSym<i64, anyext_aload_16_64, ATOMIC_LOAD16_U_I64>;


// Extending loads with just a constant offset
def : LoadPatOffsetOnly<i32, zext_aload_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatOffsetOnly<i32, zext_aload_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatOffsetOnly<i64, zext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatOffsetOnly<i64, zext_aload_16_64, ATOMIC_LOAD16_U_I64>;
def : LoadPatOffsetOnly<i64, zext_aload_32_64, ATOMIC_LOAD32_U_I64>;
def : LoadPatOffsetOnly<i32, atomic_load_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatOffsetOnly<i32, atomic_load_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatOffsetOnly<i64, anyext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatOffsetOnly<i64, anyext_aload_16_64, ATOMIC_LOAD16_U_I64>;

def : LoadPatGlobalAddrOffOnly<i32, zext_aload_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatGlobalAddrOffOnly<i32, zext_aload_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatGlobalAddrOffOnly<i64, zext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatGlobalAddrOffOnly<i64, zext_aload_16_64, ATOMIC_LOAD16_U_I64>;
def : LoadPatGlobalAddrOffOnly<i64, zext_aload_32_64, ATOMIC_LOAD32_U_I64>;
def : LoadPatGlobalAddrOffOnly<i32, atomic_load_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatGlobalAddrOffOnly<i32, atomic_load_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatGlobalAddrOffOnly<i64, anyext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatGlobalAddrOffOnly<i64, anyext_aload_16_64, ATOMIC_LOAD16_U_I64>;

def : LoadPatExternSymOffOnly<i32, zext_aload_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatExternSymOffOnly<i32, zext_aload_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatExternSymOffOnly<i64, zext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatExternSymOffOnly<i64, zext_aload_16_64, ATOMIC_LOAD16_U_I64>;
def : LoadPatExternSymOffOnly<i64, zext_aload_32_64, ATOMIC_LOAD32_U_I64>;
def : LoadPatExternSymOffOnly<i32, atomic_load_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatExternSymOffOnly<i32, atomic_load_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatExternSymOffOnly<i64, anyext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatExternSymOffOnly<i64, anyext_aload_16_64, ATOMIC_LOAD16_U_I64>;


} // Predicates = [HasAtomics]

//===----------------------------------------------------------------------===//
// Atomic stores
//===----------------------------------------------------------------------===//

// TODO: add atomic stores here...

//===----------------------------------------------------------------------===//
// Low-level exclusive operations
//===----------------------------------------------------------------------===//

// TODO: add exclusive operations here...

// Load-exclusives.

// Store-exclusives.

// Store-release-exclusives.

// And clear exclusive.