// WebAssemblyInstrAtomics.td-WebAssembly Atomic codegen support-*- tablegen -*-
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
///
/// \file
/// WebAssembly Atomic operand code-gen constructs.
///
//===----------------------------------------------------------------------===//

//===----------------------------------------------------------------------===//
// Atomic loads
//===----------------------------------------------------------------------===//

let Defs = [ARGUMENTS] in {
defm ATOMIC_LOAD_I32 : WebAssemblyLoad<I32, "i32.atomic.load", 0xfe10>;
defm ATOMIC_LOAD_I64 : WebAssemblyLoad<I64, "i64.atomic.load", 0xfe11>;
} // Defs = [ARGUMENTS]

// Select loads with no constant offset.
let Predicates = [HasAtomics] in {
def : LoadPatNoOffset<i32, atomic_load_32, ATOMIC_LOAD_I32>;
def : LoadPatNoOffset<i64, atomic_load_64, ATOMIC_LOAD_I64>;
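
// As a rough illustration (assuming the LoadPatNoOffset class from
// WebAssemblyInstrMemory.td keeps its current shape), the i32 pattern above
// expands to approximately:
//   def : Pat<(i32 (atomic_load_32 I32:$addr)),
//             (ATOMIC_LOAD_I32 0, 0, $addr)>;
// i.e. both immediate operands of the instruction (alignment hint and
// constant offset) are left as 0 and the register address is used directly.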

// Select loads with a constant offset.

// Pattern with address + immediate offset
def : LoadPatImmOff<i32, atomic_load_32, regPlusImm, ATOMIC_LOAD_I32>;
def : LoadPatImmOff<i64, atomic_load_64, regPlusImm, ATOMIC_LOAD_I64>;
def : LoadPatImmOff<i32, atomic_load_32, or_is_add, ATOMIC_LOAD_I32>;
def : LoadPatImmOff<i64, atomic_load_64, or_is_add, ATOMIC_LOAD_I64>;
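
// A sketch of what the address+offset patterns above cover (illustrative
// only, assuming LoadPatImmOff from WebAssemblyInstrMemory.td): with
// regPlusImm, a DAG like
//   (i32 (atomic_load_32 (add I32:$addr, imm:$off)))
// is selected to
//   (ATOMIC_LOAD_I32 0, imm:$off, $addr)
// folding the constant into the instruction's offset field. The or_is_add
// variants cover the same addressing when the address computation is
// expressed as an 'or' that is known to behave like an add.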

def : LoadPatGlobalAddr<i32, atomic_load_32, ATOMIC_LOAD_I32>;
def : LoadPatGlobalAddr<i64, atomic_load_64, ATOMIC_LOAD_I64>;

def : LoadPatExternalSym<i32, atomic_load_32, ATOMIC_LOAD_I32>;
def : LoadPatExternalSym<i64, atomic_load_64, ATOMIC_LOAD_I64>;


// Select loads with just a constant offset.
def : LoadPatOffsetOnly<i32, atomic_load_32, ATOMIC_LOAD_I32>;
def : LoadPatOffsetOnly<i64, atomic_load_64, ATOMIC_LOAD_I64>;

def : LoadPatGlobalAddrOffOnly<i32, atomic_load_32, ATOMIC_LOAD_I32>;
def : LoadPatGlobalAddrOffOnly<i64, atomic_load_64, ATOMIC_LOAD_I64>;

def : LoadPatExternSymOffOnly<i32, atomic_load_32, ATOMIC_LOAD_I32>;
def : LoadPatExternSymOffOnly<i64, atomic_load_64, ATOMIC_LOAD_I64>;

} // Predicates = [HasAtomics]

// Extending loads. Note that there are only zero-extending atomic loads, no
// sign-extending loads.
let Defs = [ARGUMENTS] in {
defm ATOMIC_LOAD8_U_I32 : WebAssemblyLoad<I32, "i32.atomic.load8_u", 0xfe12>;
defm ATOMIC_LOAD16_U_I32 : WebAssemblyLoad<I32, "i32.atomic.load16_u", 0xfe13>;
defm ATOMIC_LOAD8_U_I64 : WebAssemblyLoad<I64, "i64.atomic.load8_u", 0xfe14>;
defm ATOMIC_LOAD16_U_I64 : WebAssemblyLoad<I64, "i64.atomic.load16_u", 0xfe15>;
defm ATOMIC_LOAD32_U_I64 : WebAssemblyLoad<I64, "i64.atomic.load32_u", 0xfe16>;
} // Defs = [ARGUMENTS]

// Fragments for extending loads. These are different from regular loads
// because the SDNodes are derived from AtomicSDNode rather than LoadSDNode and
// therefore don't have the extension type field. So instead of matching that,
// we match the patterns that the type legalizer expands them to.

// We directly match zext patterns and select the zext atomic loads.
// i32 (zext (i8 (atomic_load_8))) gets legalized to
// i32 (and (i32 (atomic_load_8)), 255)
// These can be selected to a single zero-extending atomic load instruction.
def zext_aload_8 : PatFrag<(ops node:$addr),
                           (and (i32 (atomic_load_8 node:$addr)), 255)>;
def zext_aload_16 : PatFrag<(ops node:$addr),
                            (and (i32 (atomic_load_16 node:$addr)), 65535)>;
// Unlike regular loads, extension to i64 is handled differently than i32.
// i64 (zext (i8 (atomic_load_8))) gets legalized to
// i64 (and (i64 (anyext (i32 (atomic_load_8)))), 255)
def zext_aload_8_64 :
  PatFrag<(ops node:$addr),
          (and (i64 (anyext (i32 (atomic_load_8 node:$addr)))), 255)>;
def zext_aload_16_64 :
  PatFrag<(ops node:$addr),
          (and (i64 (anyext (i32 (atomic_load_16 node:$addr)))), 65535)>;
def zext_aload_32_64 :
  PatFrag<(ops node:$addr),
          (zext (i32 (atomic_load node:$addr)))>;
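
// For illustration (not an additional pattern): through the LoadPatNoOffset
// patterns below, a DAG such as
//   (i64 (zext (i32 (atomic_load I32:$addr))))
// matches zext_aload_32_64 and is selected to a single
//   (ATOMIC_LOAD32_U_I64 0, 0, $addr)
// so the zero extension comes for free from i64.atomic.load32_u.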

// We don't have single sext atomic load instructions. So for sext loads, we
// match bare subword loads (for 32-bit results) and anyext loads (for 64-bit
// results) and select a zext load; the following sext_inreg is then selected
// by its own patterns.
def anyext_aload_8_64 :
  PatFrag<(ops node:$addr), (anyext (i32 (atomic_load_8 node:$addr)))>;
def anyext_aload_16_64 :
  PatFrag<(ops node:$addr), (anyext (i32 (atomic_load_16 node:$addr)))>;
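
// For example (a sketch of the flow, not an additional pattern): an i32
// sign-extending atomic load of an i8 is legalized to roughly
//   (sext_inreg (i32 (atomic_load_8 $addr)), i8)
// The atomic_load_8 here is matched by the patterns below and selected to
// i32.atomic.load8_u; the remaining sext_inreg is then selected on its own by
// the integer instruction patterns.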

let Predicates = [HasAtomics] in {
// Select zero-extending loads with no constant offset.
def : LoadPatNoOffset<i32, zext_aload_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatNoOffset<i32, zext_aload_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatNoOffset<i64, zext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatNoOffset<i64, zext_aload_16_64, ATOMIC_LOAD16_U_I64>;
def : LoadPatNoOffset<i64, zext_aload_32_64, ATOMIC_LOAD32_U_I64>;

// Select sign-extending loads with no constant offset
def : LoadPatNoOffset<i32, atomic_load_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatNoOffset<i32, atomic_load_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatNoOffset<i64, anyext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatNoOffset<i64, anyext_aload_16_64, ATOMIC_LOAD16_U_I64>;
// 32->64 sext load gets selected as i32.atomic.load, i64.extend_s/i64


// Zero-extending loads with constant offset
def : LoadPatImmOff<i32, zext_aload_8, regPlusImm, ATOMIC_LOAD8_U_I32>;
def : LoadPatImmOff<i32, zext_aload_16, regPlusImm, ATOMIC_LOAD16_U_I32>;
def : LoadPatImmOff<i32, zext_aload_8, or_is_add, ATOMIC_LOAD8_U_I32>;
def : LoadPatImmOff<i32, zext_aload_16, or_is_add, ATOMIC_LOAD16_U_I32>;
def : LoadPatImmOff<i64, zext_aload_8_64, regPlusImm, ATOMIC_LOAD8_U_I64>;
def : LoadPatImmOff<i64, zext_aload_16_64, regPlusImm, ATOMIC_LOAD16_U_I64>;
def : LoadPatImmOff<i64, zext_aload_32_64, regPlusImm, ATOMIC_LOAD32_U_I64>;
def : LoadPatImmOff<i64, zext_aload_8_64, or_is_add, ATOMIC_LOAD8_U_I64>;
def : LoadPatImmOff<i64, zext_aload_16_64, or_is_add, ATOMIC_LOAD16_U_I64>;
def : LoadPatImmOff<i64, zext_aload_32_64, or_is_add, ATOMIC_LOAD32_U_I64>;

// Sign-extending loads with constant offset
def : LoadPatImmOff<i32, atomic_load_8, regPlusImm, ATOMIC_LOAD8_U_I32>;
def : LoadPatImmOff<i32, atomic_load_16, regPlusImm, ATOMIC_LOAD16_U_I32>;
def : LoadPatImmOff<i32, atomic_load_8, or_is_add, ATOMIC_LOAD8_U_I32>;
def : LoadPatImmOff<i32, atomic_load_16, or_is_add, ATOMIC_LOAD16_U_I32>;
def : LoadPatImmOff<i64, anyext_aload_8_64, regPlusImm, ATOMIC_LOAD8_U_I64>;
def : LoadPatImmOff<i64, anyext_aload_16_64, regPlusImm, ATOMIC_LOAD16_U_I64>;
def : LoadPatImmOff<i64, anyext_aload_8_64, or_is_add, ATOMIC_LOAD8_U_I64>;
def : LoadPatImmOff<i64, anyext_aload_16_64, or_is_add, ATOMIC_LOAD16_U_I64>;
// No 32->64 patterns, just use i32.atomic.load and i64.extend_s/i64

def : LoadPatGlobalAddr<i32, zext_aload_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatGlobalAddr<i32, zext_aload_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatGlobalAddr<i64, zext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatGlobalAddr<i64, zext_aload_16_64, ATOMIC_LOAD16_U_I64>;
def : LoadPatGlobalAddr<i64, zext_aload_32_64, ATOMIC_LOAD32_U_I64>;
def : LoadPatGlobalAddr<i32, atomic_load_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatGlobalAddr<i32, atomic_load_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatGlobalAddr<i64, anyext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatGlobalAddr<i64, anyext_aload_16_64, ATOMIC_LOAD16_U_I64>;

def : LoadPatExternalSym<i32, zext_aload_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatExternalSym<i32, zext_aload_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatExternalSym<i64, zext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatExternalSym<i64, zext_aload_16_64, ATOMIC_LOAD16_U_I64>;
def : LoadPatExternalSym<i64, zext_aload_32_64, ATOMIC_LOAD32_U_I64>;
def : LoadPatExternalSym<i32, atomic_load_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatExternalSym<i32, atomic_load_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatExternalSym<i64, anyext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatExternalSym<i64, anyext_aload_16_64, ATOMIC_LOAD16_U_I64>;


// Extending loads with just a constant offset
def : LoadPatOffsetOnly<i32, zext_aload_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatOffsetOnly<i32, zext_aload_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatOffsetOnly<i64, zext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatOffsetOnly<i64, zext_aload_16_64, ATOMIC_LOAD16_U_I64>;
def : LoadPatOffsetOnly<i64, zext_aload_32_64, ATOMIC_LOAD32_U_I64>;
def : LoadPatOffsetOnly<i32, atomic_load_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatOffsetOnly<i32, atomic_load_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatOffsetOnly<i64, anyext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatOffsetOnly<i64, anyext_aload_16_64, ATOMIC_LOAD16_U_I64>;

def : LoadPatGlobalAddrOffOnly<i32, zext_aload_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatGlobalAddrOffOnly<i32, zext_aload_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatGlobalAddrOffOnly<i64, zext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatGlobalAddrOffOnly<i64, zext_aload_16_64, ATOMIC_LOAD16_U_I64>;
def : LoadPatGlobalAddrOffOnly<i64, zext_aload_32_64, ATOMIC_LOAD32_U_I64>;
def : LoadPatGlobalAddrOffOnly<i32, atomic_load_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatGlobalAddrOffOnly<i32, atomic_load_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatGlobalAddrOffOnly<i64, anyext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatGlobalAddrOffOnly<i64, anyext_aload_16_64, ATOMIC_LOAD16_U_I64>;

def : LoadPatExternSymOffOnly<i32, zext_aload_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatExternSymOffOnly<i32, zext_aload_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatExternSymOffOnly<i64, zext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatExternSymOffOnly<i64, zext_aload_16_64, ATOMIC_LOAD16_U_I64>;
def : LoadPatExternSymOffOnly<i64, zext_aload_32_64, ATOMIC_LOAD32_U_I64>;
def : LoadPatExternSymOffOnly<i32, atomic_load_8, ATOMIC_LOAD8_U_I32>;
def : LoadPatExternSymOffOnly<i32, atomic_load_16, ATOMIC_LOAD16_U_I32>;
def : LoadPatExternSymOffOnly<i64, anyext_aload_8_64, ATOMIC_LOAD8_U_I64>;
def : LoadPatExternSymOffOnly<i64, anyext_aload_16_64, ATOMIC_LOAD16_U_I64>;


} // Predicates = [HasAtomics]

//===----------------------------------------------------------------------===//
// Atomic stores
//===----------------------------------------------------------------------===//

let Defs = [ARGUMENTS] in {
defm ATOMIC_STORE_I32 : WebAssemblyStore<I32, "i32.atomic.store", 0xfe17>;
defm ATOMIC_STORE_I64 : WebAssemblyStore<I64, "i64.atomic.store", 0xfe18>;
} // Defs = [ARGUMENTS]

// We need an 'atomic' version of store patterns because store and atomic_store
// nodes have different operand orders:
// store: (store $val, $ptr)
// atomic_store: (atomic_store $ptr, $val)

let Predicates = [HasAtomics] in {

// Select stores with no constant offset.
class AStorePatNoOffset<ValueType ty, PatFrag node, NI inst> :
  Pat<(node I32:$addr, ty:$val), (inst 0, 0, $addr, $val)>;
def : AStorePatNoOffset<i32, atomic_store_32, ATOMIC_STORE_I32>;
def : AStorePatNoOffset<i64, atomic_store_64, ATOMIC_STORE_I64>;
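
// Illustrative expansion of the class above for the i32 case (a sketch, not
// an additional pattern):
//   def : Pat<(atomic_store_32 I32:$addr, I32:$val),
//             (ATOMIC_STORE_I32 0, 0, $addr, $val)>;
// Note the ($addr, $val) source order, as opposed to the ($val, $ptr) order
// of a regular store node.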

// Select stores with a constant offset.

// Pattern with address + immediate offset
class AStorePatImmOff<ValueType ty, PatFrag storekind, PatFrag operand,
                      NI inst> :
  Pat<(storekind (operand I32:$addr, imm:$off), ty:$val),
      (inst 0, imm:$off, $addr, ty:$val)>;
def : AStorePatImmOff<i32, atomic_store_32, regPlusImm, ATOMIC_STORE_I32>;
def : AStorePatImmOff<i64, atomic_store_64, regPlusImm, ATOMIC_STORE_I64>;
def : AStorePatImmOff<i32, atomic_store_32, or_is_add, ATOMIC_STORE_I32>;
def : AStorePatImmOff<i64, atomic_store_64, or_is_add, ATOMIC_STORE_I64>;

class AStorePatGlobalAddr<ValueType ty, PatFrag storekind, NI inst> :
  Pat<(storekind (regPlusGA I32:$addr, (WebAssemblywrapper tglobaladdr:$off)),
                 ty:$val),
      (inst 0, tglobaladdr:$off, I32:$addr, ty:$val)>;
def : AStorePatGlobalAddr<i32, atomic_store_32, ATOMIC_STORE_I32>;
def : AStorePatGlobalAddr<i64, atomic_store_64, ATOMIC_STORE_I64>;

class AStorePatExternalSym<ValueType ty, PatFrag storekind, NI inst> :
  Pat<(storekind (add I32:$addr, (WebAssemblywrapper texternalsym:$off)),
                 ty:$val),
      (inst 0, texternalsym:$off, I32:$addr, ty:$val)>;
def : AStorePatExternalSym<i32, atomic_store_32, ATOMIC_STORE_I32>;
def : AStorePatExternalSym<i64, atomic_store_64, ATOMIC_STORE_I64>;

// Select stores with just a constant offset.
class AStorePatOffsetOnly<ValueType ty, PatFrag storekind, NI inst> :
  Pat<(storekind imm:$off, ty:$val),
      (inst 0, imm:$off, (CONST_I32 0), ty:$val)>;
def : AStorePatOffsetOnly<i32, atomic_store_32, ATOMIC_STORE_I32>;
def : AStorePatOffsetOnly<i64, atomic_store_64, ATOMIC_STORE_I64>;

class AStorePatGlobalAddrOffOnly<ValueType ty, PatFrag storekind, NI inst> :
  Pat<(storekind (WebAssemblywrapper tglobaladdr:$off), ty:$val),
      (inst 0, tglobaladdr:$off, (CONST_I32 0), ty:$val)>;
def : AStorePatGlobalAddrOffOnly<i32, atomic_store_32, ATOMIC_STORE_I32>;
def : AStorePatGlobalAddrOffOnly<i64, atomic_store_64, ATOMIC_STORE_I64>;

class AStorePatExternSymOffOnly<ValueType ty, PatFrag storekind, NI inst> :
  Pat<(storekind (WebAssemblywrapper texternalsym:$off), ty:$val),
      (inst 0, texternalsym:$off, (CONST_I32 0), ty:$val)>;
def : AStorePatExternSymOffOnly<i32, atomic_store_32, ATOMIC_STORE_I32>;
def : AStorePatExternSymOffOnly<i64, atomic_store_64, ATOMIC_STORE_I64>;

} // Predicates = [HasAtomics]

// Truncating stores.
let Defs = [ARGUMENTS] in {
defm ATOMIC_STORE8_I32 : WebAssemblyStore<I32, "i32.atomic.store8", 0xfe19>;
defm ATOMIC_STORE16_I32 : WebAssemblyStore<I32, "i32.atomic.store16", 0xfe1a>;
defm ATOMIC_STORE8_I64 : WebAssemblyStore<I64, "i64.atomic.store8", 0xfe1b>;
defm ATOMIC_STORE16_I64 : WebAssemblyStore<I64, "i64.atomic.store16", 0xfe1c>;
defm ATOMIC_STORE32_I64 : WebAssemblyStore<I64, "i64.atomic.store32", 0xfe1d>;
} // Defs = [ARGUMENTS]

// Fragments for truncating stores.

// We don't have single truncating atomic store instructions. For 32-bit
// values, we just need to match bare atomic stores. Truncating stores from
// i64 values, on the other hand, are first truncated to i32.
class trunc_astore_64<PatFrag storekind> :
  PatFrag<(ops node:$addr, node:$val),
          (storekind node:$addr, (i32 (trunc (i64 node:$val))))>;
def trunc_astore_8_64 : trunc_astore_64<atomic_store_8>;
def trunc_astore_16_64 : trunc_astore_64<atomic_store_16>;
def trunc_astore_32_64 : trunc_astore_64<atomic_store_32>;
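
// For illustration (not an additional fragment): trunc_astore_8_64 expands to
//   PatFrag<(ops node:$addr, node:$val),
//           (atomic_store_8 node:$addr, (i32 (trunc (i64 node:$val))))>;
// which is the form the legalizer produces for an atomic store of the low 8
// bits of an i64 value; the patterns below then select i64.atomic.store8
// directly on the original i64 value.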

let Predicates = [HasAtomics] in {

// Truncating stores with no constant offset
def : AStorePatNoOffset<i32, atomic_store_8, ATOMIC_STORE8_I32>;
def : AStorePatNoOffset<i32, atomic_store_16, ATOMIC_STORE16_I32>;
def : AStorePatNoOffset<i64, trunc_astore_8_64, ATOMIC_STORE8_I64>;
def : AStorePatNoOffset<i64, trunc_astore_16_64, ATOMIC_STORE16_I64>;
def : AStorePatNoOffset<i64, trunc_astore_32_64, ATOMIC_STORE32_I64>;

// Truncating stores with a constant offset
def : AStorePatImmOff<i32, atomic_store_8, regPlusImm, ATOMIC_STORE8_I32>;
def : AStorePatImmOff<i32, atomic_store_16, regPlusImm, ATOMIC_STORE16_I32>;
def : AStorePatImmOff<i64, trunc_astore_8_64, regPlusImm, ATOMIC_STORE8_I64>;
def : AStorePatImmOff<i64, trunc_astore_16_64, regPlusImm, ATOMIC_STORE16_I64>;
def : AStorePatImmOff<i64, trunc_astore_32_64, regPlusImm, ATOMIC_STORE32_I64>;
def : AStorePatImmOff<i32, atomic_store_8, or_is_add, ATOMIC_STORE8_I32>;
def : AStorePatImmOff<i32, atomic_store_16, or_is_add, ATOMIC_STORE16_I32>;
def : AStorePatImmOff<i64, trunc_astore_8_64, or_is_add, ATOMIC_STORE8_I64>;
def : AStorePatImmOff<i64, trunc_astore_16_64, or_is_add, ATOMIC_STORE16_I64>;
def : AStorePatImmOff<i64, trunc_astore_32_64, or_is_add, ATOMIC_STORE32_I64>;

def : AStorePatGlobalAddr<i32, atomic_store_8, ATOMIC_STORE8_I32>;
def : AStorePatGlobalAddr<i32, atomic_store_16, ATOMIC_STORE16_I32>;
def : AStorePatGlobalAddr<i64, trunc_astore_8_64, ATOMIC_STORE8_I64>;
def : AStorePatGlobalAddr<i64, trunc_astore_16_64, ATOMIC_STORE16_I64>;
def : AStorePatGlobalAddr<i64, trunc_astore_32_64, ATOMIC_STORE32_I64>;

def : AStorePatExternalSym<i32, atomic_store_8, ATOMIC_STORE8_I32>;
def : AStorePatExternalSym<i32, atomic_store_16, ATOMIC_STORE16_I32>;
def : AStorePatExternalSym<i64, trunc_astore_8_64, ATOMIC_STORE8_I64>;
def : AStorePatExternalSym<i64, trunc_astore_16_64, ATOMIC_STORE16_I64>;
def : AStorePatExternalSym<i64, trunc_astore_32_64, ATOMIC_STORE32_I64>;

// Truncating stores with just a constant offset
def : AStorePatOffsetOnly<i32, atomic_store_8, ATOMIC_STORE8_I32>;
def : AStorePatOffsetOnly<i32, atomic_store_16, ATOMIC_STORE16_I32>;
def : AStorePatOffsetOnly<i64, trunc_astore_8_64, ATOMIC_STORE8_I64>;
def : AStorePatOffsetOnly<i64, trunc_astore_16_64, ATOMIC_STORE16_I64>;
def : AStorePatOffsetOnly<i64, trunc_astore_32_64, ATOMIC_STORE32_I64>;

def : AStorePatGlobalAddrOffOnly<i32, atomic_store_8, ATOMIC_STORE8_I32>;
def : AStorePatGlobalAddrOffOnly<i32, atomic_store_16, ATOMIC_STORE16_I32>;
def : AStorePatGlobalAddrOffOnly<i64, trunc_astore_8_64, ATOMIC_STORE8_I64>;
def : AStorePatGlobalAddrOffOnly<i64, trunc_astore_16_64, ATOMIC_STORE16_I64>;
def : AStorePatGlobalAddrOffOnly<i64, trunc_astore_32_64, ATOMIC_STORE32_I64>;

def : AStorePatExternSymOffOnly<i32, atomic_store_8, ATOMIC_STORE8_I32>;
def : AStorePatExternSymOffOnly<i32, atomic_store_16, ATOMIC_STORE16_I32>;
def : AStorePatExternSymOffOnly<i64, trunc_astore_8_64, ATOMIC_STORE8_I64>;
def : AStorePatExternSymOffOnly<i64, trunc_astore_16_64, ATOMIC_STORE16_I64>;
def : AStorePatExternSymOffOnly<i64, trunc_astore_32_64, ATOMIC_STORE32_I64>;

} // Predicates = [HasAtomics]

//===----------------------------------------------------------------------===//
// Low-level exclusive operations
//===----------------------------------------------------------------------===//

// TODO: add exclusive operations here...

// Load-exclusives.

// Store-exclusives.

// Store-release-exclusives.

// And clear exclusive.