// This file is generated from a similarly-named Perl script in the BoringSSL
// source tree. Do not edit by hand.

#if !defined(__has_feature)
#define __has_feature(x) 0
#endif
#if __has_feature(memory_sanitizer) && !defined(OPENSSL_NO_ASM)
#define OPENSSL_NO_ASM
#endif

#if !defined(OPENSSL_NO_ASM)
#if defined(BORINGSSL_PREFIX)
#include <boringssl_prefix_symbols_asm.h>
#endif
.syntax unified




.text

@ abi_test_trampoline loads callee-saved registers from |state|, calls |func|
@ with |argv|, then saves the callee-saved registers into |state|. It returns
@ the result of |func|. The |unwind| argument is unused.
@ uint32_t abi_test_trampoline(void (*func)(...), CallerState *state,
@                              const uint32_t *argv, size_t argc,
@                              int unwind);
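@ Note: the |state| layout consumed by this function is d8-d15 followed by
@ r4-r11 (with r9 omitted on Apple platforms), matching the vldmia/ldmia and
@ vstmia/stmia sequences below.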

.globl _abi_test_trampoline
.private_extern _abi_test_trampoline
.align 4
_abi_test_trampoline:
Labi_test_trampoline_begin:
    @ Save parameters and all callee-saved registers. For convenience, we
    @ save r9 on iOS even though it's volatile.
    vstmdb sp!, {d8,d9,d10,d11,d12,d13,d14,d15}
    stmdb sp!, {r0,r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11,lr}

    @ Reserve stack space for six (10-4) stack parameters, plus an extra 4
    @ bytes to keep it 8-byte-aligned (see AAPCS, section 5.3).
    sub sp, sp, #28
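    @ Frame layout note: the stmdb above pushed r0-r11 and lr (13 words), so
    @ after the 28-byte reservation the saved parameters sit at [sp, #28]
    @ (func), [sp, #32] (state), [sp, #36] (argv), and [sp, #40] (argc),
    @ which is where they are reloaded below.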

    @ Every register in AAPCS is either non-volatile or a parameter (except
    @ r9 on iOS), so by the time of the actual call this code has no scratch
    @ registers left. First fill in stack parameters while there are registers
    @ to spare.
    cmp r3, #4
    bls Lstack_args_done
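    @ (Under AAPCS the first four arguments travel in r0-r3, so only
    @ argv[4..argc) needs to be copied into the reserved stack area.)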
    mov r4, sp                 @ r4 is the output pointer.
    add r5, r2, r3, lsl #2     @ Set r5 to the end of argv.
    add r2, r2, #16            @ Skip four arguments.
Lstack_args_loop:
    ldr r6, [r2], #4
    cmp r2, r5
    str r6, [r4], #4
    bne Lstack_args_loop

Lstack_args_done:
    @ Load registers from |r1|.
    vldmia r1!, {d8,d9,d10,d11,d12,d13,d14,d15}
#if defined(__APPLE__)
    @ r9 is not volatile on iOS.
    ldmia r1!, {r4,r5,r6,r7,r8,r10-r11}
#else
    ldmia r1!, {r4,r5,r6,r7,r8,r9,r10,r11}
#endif

    @ Load register parameters. This uses up our remaining registers, so we
    @ repurpose lr as scratch space.
    ldr r3, [sp, #40]    @ Reload argc.
    ldr lr, [sp, #36]    @ Load argv into lr.
    cmp r3, #3
    bhi Larg_r3
    beq Larg_r2
    cmp r3, #1
    bhi Larg_r1
    beq Larg_r0
    b Largs_done

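    @ The ladder below falls through: entering at Larg_rN loads argv[N] down
    @ through argv[0] into rN..r0, as selected by the argc comparisons above.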
Larg_r3:
    ldr r3, [lr, #12]    @ argv[3]
Larg_r2:
    ldr r2, [lr, #8]     @ argv[2]
Larg_r1:
    ldr r1, [lr, #4]     @ argv[1]
Larg_r0:
    ldr r0, [lr]         @ argv[0]
Largs_done:

    @ With every other register in use, load the function pointer into lr
    @ and call the function.
    ldr lr, [sp, #28]
    blx lr

    @ r1-r3 are free for use again. The trampoline only supports
    @ single-return functions. Pass r4-r11 to the caller.
    ldr r1, [sp, #32]
    vstmia r1!, {d8,d9,d10,d11,d12,d13,d14,d15}
#if defined(__APPLE__)
    @ r9 is not volatile on iOS.
    stmia r1!, {r4,r5,r6,r7,r8,r10-r11}
#else
    stmia r1!, {r4,r5,r6,r7,r8,r9,r10,r11}
#endif

    @ Unwind the stack and restore registers.
    add sp, sp, #44    @ 44 = 28+16
    ldmia sp!, {r4,r5,r6,r7,r8,r9,r10,r11,lr}    @ Skip r0-r3 (see +16 above).
    vldmia sp!, {d8,d9,d10,d11,d12,d13,d14,d15}

    bx lr


.globl _abi_test_clobber_r0
.private_extern _abi_test_clobber_r0
.align 4
_abi_test_clobber_r0:
    mov r0, #0
    bx lr


.globl _abi_test_clobber_r1
.private_extern _abi_test_clobber_r1
.align 4
_abi_test_clobber_r1:
    mov r1, #0
    bx lr


.globl _abi_test_clobber_r2
.private_extern _abi_test_clobber_r2
.align 4
_abi_test_clobber_r2:
    mov r2, #0
    bx lr


.globl _abi_test_clobber_r3
.private_extern _abi_test_clobber_r3
.align 4
_abi_test_clobber_r3:
    mov r3, #0
    bx lr


.globl _abi_test_clobber_r4
.private_extern _abi_test_clobber_r4
.align 4
_abi_test_clobber_r4:
    mov r4, #0
    bx lr


.globl _abi_test_clobber_r5
.private_extern _abi_test_clobber_r5
.align 4
_abi_test_clobber_r5:
    mov r5, #0
    bx lr


.globl _abi_test_clobber_r6
.private_extern _abi_test_clobber_r6
.align 4
_abi_test_clobber_r6:
    mov r6, #0
    bx lr


.globl _abi_test_clobber_r7
.private_extern _abi_test_clobber_r7
.align 4
_abi_test_clobber_r7:
    mov r7, #0
    bx lr


.globl _abi_test_clobber_r8
.private_extern _abi_test_clobber_r8
.align 4
_abi_test_clobber_r8:
    mov r8, #0
    bx lr


.globl _abi_test_clobber_r9
.private_extern _abi_test_clobber_r9
.align 4
_abi_test_clobber_r9:
    mov r9, #0
    bx lr


.globl _abi_test_clobber_r10
.private_extern _abi_test_clobber_r10
.align 4
_abi_test_clobber_r10:
    mov r10, #0
    bx lr


.globl _abi_test_clobber_r11
.private_extern _abi_test_clobber_r11
.align 4
_abi_test_clobber_r11:
    mov r11, #0
    bx lr


.globl _abi_test_clobber_r12
.private_extern _abi_test_clobber_r12
.align 4
_abi_test_clobber_r12:
    mov r12, #0
    bx lr


.globl _abi_test_clobber_d0
.private_extern _abi_test_clobber_d0
.align 4
_abi_test_clobber_d0:
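    @ dN overlaps s(2N) and s(2N+1) in the VFP register file, so each of the
    @ double-precision clobber functions below clears its register with two
    @ single-precision vmov writes.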
    mov r0, #0
    vmov s0, r0
    vmov s1, r0
    bx lr


.globl _abi_test_clobber_d1
.private_extern _abi_test_clobber_d1
.align 4
_abi_test_clobber_d1:
    mov r0, #0
    vmov s2, r0
    vmov s3, r0
    bx lr


.globl _abi_test_clobber_d2
.private_extern _abi_test_clobber_d2
.align 4
_abi_test_clobber_d2:
    mov r0, #0
    vmov s4, r0
    vmov s5, r0
    bx lr


.globl _abi_test_clobber_d3
.private_extern _abi_test_clobber_d3
.align 4
_abi_test_clobber_d3:
    mov r0, #0
    vmov s6, r0
    vmov s7, r0
    bx lr


.globl _abi_test_clobber_d4
.private_extern _abi_test_clobber_d4
.align 4
_abi_test_clobber_d4:
    mov r0, #0
    vmov s8, r0
    vmov s9, r0
    bx lr


.globl _abi_test_clobber_d5
.private_extern _abi_test_clobber_d5
.align 4
_abi_test_clobber_d5:
    mov r0, #0
    vmov s10, r0
    vmov s11, r0
    bx lr


.globl _abi_test_clobber_d6
.private_extern _abi_test_clobber_d6
.align 4
_abi_test_clobber_d6:
    mov r0, #0
    vmov s12, r0
    vmov s13, r0
    bx lr


.globl _abi_test_clobber_d7
.private_extern _abi_test_clobber_d7
.align 4
_abi_test_clobber_d7:
    mov r0, #0
    vmov s14, r0
    vmov s15, r0
    bx lr


.globl _abi_test_clobber_d8
.private_extern _abi_test_clobber_d8
.align 4
_abi_test_clobber_d8:
    mov r0, #0
    vmov s16, r0
    vmov s17, r0
    bx lr


.globl _abi_test_clobber_d9
.private_extern _abi_test_clobber_d9
.align 4
_abi_test_clobber_d9:
    mov r0, #0
    vmov s18, r0
    vmov s19, r0
    bx lr


.globl _abi_test_clobber_d10
.private_extern _abi_test_clobber_d10
.align 4
_abi_test_clobber_d10:
    mov r0, #0
    vmov s20, r0
    vmov s21, r0
    bx lr


.globl _abi_test_clobber_d11
.private_extern _abi_test_clobber_d11
.align 4
_abi_test_clobber_d11:
    mov r0, #0
    vmov s22, r0
    vmov s23, r0
    bx lr


.globl _abi_test_clobber_d12
.private_extern _abi_test_clobber_d12
.align 4
_abi_test_clobber_d12:
    mov r0, #0
    vmov s24, r0
    vmov s25, r0
    bx lr


.globl _abi_test_clobber_d13
.private_extern _abi_test_clobber_d13
.align 4
_abi_test_clobber_d13:
    mov r0, #0
    vmov s26, r0
    vmov s27, r0
    bx lr


.globl _abi_test_clobber_d14
.private_extern _abi_test_clobber_d14
.align 4
_abi_test_clobber_d14:
    mov r0, #0
    vmov s28, r0
    vmov s29, r0
    bx lr


.globl _abi_test_clobber_d15
.private_extern _abi_test_clobber_d15
.align 4
_abi_test_clobber_d15:
    mov r0, #0
    vmov s30, r0
    vmov s31, r0
    bx lr

#endif  // !OPENSSL_NO_ASM