/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_arm64.S"

#include "arch/quick_alloc_entrypoints.S"


    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveAll)
     */
.macro SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    adrp xIP0, :got:_ZN3art7Runtime9instance_E
    ldr xIP0, [xIP0, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr xIP0, [xIP0]  // xIP0 = & (art::Runtime * art::Runtime.instance_) .

    // xIP0 = (ArtMethod*) Runtime.instance_.callee_save_methods[kSaveAll] .
    THIS_LOAD_REQUIRES_READ_BARRIER
    ldr wIP0, [xIP0, RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET]

    sub sp, sp, #176
    .cfi_adjust_cfa_offset 176

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVE != 176)
#error "SAVE_ALL_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif

    // FP callee-saves
    stp d8, d9,   [sp, #8]
    stp d10, d11, [sp, #24]
    stp d12, d13, [sp, #40]
    stp d14, d15, [sp, #56]

    // Thread register and x19 (callee-save)
    stp xSELF, x19, [sp, #72]
    .cfi_rel_offset x18, 72
    .cfi_rel_offset x19, 80

    // callee-saves
    stp x20, x21, [sp, #88]
    .cfi_rel_offset x20, 88
    .cfi_rel_offset x21, 96

    stp x22, x23, [sp, #104]
    .cfi_rel_offset x22, 104
    .cfi_rel_offset x23, 112

    stp x24, x25, [sp, #120]
    .cfi_rel_offset x24, 120
    .cfi_rel_offset x25, 128

    stp x26, x27, [sp, #136]
    .cfi_rel_offset x26, 136
    .cfi_rel_offset x27, 144

    stp x28, x29, [sp, #152]
    .cfi_rel_offset x28, 152
    .cfi_rel_offset x29, 160

    str xLR, [sp, #168]
    .cfi_rel_offset x30, 168

    // Store the appropriate callee-save method.
    str xIP0, [sp]    // Store ArtMethod* Runtime::callee_save_methods_[kSaveAll]
    // Place sp in Thread::Current()->top_quick_frame.
    mov xIP0, sp
    str xIP0, [xSELF, # THREAD_TOP_QUICK_FRAME_OFFSET]
.endm
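// For reference, the 176-byte kSaveAll frame built above is laid out as:
//   [sp, #0]           ArtMethod* of the kSaveAll callee-save method
//   [sp, #8]..#64      d8-d15 (FP callee-saves)
//   [sp, #72]..#160    xSELF(x18), x19-x29 (core callee-saves)
//   [sp, #168]         LR (x30)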

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsOnly).
     */
.macro SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
    adrp xIP0, :got:_ZN3art7Runtime9instance_E
    ldr xIP0, [xIP0, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr xIP0, [xIP0]  // xIP0 = & (art::Runtime * art::Runtime.instance_) .

    // xIP0 = (ArtMethod*) Runtime.instance_.callee_save_methods[kRefsOnly] .
    THIS_LOAD_REQUIRES_READ_BARRIER
    ldr wIP0, [xIP0, RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET]

    sub sp, sp, #112
    .cfi_adjust_cfa_offset 112

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE != 112)
#error "REFS_ONLY_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif

    // Callee-saves
    stp x19, x20, [sp, #16]
    .cfi_rel_offset x19, 16
    .cfi_rel_offset x20, 24

    stp x21, x22, [sp, #32]
    .cfi_rel_offset x21, 32
    .cfi_rel_offset x22, 40

    stp x23, x24, [sp, #48]
    .cfi_rel_offset x23, 48
    .cfi_rel_offset x24, 56

    stp x25, x26, [sp, #64]
    .cfi_rel_offset x25, 64
    .cfi_rel_offset x26, 72

    stp x27, x28, [sp, #80]
    .cfi_rel_offset x27, 80
    .cfi_rel_offset x28, 88

    // x29(callee-save) and LR
    stp x29, xLR, [sp, #96]
    .cfi_rel_offset x29, 96
    .cfi_rel_offset x30, 104

    // Save xSELF to xETR.
    mov xETR, xSELF

    // Store the appropriate callee-save method.
    str xIP0, [sp]    // Store ArtMethod* Runtime::callee_save_methods_[kRefsOnly]
    // Place sp in Thread::Current()->top_quick_frame.
    mov xIP0, sp
    str xIP0, [xSELF, # THREAD_TOP_QUICK_FRAME_OFFSET]
.endm
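// The 112-byte kRefsOnly frame above holds the ArtMethod* at [sp] and x19-x29 plus LR at
// [sp, #16]..#104. xSELF is additionally mirrored into xETR (a callee-saved register) so the
// RESTORE/POP macros below can put the thread pointer back even if x18 is scratched across the call.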

// TODO: Probably no need to restore registers preserved by aapcs64.
.macro RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
    // Restore xSELF.
    mov xSELF, xETR

    // Callee-saves
    ldp x19, x20, [sp, #16]
    .cfi_restore x19
    .cfi_restore x20

    ldp x21, x22, [sp, #32]
    .cfi_restore x21
    .cfi_restore x22

    ldp x23, x24, [sp, #48]
    .cfi_restore x23
    .cfi_restore x24

    ldp x25, x26, [sp, #64]
    .cfi_restore x25
    .cfi_restore x26

    ldp x27, x28, [sp, #80]
    .cfi_restore x27
    .cfi_restore x28

    // x29(callee-save) and LR
    ldp x29, xLR, [sp, #96]
    .cfi_restore x29
    .cfi_restore x30

    add sp, sp, #112
    .cfi_adjust_cfa_offset -112
.endm

.macro POP_REFS_ONLY_CALLEE_SAVE_FRAME
    // Restore xSELF as it might be scratched.
    mov xSELF, xETR
    // ETR
    ldr xETR, [sp, #16]
    .cfi_restore x21

    add sp, sp, #112
    .cfi_adjust_cfa_offset -112
.endm

.macro RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
    ret
.endm


.macro SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
    sub sp, sp, #224
    .cfi_adjust_cfa_offset 224

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE != 224)
#error "REFS_AND_ARGS_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif

    // FP args.
    stp d0, d1, [sp, #8]
    stp d2, d3, [sp, #24]
    stp d4, d5, [sp, #40]
    stp d6, d7, [sp, #56]

    // Core args.
    str x1, [sp, 72]
    .cfi_rel_offset x1, 72

    stp x2, x3, [sp, #80]
    .cfi_rel_offset x2, 80
    .cfi_rel_offset x3, 88

    stp x4, x5, [sp, #96]
    .cfi_rel_offset x4, 96
    .cfi_rel_offset x5, 104

    stp x6, x7, [sp, #112]
    .cfi_rel_offset x6, 112
    .cfi_rel_offset x7, 120

    // Callee-saves.
    stp x19, x20, [sp, #128]
    .cfi_rel_offset x19, 128
    .cfi_rel_offset x20, 136

    stp x21, x22, [sp, #144]
    .cfi_rel_offset x21, 144
    .cfi_rel_offset x22, 152

    stp x23, x24, [sp, #160]
    .cfi_rel_offset x23, 160
    .cfi_rel_offset x24, 168

    stp x25, x26, [sp, #176]
    .cfi_rel_offset x25, 176
    .cfi_rel_offset x26, 184

    stp x27, x28, [sp, #192]
    .cfi_rel_offset x27, 192
    .cfi_rel_offset x28, 200

    // x29(callee-save) and LR
    stp x29, xLR, [sp, #208]
    .cfi_rel_offset x29, 208
    .cfi_rel_offset x30, 216

    // Save xSELF to xETR.
    mov xETR, xSELF
.endm
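// The 224-byte kRefsAndArgs frame built here: ArtMethod* slot at [sp], FP argument registers
// d0-d7 at [sp, #8]..#64, core argument registers x1-x7 at [sp, #72]..#120, callee-saves x19-x29 at
// [sp, #128]..#208, LR at [sp, #216], with xSELF again mirrored into xETR.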

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsAndArgs).
     *
     * TODO This is probably too conservative - saving FP & LR.
     */
.macro SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME
    adrp xIP0, :got:_ZN3art7Runtime9instance_E
    ldr xIP0, [xIP0, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr xIP0, [xIP0]  // xIP0 = & (art::Runtime * art::Runtime.instance_) .

    // xIP0 = (ArtMethod*) Runtime.instance_.callee_save_methods[kRefsAndArgs] .
    THIS_LOAD_REQUIRES_READ_BARRIER
    ldr wIP0, [xIP0, RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET]

    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL

    str xIP0, [sp]    // Store ArtMethod* Runtime::callee_save_methods_[kRefsAndArgs]
    // Place sp in Thread::Current()->top_quick_frame.
    mov xIP0, sp
    str xIP0, [xSELF, # THREAD_TOP_QUICK_FRAME_OFFSET]
.endm

.macro SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_X0
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
    str x0, [sp, #0]  // Store ArtMethod* to bottom of stack.
    // Place sp in Thread::Current()->top_quick_frame.
    mov xIP0, sp
    str xIP0, [xSELF, # THREAD_TOP_QUICK_FRAME_OFFSET]
.endm

// TODO: Probably no need to restore registers preserved by aapcs64.
.macro RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
    // Restore xSELF.
    mov xSELF, xETR

    // FP args.
    ldp d0, d1, [sp, #8]
    ldp d2, d3, [sp, #24]
    ldp d4, d5, [sp, #40]
    ldp d6, d7, [sp, #56]

    // Core args.
    ldr x1, [sp, 72]
    .cfi_restore x1

    ldp x2, x3, [sp, #80]
    .cfi_restore x2
    .cfi_restore x3

    ldp x4, x5, [sp, #96]
    .cfi_restore x4
    .cfi_restore x5

    ldp x6, x7, [sp, #112]
    .cfi_restore x6
    .cfi_restore x7

    // Callee-saves.
    ldp x19, x20, [sp, #128]
    .cfi_restore x19
    .cfi_restore x20

    ldp x21, x22, [sp, #144]
    .cfi_restore x21
    .cfi_restore x22

    ldp x23, x24, [sp, #160]
    .cfi_restore x23
    .cfi_restore x24

    ldp x25, x26, [sp, #176]
    .cfi_restore x25
    .cfi_restore x26

    ldp x27, x28, [sp, #192]
    .cfi_restore x27
    .cfi_restore x28

    // x29(callee-save) and LR
    ldp x29, xLR, [sp, #208]
    .cfi_restore x29
    .cfi_restore x30

    add sp, sp, #224
    .cfi_adjust_cfa_offset -224
.endm

.macro RETURN_IF_RESULT_IS_ZERO
    cbnz x0, 1f  // result non-zero branch over
    ret          // return
1:
.endm

.macro RETURN_IF_RESULT_IS_NON_ZERO
    cbz x0, 1f   // result zero branch over
    ret          // return
1:
.endm

    /*
     * Macro that sets up a call through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_
     */
.macro DELIVER_PENDING_EXCEPTION
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov x0, xSELF

    // Point of no return.
    b artDeliverPendingExceptionFromCode  // artDeliverPendingExceptionFromCode(Thread*)
    brk 0  // Unreached
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_REG reg
    ldr \reg, [xSELF, # THREAD_EXCEPTION_OFFSET]  // Get exception field.
    cbnz \reg, 1f
    ret
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION
    RETURN_OR_DELIVER_PENDING_EXCEPTION_REG xIP0
.endm

// Same as above with x1. This is helpful in stubs that want to avoid clobbering another register.
.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
    RETURN_OR_DELIVER_PENDING_EXCEPTION_REG x1
.endm

.macro RETURN_IF_W0_IS_ZERO_OR_DELIVER
    cbnz w0, 1f  // result non-zero branch over
    ret          // return
1:
    DELIVER_PENDING_EXCEPTION
.endm
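// The *_OR_DELIVER helpers above either return to the caller or fall through into
// DELIVER_PENDING_EXCEPTION. They are used directly at the end of stubs such as
// art_quick_lock_object and are also passed as the \return argument of the *_DOWNCALL macros
// later in this file.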

.macro NO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context
    mov x0, xSELF                     // pass Thread::Current
    b \cxx_name                       // \cxx_name(Thread*)
END \c_name
.endm

.macro ONE_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context.
    mov x1, xSELF                     // pass Thread::Current.
    b \cxx_name                       // \cxx_name(arg, Thread*).
    brk 0
END \c_name
.endm

.macro TWO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context
    mov x2, xSELF                     // pass Thread::Current
    b \cxx_name                       // \cxx_name(arg1, arg2, Thread*)
    brk 0
END \c_name
.endm

    /*
     * Called by managed code, saves callee saves and then calls artThrowException
     * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode

    /*
     * Called by managed code to create and deliver a NullPointerException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode

    /*
     * Called by managed code to create and deliver an ArithmeticException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero, artThrowDivZeroFromCode

    /*
     * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
     * index, arg2 holds limit.
     */
TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromCode

    /*
     * Called by managed code to create and deliver a StackOverflowError.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode

    /*
     * Called by managed code to create and deliver a NoSuchMethodError.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFromCode

    /*
     * All generated callsites for interface invokes and invocation slow paths will load arguments
     * as usual - except instead of loading arg0/x0 with the target Method*, arg0/x0 will contain
     * the method_idx. This wrapper will save arg1-arg3, load the caller's Method*, align the
     * stack and call the appropriate C helper.
     * NOTE: "this" is first visible argument of the target, and so can be found in arg1/x1.
     *
     * The helper will attempt to locate the target and return a 128-bit result in x0/x1 consisting
     * of the target Method* in x0 and method->code_ in x1.
     *
     * If unsuccessful, the helper will return null/????. There will be a pending exception in the
     * thread and we branch to another stub to deliver it.
     *
     * On success this wrapper will restore arguments and *jump* to the target, leaving the lr
     * pointing back to the original caller.
     *
     * Adapted from ARM32 code.
     *
     * Clobbers xIP0.
     */
.macro INVOKE_TRAMPOLINE c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME  // save callee saves in case allocation triggers GC
    // Helper signature is always
    // (method_idx, *this_object, *caller_method, *self, sp)

    ldr w2, [sp, #FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE]  // pass caller Method*
    mov x3, xSELF                          // pass Thread::Current
    mov x4, sp
    bl \cxx_name                           // (method_idx, this, caller, Thread*, SP)
    mov xIP0, x1                           // save Method*->code_
    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
    cbz x0, 1f                             // did we find the target? if not go to exception delivery
    br xIP0                                // tail call to target
1:
    DELIVER_PENDING_EXCEPTION
END \c_name
.endm

INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline, artInvokeInterfaceTrampoline
INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck

INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck


.macro INVOKE_STUB_CREATE_FRAME

SAVE_SIZE=15*8   // x4, x5, x19, x20, x21, x22, x23, x24, x25, x26, x27, x28, SP, LR, FP saved.
SAVE_SIZE_AND_METHOD=SAVE_SIZE+STACK_REFERENCE_SIZE


    mov x9, sp                          // Save stack pointer.
    .cfi_register sp,x9

    add x10, x2, # SAVE_SIZE_AND_METHOD // calculate size of frame.
    sub x10, sp, x10                    // Calculate SP position - saves + ArtMethod* + args
    and x10, x10, # ~0xf                // Enforce 16 byte stack alignment.
    mov sp, x10                         // Set new SP.

    sub x10, x9, #SAVE_SIZE             // Calculate new FP (later). Done here as we must move SP
    .cfi_def_cfa_register x10           // before this.
    .cfi_adjust_cfa_offset SAVE_SIZE

    str x28, [x10, #112]
    .cfi_rel_offset x28, 112

    stp x26, x27, [x10, #96]
    .cfi_rel_offset x26, 96
    .cfi_rel_offset x27, 104

    stp x24, x25, [x10, #80]
    .cfi_rel_offset x24, 80
    .cfi_rel_offset x25, 88

    stp x22, x23, [x10, #64]
    .cfi_rel_offset x22, 64
    .cfi_rel_offset x23, 72

    stp x20, x21, [x10, #48]
    .cfi_rel_offset x20, 48
    .cfi_rel_offset x21, 56

    stp x9, x19, [x10, #32]             // Save old stack pointer and x19.
    .cfi_rel_offset sp, 32
    .cfi_rel_offset x19, 40

    stp x4, x5, [x10, #16]              // Save result and shorty addresses.
    .cfi_rel_offset x4, 16
    .cfi_rel_offset x5, 24

    stp xFP, xLR, [x10]                 // Store LR & FP.
    .cfi_rel_offset x29, 0
    .cfi_rel_offset x30, 8

    mov xFP, x10                        // Use xFP now, as it's callee-saved.
    .cfi_def_cfa_register x29
    mov xSELF, x3                       // Move thread pointer into SELF register.

    // Copy arguments into stack frame.
    // Use simple copy routine for now.
    // 4 bytes per slot.
    // X1 - source address
    // W2 - args length
    // X9 - destination address.
    // W10 - temporary
    add x9, sp, #4                      // Destination address is bottom of stack + null.

    // Use \@ to differentiate between macro invocations.
.LcopyParams\@:
    cmp w2, #0
    beq .LendCopyParams\@
    sub w2, w2, #4                      // Need 65536 bytes of range.
    ldr w10, [x1, x2]
    str w10, [x9, x2]

    b .LcopyParams\@

.LendCopyParams\@:

    // Store null into StackReference<Method>* at bottom of frame.
    str wzr, [sp]

#if (STACK_REFERENCE_SIZE != 4)
#error "STACK_REFERENCE_SIZE(ARM64) size not as expected."
#endif
.endm
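// INVOKE_STUB_CREATE_FRAME leaves xFP pointing at the register-save area and sp at the out-args
// area, matching the frame diagram in the art_quick_invoke_stub comment below: FP'/LR' at [xFP],
// the result and shorty pointers at [xFP, #16], the old SP and x19 at [xFP, #32], and the copied
// out-args just above the null StackReference<ArtMethod> slot at [sp].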

.macro INVOKE_STUB_CALL_AND_RETURN

    // load method-> METHOD_QUICK_CODE_OFFSET
    ldr x9, [x0, #MIRROR_ART_METHOD_QUICK_CODE_OFFSET_64]
    // Branch to method.
    blr x9

    // Restore return value address and shorty address.
    ldp x4, x5, [xFP, #16]
    .cfi_restore x4
    .cfi_restore x5

    ldr x28, [xFP, #112]
    .cfi_restore x28

    ldp x26, x27, [xFP, #96]
    .cfi_restore x26
    .cfi_restore x27

    ldp x24, x25, [xFP, #80]
    .cfi_restore x24
    .cfi_restore x25

    ldp x22, x23, [xFP, #64]
    .cfi_restore x22
    .cfi_restore x23

    ldp x20, x21, [xFP, #48]
    .cfi_restore x20
    .cfi_restore x21

    // Store result (w0/x0/s0/d0) appropriately, depending on resultType.
    ldrb w10, [x5]

    // Don't set anything for a void type.
    cmp w10, #'V'
    beq .Lexit_art_quick_invoke_stub\@

    cmp w10, #'D'
    bne .Lreturn_is_float\@
    str d0, [x4]
    b .Lexit_art_quick_invoke_stub\@

.Lreturn_is_float\@:
    cmp w10, #'F'
    bne .Lreturn_is_int\@
    str s0, [x4]
    b .Lexit_art_quick_invoke_stub\@

    // Just store x0. Doesn't matter if it is 64 or 32 bits.
.Lreturn_is_int\@:
    str x0, [x4]

.Lexit_art_quick_invoke_stub\@:
    ldp x2, x19, [xFP, #32]   // Restore stack pointer and x19.
    .cfi_restore x19
    mov sp, x2
    .cfi_restore sp

    ldp xFP, xLR, [xFP]       // Restore old frame pointer and link register.
    .cfi_restore x29
    .cfi_restore x30

    ret

.endm


/*
 *  extern"C" void art_quick_invoke_stub(ArtMethod *method,   x0
 *                                       uint32_t  *args,     x1
 *                                       uint32_t argsize,    w2
 *                                       Thread *self,        x3
 *                                       JValue *result,      x4
 *                                       char   *shorty);     x5
 *  +----------------------+
 *  |                      |
 *  |  C/C++ frame         |
 *  |       LR''           |
 *  |       FP''           | <- SP'
 *  +----------------------+
 *  +----------------------+
 *  |        x28           | <- TODO: Remove callee-saves.
 *  |         :            |
 *  |        x19           |
 *  |        SP'           |
 *  |        X5            |
 *  |        X4            |        Saved registers
 *  |        LR'           |
 *  |        FP'           | <- FP
 *  +----------------------+
 *  | uint32_t out[n-1]    |
 *  |    :      :          |        Outs
 *  | uint32_t out[0]      |
 *  | StackRef<ArtMethod>  | <- SP  value=null
 *  +----------------------+
 *
 *  Outgoing registers:
 *  x0    - Method*
 *  x1-x7 - integer parameters.
 *  d0-d7 - Floating point parameters.
 *  xSELF = self
 *  SP = & of ArtMethod*
 *  x1 = "this" pointer.
 *
 */
ENTRY art_quick_invoke_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr x11, .LstoreW2
    adr x12, .LstoreX2
    adr x13, .LstoreS0
    adr x14, .LstoreD0

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1       // Load shorty address, plus one to skip return value.
    ldr w1, [x9], #4      // Load "this" parameter, and increment arg pointer.

    // Loop to fill registers.
.LfillRegisters:
    ldrb w17, [x10], #1        // Load next character in signature, and increment.
    cbz w17, .LcallFunction    // Exit at end of signature. Shorty 0 terminated.

    cmp w17, #'F'              // is this a float?
    bne .LisDouble

    cmp x15, # 8*12            // Skip this load if all registers full.
    beq .Ladvance4

    add x17, x13, x15          // Calculate subroutine to jump to.
    br x17

.LisDouble:
    cmp w17, #'D'              // is this a double?
    bne .LisLong

    cmp x15, # 8*12            // Skip this load if all registers full.
    beq .Ladvance8

    add x17, x14, x15          // Calculate subroutine to jump to.
    br x17

.LisLong:
    cmp w17, #'J'              // is this a long?
    bne .LisOther

    cmp x8, # 6*12             // Skip this load if all registers full.
    beq .Ladvance8

    add x17, x12, x8           // Calculate subroutine to jump to.
    br x17

.LisOther:                     // Everything else takes one vReg.
    cmp x8, # 6*12             // Skip this load if all registers full.
    beq .Ladvance4

    add x17, x11, x8           // Calculate subroutine to jump to.
    br x17

.Ladvance4:
    add x9, x9, #4
    b .LfillRegisters

.Ladvance8:
    add x9, x9, #8
    b .LfillRegisters

// Macro for loading a parameter into a register.
//  counter - the register with offset into these tables
//  size - the size of the register - 4 or 8 bytes.
//  register - the name of the register to be loaded.
.macro LOADREG counter size register return
    ldr \register , [x9], #\size
    add \counter, \counter, 12
    b \return
.endm
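// Each LOADREG expansion is three 4-byte instructions (ldr/add/b), so stepping the x8/x15 counters
// by 12 advances to the next entry of the .Lstore* jump tables below. The bounds checks in the fill
// loops (6*12 or 7*12 for core registers, 8*12 for FP registers) match the number of entries in the
// corresponding table.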

// Store ints.
.LstoreW2:
    LOADREG x8 4 w2 .LfillRegisters
    LOADREG x8 4 w3 .LfillRegisters
    LOADREG x8 4 w4 .LfillRegisters
    LOADREG x8 4 w5 .LfillRegisters
    LOADREG x8 4 w6 .LfillRegisters
    LOADREG x8 4 w7 .LfillRegisters

// Store longs.
.LstoreX2:
    LOADREG x8 8 x2 .LfillRegisters
    LOADREG x8 8 x3 .LfillRegisters
    LOADREG x8 8 x4 .LfillRegisters
    LOADREG x8 8 x5 .LfillRegisters
    LOADREG x8 8 x6 .LfillRegisters
    LOADREG x8 8 x7 .LfillRegisters

// Store singles.
.LstoreS0:
    LOADREG x15 4 s0 .LfillRegisters
    LOADREG x15 4 s1 .LfillRegisters
    LOADREG x15 4 s2 .LfillRegisters
    LOADREG x15 4 s3 .LfillRegisters
    LOADREG x15 4 s4 .LfillRegisters
    LOADREG x15 4 s5 .LfillRegisters
    LOADREG x15 4 s6 .LfillRegisters
    LOADREG x15 4 s7 .LfillRegisters

// Store doubles.
.LstoreD0:
    LOADREG x15 8 d0 .LfillRegisters
    LOADREG x15 8 d1 .LfillRegisters
    LOADREG x15 8 d2 .LfillRegisters
    LOADREG x15 8 d3 .LfillRegisters
    LOADREG x15 8 d4 .LfillRegisters
    LOADREG x15 8 d5 .LfillRegisters
    LOADREG x15 8 d6 .LfillRegisters
    LOADREG x15 8 d7 .LfillRegisters


.LcallFunction:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_stub

/*  extern"C"
 *     void art_quick_invoke_static_stub(ArtMethod *method,   x0
 *                                       uint32_t  *args,     x1
 *                                       uint32_t argsize,    w2
 *                                       Thread *self,        x3
 *                                       JValue *result,      x4
 *                                       char   *shorty);     x5
 */
ENTRY art_quick_invoke_static_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr x11, .LstoreW1_2
    adr x12, .LstoreX1_2
    adr x13, .LstoreS0_2
    adr x14, .LstoreD0_2

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1       // Load shorty address, plus one to skip return value.

    // Loop to fill registers.
.LfillRegisters2:
    ldrb w17, [x10], #1        // Load next character in signature, and increment.
    cbz w17, .LcallFunction2   // Exit at end of signature. Shorty 0 terminated.

    cmp w17, #'F'              // is this a float?
    bne .LisDouble2

    cmp x15, # 8*12            // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x13, x15          // Calculate subroutine to jump to.
    br x17

.LisDouble2:
    cmp w17, #'D'              // is this a double?
    bne .LisLong2

    cmp x15, # 8*12            // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x14, x15          // Calculate subroutine to jump to.
    br x17

.LisLong2:
    cmp w17, #'J'              // is this a long?
    bne .LisOther2

    cmp x8, # 7*12             // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x12, x8           // Calculate subroutine to jump to.
    br x17

.LisOther2:                    // Everything else takes one vReg.
    cmp x8, # 7*12             // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x11, x8           // Calculate subroutine to jump to.
    br x17

.Ladvance4_2:
    add x9, x9, #4
    b .LfillRegisters2

.Ladvance8_2:
    add x9, x9, #8
    b .LfillRegisters2

// Store ints.
.LstoreW1_2:
    LOADREG x8 4 w1 .LfillRegisters2
    LOADREG x8 4 w2 .LfillRegisters2
    LOADREG x8 4 w3 .LfillRegisters2
    LOADREG x8 4 w4 .LfillRegisters2
    LOADREG x8 4 w5 .LfillRegisters2
    LOADREG x8 4 w6 .LfillRegisters2
    LOADREG x8 4 w7 .LfillRegisters2

// Store longs.
.LstoreX1_2:
    LOADREG x8 8 x1 .LfillRegisters2
    LOADREG x8 8 x2 .LfillRegisters2
    LOADREG x8 8 x3 .LfillRegisters2
    LOADREG x8 8 x4 .LfillRegisters2
    LOADREG x8 8 x5 .LfillRegisters2
    LOADREG x8 8 x6 .LfillRegisters2
    LOADREG x8 8 x7 .LfillRegisters2

// Store singles.
.LstoreS0_2:
    LOADREG x15 4 s0 .LfillRegisters2
    LOADREG x15 4 s1 .LfillRegisters2
    LOADREG x15 4 s2 .LfillRegisters2
    LOADREG x15 4 s3 .LfillRegisters2
    LOADREG x15 4 s4 .LfillRegisters2
    LOADREG x15 4 s5 .LfillRegisters2
    LOADREG x15 4 s6 .LfillRegisters2
    LOADREG x15 4 s7 .LfillRegisters2

// Store doubles.
.LstoreD0_2:
    LOADREG x15 8 d0 .LfillRegisters2
    LOADREG x15 8 d1 .LfillRegisters2
    LOADREG x15 8 d2 .LfillRegisters2
    LOADREG x15 8 d3 .LfillRegisters2
    LOADREG x15 8 d4 .LfillRegisters2
    LOADREG x15 8 d5 .LfillRegisters2
    LOADREG x15 8 d6 .LfillRegisters2
    LOADREG x15 8 d7 .LfillRegisters2


.LcallFunction2:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_static_stub



    /*
     * On entry x0 is uintptr_t* gprs_ and x1 is uint64_t* fprs_
     */

ENTRY art_quick_do_long_jump
    // Load FPRs
    ldp d0, d1, [x1], #16
    ldp d2, d3, [x1], #16
    ldp d4, d5, [x1], #16
    ldp d6, d7, [x1], #16
    ldp d8, d9, [x1], #16
    ldp d10, d11, [x1], #16
    ldp d12, d13, [x1], #16
    ldp d14, d15, [x1], #16
    ldp d16, d17, [x1], #16
    ldp d18, d19, [x1], #16
    ldp d20, d21, [x1], #16
    ldp d22, d23, [x1], #16
    ldp d24, d25, [x1], #16
    ldp d26, d27, [x1], #16
    ldp d28, d29, [x1], #16
    ldp d30, d31, [x1]

    // Load GPRs
    // TODO: lots of those are smashed, could optimize.
    add x0, x0, #30*8
    ldp x30, x1, [x0], #-16
    ldp x28, x29, [x0], #-16
    ldp x26, x27, [x0], #-16
    ldp x24, x25, [x0], #-16
    ldp x22, x23, [x0], #-16
    ldp x20, x21, [x0], #-16
    ldp x18, x19, [x0], #-16
    ldp x16, x17, [x0], #-16
    ldp x14, x15, [x0], #-16
    ldp x12, x13, [x0], #-16
    ldp x10, x11, [x0], #-16
    ldp x8, x9, [x0], #-16
    ldp x6, x7, [x0], #-16
    ldp x4, x5, [x0], #-16
    ldp x2, x3, [x0], #-16
    mov sp, x1

    // TODO: Is it really OK to use LR for the target PC?
    mov x0, #0
    mov x1, #0
    br xLR
END art_quick_do_long_jump

    /*
     * Entry from managed code that calls artLockObjectFromCode, may block for GC. x0 holds the
     * possibly null object to lock.
     *
     * Derived from arm32 code.
     */
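// Fast-path sketch (derived from the code below): the lock word is loaded exclusively, the read
// barrier state bits are masked off, and if the remainder is zero the object is unlocked, so we
// stxr our thread id with a count of zero. If it is already thin-locked by this thread, the count
// is bumped by LOCK_WORD_THIN_LOCK_COUNT_ONE with an overflow check. Fat locks, contention and
// count overflow all take the artLockObjectFromCode slow path.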
1004 .extern artLockObjectFromCode
1005ENTRY art_quick_lock_object
1006 cbz w0, .Lslow_lock
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001007 add x4, x0, #MIRROR_OBJECT_LOCK_WORD_OFFSET // exclusive load/store has no immediate anymore
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001008.Lretry_lock:
1009 ldr w2, [xSELF, #THREAD_ID_OFFSET] // TODO: Can the thread ID really change during the loop?
1010 ldxr w1, [x4]
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001011 mov x3, x1
1012 and w3, w3, #LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED // zero the read barrier bits
1013 cbnz w3, .Lnot_unlocked // already thin locked
1014 // unlocked case - x1: original lock word that's zero except for the read barrier bits.
1015 orr x2, x1, x2 // x2 holds thread id with count of 0 with preserved read barrier bits
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001016 stxr w3, w2, [x4]
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001017 cbnz w3, .Llock_stxr_fail // store failed, retry
Andreas Gampe675967d2014-05-14 16:28:34 -07001018 dmb ishld // full (LoadLoad|LoadStore) memory barrier
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001019 ret
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001020.Lnot_unlocked: // x1: original lock word
1021 lsr w3, w1, LOCK_WORD_STATE_SHIFT
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001022 cbnz w3, .Lslow_lock // if either of the top two bits are set, go slow path
1023 eor w2, w1, w2 // lock_word.ThreadId() ^ self->ThreadId()
1024 uxth w2, w2 // zero top 16 bits
1025 cbnz w2, .Lslow_lock // lock word and self thread id's match -> recursive lock
1026 // else contention, go to slow path
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001027 mov x3, x1 // copy the lock word to check count overflow.
1028 and w3, w3, #LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED // zero the read barrier bits.
1029 add w2, w3, #LOCK_WORD_THIN_LOCK_COUNT_ONE // increment count in lock word placing in w2 to check overflow
1030 lsr w3, w2, LOCK_WORD_READ_BARRIER_STATE_SHIFT // if either of the upper two bits (28-29) are set, we overflowed.
1031 cbnz w3, .Lslow_lock // if we overflow the count go slow path
1032 add w2, w1, #LOCK_WORD_THIN_LOCK_COUNT_ONE // increment count for real
1033 stxr w3, w2, [x4]
1034 cbnz w3, .Llock_stxr_fail // store failed, retry
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001035 ret
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001036.Llock_stxr_fail:
1037 b .Lretry_lock // retry
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001038.Lslow_lock:
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001039 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case we block
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001040 mov x1, xSELF // pass Thread::Current
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001041 bl artLockObjectFromCode // (Object* obj, Thread*)
1042 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001043 RETURN_IF_W0_IS_ZERO_OR_DELIVER
1044END art_quick_lock_object
1045
1046 /*
1047 * Entry from managed code that calls artUnlockObjectFromCode and delivers exception on failure.
1048 * x0 holds the possibly null object to lock.
1049 *
1050 * Derived from arm32 code.
1051 */
1052 .extern artUnlockObjectFromCode
1053ENTRY art_quick_unlock_object
1054 cbz x0, .Lslow_unlock
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001055 add x4, x0, #MIRROR_OBJECT_LOCK_WORD_OFFSET // exclusive load/store has no immediate anymore
1056.Lretry_unlock:
1057#ifndef USE_READ_BARRIER
1058 ldr w1, [x4]
1059#else
1060 ldxr w1, [x4] // Need to use atomic instructions for read barrier
1061#endif
1062 lsr w2, w1, LOCK_WORD_STATE_SHIFT
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001063 cbnz w2, .Lslow_unlock // if either of the top two bits are set, go slow path
1064 ldr w2, [xSELF, #THREAD_ID_OFFSET]
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001065 mov x3, x1 // copy lock word to check thread id equality
1066 and w3, w3, #LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED // zero the read barrier bits
1067 eor w3, w3, w2 // lock_word.ThreadId() ^ self->ThreadId()
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001068 uxth w3, w3 // zero top 16 bits
1069 cbnz w3, .Lslow_unlock // do lock word and self thread id's match?
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001070 mov x3, x1 // copy lock word to detect transition to unlocked
1071 and w3, w3, #LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED // zero the read barrier bits
1072 cmp w3, #LOCK_WORD_THIN_LOCK_COUNT_ONE
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001073 bpl .Lrecursive_thin_unlock
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001074 // transition to unlocked
1075 mov x3, x1
1076 and w3, w3, #LOCK_WORD_READ_BARRIER_STATE_MASK // w3: zero except for the preserved read barrier bits
Andreas Gampe675967d2014-05-14 16:28:34 -07001077 dmb ish // full (LoadStore|StoreStore) memory barrier
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001078#ifndef USE_READ_BARRIER
1079 str w3, [x4]
1080#else
1081 stxr w2, w3, [x4] // Need to use atomic instructions for read barrier
1082 cbnz w2, .Lunlock_stxr_fail // store failed, retry
1083#endif
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001084 ret
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001085.Lrecursive_thin_unlock: // w1: original lock word
1086 sub w1, w1, #LOCK_WORD_THIN_LOCK_COUNT_ONE // decrement count
1087#ifndef USE_READ_BARRIER
1088 str w1, [x4]
1089#else
1090 stxr w2, w1, [x4] // Need to use atomic instructions for read barrier
1091 cbnz w2, .Lunlock_stxr_fail // store failed, retry
1092#endif
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001093 ret
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001094.Lunlock_stxr_fail:
1095 b .Lretry_unlock // retry
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001096.Lslow_unlock:
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001097 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case exception allocation triggers GC
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001098 mov x1, xSELF // pass Thread::Current
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001099 bl artUnlockObjectFromCode // (Object* obj, Thread*)
1100 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001101 RETURN_IF_W0_IS_ZERO_OR_DELIVER
1102END art_quick_unlock_object
Andreas Gampe525cde22014-04-22 15:44:50 -07001103
1104 /*
1105 * Entry from managed code that calls artIsAssignableFromCode and on failure calls
1106 * artThrowClassCastException.
1107 */
1108 .extern artThrowClassCastException
1109ENTRY art_quick_check_cast
1110 // Store arguments and link register
1111 sub sp, sp, #32 // Stack needs to be 16b aligned on calls
1112 .cfi_adjust_cfa_offset 32
1113 stp x0, x1, [sp]
1114 .cfi_rel_offset x0, 0
1115 .cfi_rel_offset x1, 8
1116 stp xSELF, xLR, [sp, #16]
1117 .cfi_rel_offset x18, 16
1118 .cfi_rel_offset x30, 24
1119
1120 // Call runtime code
1121 bl artIsAssignableFromCode
1122
1123 // Check for exception
1124 cbz x0, .Lthrow_class_cast_exception
1125
1126 // Restore and return
1127 ldp x0, x1, [sp]
1128 .cfi_restore x0
1129 .cfi_restore x1
1130 ldp xSELF, xLR, [sp, #16]
1131 .cfi_restore x18
1132 .cfi_restore x30
1133 add sp, sp, #32
1134 .cfi_adjust_cfa_offset -32
1135 ret
1136
1137.Lthrow_class_cast_exception:
1138 // Restore
1139 ldp x0, x1, [sp]
1140 .cfi_restore x0
1141 .cfi_restore x1
1142 ldp xSELF, xLR, [sp, #16]
1143 .cfi_restore x18
1144 .cfi_restore x30
1145 add sp, sp, #32
1146 .cfi_adjust_cfa_offset -32
1147
1148 SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context
1149 mov x2, xSELF // pass Thread::Current
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001150 b artThrowClassCastException // (Class*, Class*, Thread*)
Andreas Gampe525cde22014-04-22 15:44:50 -07001151 brk 0 // We should not return here...
1152END art_quick_check_cast
1153
Andreas Gampef4e910b2014-04-29 16:55:52 -07001154 /*
1155 * Entry from managed code for array put operations of objects where the value being stored
1156 * needs to be checked for compatibility.
1157 * x0 = array, x1 = index, x2 = value
1158 *
1159 * Currently all values should fit into w0/w1/w2, and w1 always will as indices are 32b. We
1160 * assume, though, that the upper 32b are zeroed out. At least for x1/w1 we can do better by
1161 * using index-zero-extension in load/stores.
1162 *
1163 * Temporaries: x3, x4
1164 * TODO: x4 OK? ip seems wrong here.
1165 */
1166ENTRY art_quick_aput_obj_with_null_and_bound_check
1167 tst x0, x0
1168 bne art_quick_aput_obj_with_bound_check
1169 b art_quick_throw_null_pointer_exception
1170END art_quick_aput_obj_with_null_and_bound_check
1171
1172ENTRY art_quick_aput_obj_with_bound_check
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001173 ldr w3, [x0, #MIRROR_ARRAY_LENGTH_OFFSET]
Andreas Gampef4e910b2014-04-29 16:55:52 -07001174 cmp w3, w1
1175 bhi art_quick_aput_obj
1176 mov x0, x1
1177 mov x1, x3
1178 b art_quick_throw_array_bounds
1179END art_quick_aput_obj_with_bound_check
1180
1181ENTRY art_quick_aput_obj
1182 cbz x2, .Ldo_aput_null
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001183 ldr w3, [x0, #MIRROR_OBJECT_CLASS_OFFSET] // Heap reference = 32b
Andreas Gampef4e910b2014-04-29 16:55:52 -07001184 // This also zero-extends to x3
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001185 ldr w4, [x2, #MIRROR_OBJECT_CLASS_OFFSET] // Heap reference = 32b
Andreas Gampef4e910b2014-04-29 16:55:52 -07001186 // This also zero-extends to x4
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001187 ldr w3, [x3, #MIRROR_CLASS_COMPONENT_TYPE_OFFSET] // Heap reference = 32b
Andreas Gampef4e910b2014-04-29 16:55:52 -07001188 // This also zero-extends to x3
1189 cmp w3, w4 // value's type == array's component type - trivial assignability
1190 bne .Lcheck_assignability
1191.Ldo_aput:
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001192 add x3, x0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET
Andreas Gampef4e910b2014-04-29 16:55:52 -07001193 // "Compress" = do nothing
1194 str w2, [x3, x1, lsl #2] // Heap reference = 32b
1195 ldr x3, [xSELF, #THREAD_CARD_TABLE_OFFSET]
1196 lsr x0, x0, #7
1197 strb w3, [x3, x0]
1198 ret
1199.Ldo_aput_null:
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001200 add x3, x0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET
Andreas Gampef4e910b2014-04-29 16:55:52 -07001201 // "Compress" = do nothing
1202 str w2, [x3, x1, lsl #2] // Heap reference = 32b
1203 ret
1204.Lcheck_assignability:
1205 // Store arguments and link register
1206 sub sp, sp, #48 // Stack needs to be 16b aligned on calls
1207 .cfi_adjust_cfa_offset 48
1208 stp x0, x1, [sp]
1209 .cfi_rel_offset x0, 0
1210 .cfi_rel_offset x1, 8
1211 stp x2, xSELF, [sp, #16]
1212 .cfi_rel_offset x2, 16
1213 .cfi_rel_offset x18, 24
1214 str xLR, [sp, #32]
1215 .cfi_rel_offset x30, 32
1216
1217 // Call runtime code
1218 mov x0, x3 // Heap reference, 32b, "uncompress" = do nothing, already zero-extended
1219 mov x1, x4 // Heap reference, 32b, "uncompress" = do nothing, already zero-extended
1220 bl artIsAssignableFromCode
1221
1222 // Check for exception
1223 cbz x0, .Lthrow_array_store_exception
1224
1225 // Restore
1226 ldp x0, x1, [sp]
1227 .cfi_restore x0
1228 .cfi_restore x1
1229 ldp x2, xSELF, [sp, #16]
1230 .cfi_restore x2
1231 .cfi_restore x18
1232 ldr xLR, [sp, #32]
1233 .cfi_restore x30
1234 add sp, sp, #48
1235 .cfi_adjust_cfa_offset -48
1236
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001237 add x3, x0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET
Andreas Gampef4e910b2014-04-29 16:55:52 -07001238 // "Compress" = do nothing
1239 str w2, [x3, x1, lsl #2] // Heap reference = 32b
1240 ldr x3, [xSELF, #THREAD_CARD_TABLE_OFFSET]
1241 lsr x0, x0, #7
1242 strb w3, [x3, x0]
1243 ret
1244.Lthrow_array_store_exception:
1245 ldp x0, x1, [sp]
1246 .cfi_restore x0
1247 .cfi_restore x1
1248 ldp x2, xSELF, [sp, #16]
1249 .cfi_restore x2
1250 .cfi_restore x18
1251 ldr xLR, [sp, #32]
1252 .cfi_restore x30
1253 add sp, sp, #48
1254 .cfi_adjust_cfa_offset -48
1255
1256 SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
1257 mov x1, x2 // Pass value.
1258 mov x2, xSELF // Pass Thread::Current.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001259 b artThrowArrayStoreException // (Object*, Object*, Thread*).
Andreas Gampef4e910b2014-04-29 16:55:52 -07001260 brk 0 // Unreached.
1261END art_quick_aput_obj
1262
Stuart Monteithb95a5342014-03-12 13:32:32 +00001263// Macro to facilitate adding new allocation entrypoints.
Vladimir Marko5ea536a2015-04-20 20:11:30 +01001264.macro ONE_ARG_DOWNCALL name, entrypoint, return
1265 .extern \entrypoint
1266ENTRY \name
Jeff Hao848f70a2014-01-15 13:49:50 -08001267 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC
Vladimir Marko5ea536a2015-04-20 20:11:30 +01001268 mov x1, xSELF // pass Thread::Current
1269 bl \entrypoint // (uint32_t type_idx, Method* method, Thread*)
1270 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
1271 \return
1272END \name
1273.endm
1274
1275// Macro to facilitate adding new allocation entrypoints.
Stuart Monteithb95a5342014-03-12 13:32:32 +00001276.macro TWO_ARG_DOWNCALL name, entrypoint, return
1277 .extern \entrypoint
1278ENTRY \name
Jeff Hao848f70a2014-01-15 13:49:50 -08001279 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001280 mov x2, xSELF // pass Thread::Current
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001281 bl \entrypoint // (uint32_t type_idx, Method* method, Thread*)
1282 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001283 \return
Stuart Monteithb95a5342014-03-12 13:32:32 +00001284END \name
1285.endm
1286
Jeff Hao848f70a2014-01-15 13:49:50 -08001287// Macro to facilitate adding new allocation entrypoints.
Stuart Monteithb95a5342014-03-12 13:32:32 +00001288.macro THREE_ARG_DOWNCALL name, entrypoint, return
1289 .extern \entrypoint
1290ENTRY \name
Jeff Hao848f70a2014-01-15 13:49:50 -08001291 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001292 mov x3, xSELF // pass Thread::Current
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001293 bl \entrypoint
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001294 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001295 \return
Stuart Monteithb95a5342014-03-12 13:32:32 +00001296END \name
1297.endm
1298
Jeff Hao848f70a2014-01-15 13:49:50 -08001299// Macro to facilitate adding new allocation entrypoints.
1300.macro FOUR_ARG_DOWNCALL name, entrypoint, return
1301 .extern \entrypoint
1302ENTRY \name
1303 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC
1304 mov x4, xSELF // pass Thread::Current
1305 bl \entrypoint //
1306 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
1307 \return
1308 DELIVER_PENDING_EXCEPTION
1309END \name
1310.endm
1311
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001312// Macros taking opportunity of code similarities for downcalls with referrer.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001313.macro ONE_ARG_REF_DOWNCALL name, entrypoint, return
1314 .extern \entrypoint
1315ENTRY \name
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001316 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC
Andreas Gampecf4035a2014-05-28 22:43:01 -07001317 ldr w1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001318 mov x2, xSELF // pass Thread::Current
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001319 bl \entrypoint // (uint32_t type_idx, Method* method, Thread*, SP)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001320 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001321 \return
1322END \name
1323.endm
1324
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001325.macro TWO_ARG_REF_DOWNCALL name, entrypoint, return
1326 .extern \entrypoint
1327ENTRY \name
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001328 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC
Andreas Gampecf4035a2014-05-28 22:43:01 -07001329 ldr w2, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001330 mov x3, xSELF // pass Thread::Current
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001331 bl \entrypoint
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001332 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001333 \return
1334END \name
1335.endm
1336
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001337.macro THREE_ARG_REF_DOWNCALL name, entrypoint, return
1338 .extern \entrypoint
1339ENTRY \name
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001340 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC
Andreas Gampecf4035a2014-05-28 22:43:01 -07001341 ldr w3, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001342 mov x4, xSELF // pass Thread::Current
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001343 bl \entrypoint
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001344 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001345 \return
1346END \name
1347.endm
1348
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001349.macro RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
1350 cbz w0, 1f // result zero branch over
1351 ret // return
13521:
1353 DELIVER_PENDING_EXCEPTION
1354.endm
1355
Matteo Franchindfd891a2014-04-30 12:17:17 +01001356 /*
Vladimir Marko3b370732014-10-09 18:34:28 +01001357 * Entry from managed code that calls artHandleFillArrayDataFromCode and delivers exception on
1358 * failure.
1359 */
1360TWO_ARG_REF_DOWNCALL art_quick_handle_fill_data, artHandleFillArrayDataFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
1361
1362 /*
Matteo Franchindfd891a2014-04-30 12:17:17 +01001363 * Entry from managed code when static storage is uninitialized; this stub will run the class
1364 * initializer and deliver the exception on error. On success the static storage base is
1365 * returned.
1366 */
Vladimir Marko5ea536a2015-04-20 20:11:30 +01001367ONE_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
Matteo Franchindfd891a2014-04-30 12:17:17 +01001368
Vladimir Marko5ea536a2015-04-20 20:11:30 +01001369ONE_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
1370ONE_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
Matteo Franchindfd891a2014-04-30 12:17:17 +01001371
Fred Shih37f05ef2014-07-16 18:38:08 -07001372ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1373ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1374ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1375ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001376ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1377ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1378ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1379
Fred Shih37f05ef2014-07-16 18:38:08 -07001380TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1381TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1382TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1383TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001384TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1385TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1386TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1387
Fred Shih37f05ef2014-07-16 18:38:08 -07001388TWO_ARG_REF_DOWNCALL art_quick_set8_static, artSet8StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
1389TWO_ARG_REF_DOWNCALL art_quick_set16_static, artSet16StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001390TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
1391TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
1392
Fred Shih37f05ef2014-07-16 18:38:08 -07001393THREE_ARG_REF_DOWNCALL art_quick_set8_instance, artSet8InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
1394THREE_ARG_REF_DOWNCALL art_quick_set16_instance, artSet16InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001395THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
Stephen Kyle0ff20d52014-10-22 15:23:46 +01001396THREE_ARG_REF_DOWNCALL art_quick_set64_instance, artSet64InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001397THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
1398
1399// This is separated out as the argument order is different.
1400 .extern artSet64StaticFromCode
1401ENTRY art_quick_set64_static
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001402 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001403 mov x3, x1 // Store value
Andreas Gampecf4035a2014-05-28 22:43:01 -07001404 ldr w1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001405 mov x2, x3 // Put value param
1406 mov x3, xSELF // pass Thread::Current
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001407 bl artSet64StaticFromCode
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001408 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001409 RETURN_IF_W0_IS_ZERO_OR_DELIVER
1410END art_quick_set64_static
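    /*
     * The shuffle above exists because the managed caller supplies (field_idx in w0,
     * new_value in x1) while the C entrypoint also wants the referrer and the thread, in the
     * order (field_idx, referrer, new_value, Thread*). A hedged C sketch of the resulting
     * call; the exact parameter types are assumptions, only the order is taken from the
     * register moves above:
     *
     *   #include <stdint.h>
     *
     *   typedef struct Thread Thread;
     *   typedef struct ArtMethod ArtMethod;
     *
     *   // Assumed shape of the real entrypoint called by this stub.
     *   extern int artSet64StaticFromCode(uint32_t field_idx, ArtMethod* referrer,
     *                                     uint64_t new_value, Thread* self);
     *
     *   static int set64_static_sketch(uint32_t field_idx, uint64_t new_value,
     *                                  ArtMethod* referrer, Thread* self) {
     *       // x1 (the value) is parked in x3, the referrer is loaded into w1 from the frame,
     *       // the value moves on into x2 and Thread* into x3 before the call.
     *       return artSet64StaticFromCode(field_idx, referrer, new_value, self);
     *   }
     */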
1411
Matteo Franchindfd891a2014-04-30 12:17:17 +01001412 /*
1413 * Entry from managed code to resolve a string; this stub will allocate a String and deliver an
Vladimir Marko5ea536a2015-04-20 20:11:30 +01001414 * exception on error. On success the String is returned. w0 holds the string index. The fast
1415 * path check for a hit in the strings cache has already been performed.
Matteo Franchindfd891a2014-04-30 12:17:17 +01001416 */
Vladimir Marko5ea536a2015-04-20 20:11:30 +01001417ONE_ARG_DOWNCALL art_quick_resolve_string, artResolveStringFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001418
Stuart Monteithb95a5342014-03-12 13:32:32 +00001419// Generate the allocation entrypoints for each allocator.
1420GENERATE_ALL_ALLOC_ENTRYPOINTS
1421
Zheng Xu48241e72014-05-23 11:52:42 +08001422 /*
Zheng Xu69a50302015-04-14 20:04:41 +08001423 * Called by managed code when the thread has been asked to suspend.
Zheng Xu48241e72014-05-23 11:52:42 +08001424 */
1425 .extern artTestSuspendFromCode
1426ENTRY art_quick_test_suspend
1427 ldrh w0, [xSELF, #THREAD_FLAGS_OFFSET] // get xSELF->state_and_flags.as_struct.flags
Zheng Xu48241e72014-05-23 11:52:42 +08001428 cbnz w0, .Lneed_suspend // check flags == 0
1429 ret // return if flags == 0
1430.Lneed_suspend:
1431 mov x0, xSELF
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001432 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves for stack crawl
1433 bl artTestSuspendFromCode // (Thread*)
1434 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
Zheng Xu48241e72014-05-23 11:52:42 +08001435END art_quick_test_suspend
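    /*
     * Conceptually the fast path above is just "load the 16-bit thread flags; if they are
     * zero, return; otherwise call the runtime". A minimal C sketch under that reading; the
     * flag accessor is a stand-in, only artTestSuspendFromCode is a real entrypoint:
     *
     *   #include <stdint.h>
     *
     *   typedef struct Thread Thread;
     *
     *   extern uint16_t thread_flags(Thread* self);        // stand-in for the THREAD_FLAGS_OFFSET load
     *   extern void artTestSuspendFromCode(Thread* self);   // runtime slow path
     *
     *   static void quick_test_suspend_sketch(Thread* self) {
     *       if (thread_flags(self) == 0) {
     *           return;   // ret: nothing pending
     *       }
     *       // The real stub first spills the refs-only callee-save frame so the suspend/GC
     *       // machinery can walk the stack, then calls out.
     *       artTestSuspendFromCode(self);
     *   }
     */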
Stuart Monteithb95a5342014-03-12 13:32:32 +00001436
Stuart Monteithd5c78f42014-06-11 16:44:46 +01001437ENTRY art_quick_implicit_suspend
1438 mov x0, xSELF
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001439 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves for stack crawl
1440 bl artTestSuspendFromCode // (Thread*)
1441 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
Stuart Monteithd5c78f42014-06-11 16:44:46 +01001442END art_quick_implicit_suspend
1443
Andreas Gampee62a07e2014-03-26 14:53:21 -07001444 /*
1445 * Called by managed code that is attempting to call a method on a proxy class. On entry
1446 * x0 holds the proxy method and x1 holds the receiver; the frame size of the invoked proxy
1447 * method agrees with a ref and args callee save frame.
1448 */
1449 .extern artQuickProxyInvokeHandler
1450ENTRY art_quick_proxy_invoke_handler
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001451 SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_X0
Andreas Gampee62a07e2014-03-26 14:53:21 -07001452 mov x2, xSELF // pass Thread::Current
1453 mov x3, sp // pass SP
1454 bl artQuickProxyInvokeHandler // (Method* proxy method, receiver, Thread*, SP)
Zheng Xub551fdc2014-07-25 11:49:42 +08001455 // Use xETR as xSELF might be scratched by native function above.
1456 ldr x2, [xETR, THREAD_EXCEPTION_OFFSET]
Andreas Gampee62a07e2014-03-26 14:53:21 -07001457 cbnz x2, .Lexception_in_proxy // branch if an exception is pending
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001458 RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME // Restore frame
Andreas Gamped1e91672014-06-02 22:50:05 -07001459 fmov d0, x0 // Store result in d0 in case it was float or double
Andreas Gampee62a07e2014-03-26 14:53:21 -07001460 ret // return on success
1461.Lexception_in_proxy:
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001462 RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
Andreas Gampee62a07e2014-03-26 14:53:21 -07001463 DELIVER_PENDING_EXCEPTION
1464END art_quick_proxy_invoke_handler
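    /*
     * In outline the handler above hands the proxy method, receiver, thread and saved frame
     * to the runtime, then either delivers a pending exception or returns the 64-bit result
     * both as an integer and, via "fmov d0, x0", as a floating-point value. A rough C
     * rendering of that control flow; the exception helpers are stand-ins, and the exact C++
     * prototype of the entrypoint may differ:
     *
     *   #include <stdint.h>
     *
     *   typedef struct Thread Thread;
     *   typedef struct ArtMethod ArtMethod;
     *   typedef struct Object Object;
     *
     *   extern uint64_t artQuickProxyInvokeHandler(ArtMethod* proxy_method, Object* receiver,
     *                                              Thread* self, void* sp);
     *   extern void* thread_pending_exception(Thread* self);   // stand-in accessor
     *   extern void deliver_pending_exception(Thread* self);   // stand-in for the macro
     *
     *   static uint64_t proxy_invoke_sketch(ArtMethod* proxy_method, Object* receiver,
     *                                       Thread* self, void* sp) {
     *       uint64_t result = artQuickProxyInvokeHandler(proxy_method, receiver, self, sp);
     *       if (thread_pending_exception(self) != 0) {   // ldr x2, [xETR, THREAD_EXCEPTION_OFFSET]
     *           deliver_pending_exception(self);         // .Lexception_in_proxy path
     *       }
     *       // On success the stub also copies the bits into d0 so float/double returns work
     *       // without knowing the callee's return type.
     *       return result;
     *   }
     */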
Stuart Monteithb95a5342014-03-12 13:32:32 +00001465
Andreas Gampe51f76352014-05-21 08:28:48 -07001466 /*
Zheng Xub551fdc2014-07-25 11:49:42 +08001467 * Called to resolve an imt conflict. xIP1 is a hidden argument that holds the target method's
Andreas Gampe51f76352014-05-21 08:28:48 -07001468 * dex method index.
1469 */
1470ENTRY art_quick_imt_conflict_trampoline
Andreas Gampecf4035a2014-05-28 22:43:01 -07001471 ldr w0, [sp, #0] // load caller Method*
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001472 ldr w0, [x0, #MIRROR_ART_METHOD_DEX_CACHE_METHODS_OFFSET] // load dex_cache_resolved_methods
1473 add x0, x0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET // get starting address of data
Zheng Xub551fdc2014-07-25 11:49:42 +08001474 ldr w0, [x0, xIP1, lsl 2] // load the target method
Andreas Gampe51f76352014-05-21 08:28:48 -07001475 b art_quick_invoke_interface_trampoline
1476END art_quick_imt_conflict_trampoline
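    /*
     * The trampoline above is a table lookup: read the caller's dex_cache_resolved_methods
     * array, index it with the hidden dex method index in xIP1 (scaled by 4, since heap
     * references are loaded as 32-bit words here), and tail-call the interface trampoline
     * with the result in x0. A small C sketch of the lookup; the struct layouts are
     * illustrative stand-ins for the MIRROR_* offsets:
     *
     *   #include <stdint.h>
     *
     *   typedef struct {
     *       uint32_t dex_cache_resolved_methods;   // MIRROR_ART_METHOD_DEX_CACHE_METHODS_OFFSET
     *   } ArtMethodSketch;
     *
     *   typedef struct {
     *       uint32_t elements[1];                  // data at MIRROR_OBJECT_ARRAY_DATA_OFFSET
     *   } ObjectArraySketch;
     *
     *   static uint32_t imt_conflict_lookup_sketch(const ArtMethodSketch* caller,
     *                                              uint32_t dex_method_index) {
     *       const ObjectArraySketch* methods =
     *           (const ObjectArraySketch*)(uintptr_t)caller->dex_cache_resolved_methods;
     *       return methods->elements[dex_method_index];   // ldr w0, [x0, xIP1, lsl 2]
     *   }
     */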
Stuart Monteithb95a5342014-03-12 13:32:32 +00001477
1478ENTRY art_quick_resolution_trampoline
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001479 SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME
Stuart Monteithb95a5342014-03-12 13:32:32 +00001480 mov x2, xSELF
1481 mov x3, sp
1482 bl artQuickResolutionTrampoline // (called, receiver, Thread*, SP)
Matteo Franchindfd891a2014-04-30 12:17:17 +01001483 cbz x0, 1f
Zheng Xub551fdc2014-07-25 11:49:42 +08001484 mov xIP0, x0 // Remember returned code pointer in xIP0.
Andreas Gampecf4035a2014-05-28 22:43:01 -07001485 ldr w0, [sp, #0] // artQuickResolutionTrampoline puts called method in *SP.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001486 RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
Zheng Xub551fdc2014-07-25 11:49:42 +08001487 br xIP0
Stuart Monteithb95a5342014-03-12 13:32:32 +000014881:
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001489 RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
Stuart Monteithb95a5342014-03-12 13:32:32 +00001490 DELIVER_PENDING_EXCEPTION
1491END art_quick_resolution_trampoline
1492
1493/*
1494 * Generic JNI frame layout:
1495 *
1496 * #-------------------#
1497 * | |
1498 * | caller method... |
1499 * #-------------------# <--- SP on entry
1500 * | Return X30/LR |
1501 * | X29/FP | callee save
1502 * | X28 | callee save
1503 * | X27 | callee save
1504 * | X26 | callee save
1505 * | X25 | callee save
1506 * | X24 | callee save
1507 * | X23 | callee save
1508 * | X22 | callee save
1509 * | X21 | callee save
1510 * | X20 | callee save
Zheng Xu69a50302015-04-14 20:04:41 +08001511 * | X19 | callee save
Stuart Monteithb95a5342014-03-12 13:32:32 +00001512 * | X7 | arg7
1513 * | X6 | arg6
1514 * | X5 | arg5
1515 * | X4 | arg4
1516 * | X3 | arg3
1517 * | X2 | arg2
1518 * | X1 | arg1
Stuart Monteithb95a5342014-03-12 13:32:32 +00001519 * | D7 | float arg 8
1520 * | D6 | float arg 7
1521 * | D5 | float arg 6
1522 * | D4 | float arg 5
1523 * | D3 | float arg 4
1524 * | D2 | float arg 3
1525 * | D1 | float arg 2
1526 * | D0 | float arg 1
Andreas Gampecf4035a2014-05-28 22:43:01 -07001527 * | Method* | <- X0
Stuart Monteithb95a5342014-03-12 13:32:32 +00001528 * #-------------------#
1529 * | local ref cookie | // 4B
Mathieu Chartier421c5372014-05-14 14:11:40 -07001530 * | handle scope size | // 4B
Stuart Monteithb95a5342014-03-12 13:32:32 +00001531 * #-------------------#
1532 * | JNI Call Stack |
1533 * #-------------------# <--- SP on native call
1534 * | |
1535 * | Stack for Regs | The trampoline assembly will pop these values
1536 * | | into registers for native call
1537 * #-------------------#
1538 * | Native code ptr |
1539 * #-------------------#
1540 * | Free scratch |
1541 * #-------------------#
1542 * | Ptr to (1) | <--- SP
1543 * #-------------------#
1544 */
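    /*
     * Read from the Method* slot upward (lower addresses first), the managed part of the
     * layout above can be pictured as a C struct; field names below are descriptive only,
     * not ART identifiers, and the JNI bookkeeping (local ref cookie, handle scope size,
     * out-args) sits below the Method* slot:
     *
     *   #include <stdint.h>
     *
     *   typedef struct {
     *       uint64_t method;            // ArtMethod*; SP (and X0) point here on entry
     *       double   d_args[8];         // D0..D7, float args 1-8
     *       uint64_t x_args[7];         // X1..X7, args 1-7
     *       uint64_t callee_saves[10];  // X19..X28
     *       uint64_t fp;                // X29
     *       uint64_t lr;                // X30, return into the caller
     *   } RefsAndArgsFrameSketch;
     */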
1545 /*
1546 * Called to do a generic JNI down-call
1547 */
Ian Rogers6f3dbba2014-10-14 17:41:57 -07001548ENTRY art_quick_generic_jni_trampoline
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001549 SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_X0
Stuart Monteithb95a5342014-03-12 13:32:32 +00001550
1551 // Save SP, so we can have static CFI info.
1552 mov x28, sp
1553 .cfi_def_cfa_register x28
1554
1555 // This looks the same, but is different: this will be updated to point to the bottom
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001556 // of the frame when the handle scope is inserted.
Stuart Monteithb95a5342014-03-12 13:32:32 +00001557 mov xFP, sp
1558
Zheng Xub551fdc2014-07-25 11:49:42 +08001559 mov xIP0, #5120
1560 sub sp, sp, xIP0
Stuart Monteithb95a5342014-03-12 13:32:32 +00001561
1562 // prepare for artQuickGenericJniTrampoline call
1563 // (Thread*, SP)
1564 // x0 x1 <= C calling convention
1565 // xSELF xFP <= where they are
1566
1567 mov x0, xSELF // Thread*
1568 mov x1, xFP
1569 bl artQuickGenericJniTrampoline // (Thread*, sp)
1570
Andreas Gampec200a4a2014-06-16 18:39:09 -07001571 // The C call will have registered the complete save-frame on success.
1572 // The result of the call is:
1573 // x0: pointer to native code, 0 on error.
1574 // x1: pointer to the bottom of the used area of the alloca, can restore stack till there.
Stuart Monteithb95a5342014-03-12 13:32:32 +00001575
Andreas Gampec200a4a2014-06-16 18:39:09 -07001576 // Check for error = 0.
Nicolas Geoffray126d6592015-03-03 14:28:35 +00001577 cbz x0, .Lexception_in_native
Stuart Monteithb95a5342014-03-12 13:32:32 +00001578
Andreas Gampec200a4a2014-06-16 18:39:09 -07001579 // Release part of the alloca.
1580 mov sp, x1
Stuart Monteithb95a5342014-03-12 13:32:32 +00001581
Andreas Gampec200a4a2014-06-16 18:39:09 -07001582 // Save the code pointer
1583 mov xIP0, x0
Stuart Monteithb95a5342014-03-12 13:32:32 +00001584
1585 // Load parameters from frame into registers.
1586 // TODO: Check with artQuickGenericJniTrampoline.
1587 // Also, check against AAPCS64 - the stack arguments are interleaved.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001588 ldp x0, x1, [sp]
1589 ldp x2, x3, [sp, #16]
1590 ldp x4, x5, [sp, #32]
1591 ldp x6, x7, [sp, #48]
Stuart Monteithb95a5342014-03-12 13:32:32 +00001592
Andreas Gampec200a4a2014-06-16 18:39:09 -07001593 ldp d0, d1, [sp, #64]
1594 ldp d2, d3, [sp, #80]
1595 ldp d4, d5, [sp, #96]
1596 ldp d6, d7, [sp, #112]
Stuart Monteithb95a5342014-03-12 13:32:32 +00001597
Andreas Gampec200a4a2014-06-16 18:39:09 -07001598 add sp, sp, #128
Stuart Monteithb95a5342014-03-12 13:32:32 +00001599
Zheng Xub551fdc2014-07-25 11:49:42 +08001600 blr xIP0 // native call.
Stuart Monteithb95a5342014-03-12 13:32:32 +00001601
1602 // result sign extension is handled in C code
1603 // prepare for artQuickGenericJniEndTrampoline call
Andreas Gampec200a4a2014-06-16 18:39:09 -07001604 // (Thread*, result, result_f)
1605 // x0 x1 x2 <= C calling convention
1606 mov x1, x0 // Result (from saved)
Zheng Xub551fdc2014-07-25 11:49:42 +08001607 mov x0, xETR // Thread register, original xSELF might be scratched by native code.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001608 fmov x2, d0 // d0 will contain floating point result, but needs to go into x2
Stuart Monteithb95a5342014-03-12 13:32:32 +00001609
1610 bl artQuickGenericJniEndTrampoline
1611
Nicolas Geoffray126d6592015-03-03 14:28:35 +00001612 // Pending exceptions possible.
1613 // Use xETR as xSELF might be scratched by native code
1614 ldr x2, [xETR, THREAD_EXCEPTION_OFFSET]
1615 cbnz x2, .Lexception_in_native
1616
Stuart Monteithb95a5342014-03-12 13:32:32 +00001617 // Tear down the alloca.
1618 mov sp, x28
1619 .cfi_def_cfa_register sp
1620
Stuart Monteithb95a5342014-03-12 13:32:32 +00001621 // Tear down the callee-save frame.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001622 RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
Stuart Monteithb95a5342014-03-12 13:32:32 +00001623
1624 // store into fpr, for when it's a fpr return...
1625 fmov d0, x0
1626 ret
1627
Stuart Monteithb95a5342014-03-12 13:32:32 +00001628.Lexception_in_native:
Nicolas Geoffray126d6592015-03-03 14:28:35 +00001629 // Restore xSELF. It might have been scratched by native code.
1630 mov xSELF, xETR
1631 // Move to x1 then sp to please assembler.
1632 ldr x1, [xSELF, # THREAD_TOP_QUICK_FRAME_OFFSET]
1633 mov sp, x1
1634 .cfi_def_cfa_register sp
1635 # This will create a new save-all frame, required by the runtime.
Stuart Monteithb95a5342014-03-12 13:32:32 +00001636 DELIVER_PENDING_EXCEPTION
Stuart Monteithb95a5342014-03-12 13:32:32 +00001637END art_quick_generic_jni_trampoline
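    /*
     * The trampoline above is a three-step sandwich around the native call:
     * artQuickGenericJniTrampoline builds the handle scope and out-args and returns the code
     * pointer (x0) plus the new stack bottom (x1); the stub pops the prepared GPR/FPR
     * arguments and calls the native code; artQuickGenericJniEndTrampoline converts the raw
     * result and unwinds the JNI state. A condensed C sketch of that protocol; the two-word
     * return struct and the helper names are assumptions made for illustration:
     *
     *   #include <stdint.h>
     *
     *   typedef struct Thread Thread;
     *
     *   // Two-register return: code == 0 signals an error / pending exception.
     *   typedef struct { uintptr_t code; uintptr_t sp; } TwoWordSketch;
     *
     *   extern TwoWordSketch artQuickGenericJniTrampoline(Thread* self, void* managed_frame);
     *   extern uint64_t artQuickGenericJniEndTrampoline(Thread* self, uint64_t gpr_result,
     *                                                   uint64_t fpr_result_bits);
     *   extern void* thread_pending_exception(Thread* self);   // stand-in accessor
     *
     *   typedef uint64_t (*native_fn_sketch_t)(void);   // real args come from the frame
     *
     *   static uint64_t generic_jni_sketch(Thread* self, void* managed_frame) {
     *       TwoWordSketch t = artQuickGenericJniTrampoline(self, managed_frame);
     *       if (t.code == 0) {
     *           return 0;                                // .Lexception_in_native
     *       }
     *       uint64_t raw = ((native_fn_sketch_t)t.code)();   // blr xIP0
     *       // The same raw bits stand in for both the GPR and FPR results in this sketch.
     *       uint64_t result = artQuickGenericJniEndTrampoline(self, raw, raw);
     *       if (thread_pending_exception(self) != 0) {
     *           return 0;                                // deliver the pending exception
     *       }
     *       return result;                               // also mirrored into d0
     *   }
     */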
1638
1639/*
1640 * Called to bridge from the quick to interpreter ABI. On entry the arguments match those
1641 * of a quick call:
1642 * x0 = method being called/to bridge to.
1643 * x1..x7, d0..d7 = arguments to that method.
1644 */
Ian Rogers6f3dbba2014-10-14 17:41:57 -07001645ENTRY art_quick_to_interpreter_bridge
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001646 SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME // Set up frame and save arguments.
Stuart Monteithb95a5342014-03-12 13:32:32 +00001647
1648 // x0 will contain mirror::ArtMethod* method.
1649 mov x1, xSELF // pass Thread::Current()
1650 mov x2, sp
1651
1652 // uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
1653 // mirror::ArtMethod** sp)
1654 bl artQuickToInterpreterBridge
1655
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001656 RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME // TODO: no need to restore arguments in this case.
Stuart Monteithb95a5342014-03-12 13:32:32 +00001657
1658 fmov d0, x0
1659
1660 RETURN_OR_DELIVER_PENDING_EXCEPTION
1661END art_quick_to_interpreter_bridge
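    /*
     * The "fmov d0, x0" above is the same trick used by the proxy and generic-JNI paths: the
     * bridge does not know the callee's return type, so it puts the 64-bit result bits in
     * both the integer and floating-point return registers. In C it is just a bit-level
     * copy, for example:
     *
     *   #include <stdint.h>
     *   #include <string.h>
     *
     *   // C analogue of "fmov d0, x0": reinterpret the raw result bits as a double.
     *   static double result_bits_as_double(uint64_t raw_result) {
     *       double d;
     *       memcpy(&d, &raw_result, sizeof d);
     *       return d;
     *   }
     */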
1662
Andreas Gamped58342c2014-06-05 14:18:08 -07001663
1664//
1665// Instrumentation-related stubs
1666//
1667 .extern artInstrumentationMethodEntryFromCode
1668ENTRY art_quick_instrumentation_entry
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001669 SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME
Andreas Gamped58342c2014-06-05 14:18:08 -07001670
Zheng Xub551fdc2014-07-25 11:49:42 +08001671 mov x20, x0 // Preserve method reference in a callee-save.
Andreas Gamped58342c2014-06-05 14:18:08 -07001672
1673 mov x2, xSELF
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001674 mov x3, xLR
1675 bl artInstrumentationMethodEntryFromCode // (Method*, Object*, Thread*, LR)
Andreas Gamped58342c2014-06-05 14:18:08 -07001676
Zheng Xub551fdc2014-07-25 11:49:42 +08001677 mov xIP0, x0 // x0 = result of call.
1678 mov x0, x20 // Reload method reference.
Andreas Gamped58342c2014-06-05 14:18:08 -07001679
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001680 RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME // Note: will restore xSELF
Andreas Gamped58342c2014-06-05 14:18:08 -07001681 adr xLR, art_quick_instrumentation_exit
Zheng Xub551fdc2014-07-25 11:49:42 +08001682 br xIP0 // Tail-call method with lr set to art_quick_instrumentation_exit.
Andreas Gamped58342c2014-06-05 14:18:08 -07001683END art_quick_instrumentation_entry
1684
1685 .extern artInstrumentationMethodExitFromCode
1686ENTRY art_quick_instrumentation_exit
1687 mov xLR, #0 // Clobber LR for later checks.
1688
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001689 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
Andreas Gamped58342c2014-06-05 14:18:08 -07001690
1691 // We need to save x0 and d0. We could use a callee-save from SETUP_REF_ONLY, but then
1692 // we would need to fully restore it. As there are a lot of callee-save registers, it seems
1693 // easier to have an extra small stack area.
1694
Sebastien Hertz70f8d4b2014-06-19 11:51:41 +02001695 str x0, [sp, #-16]! // Save integer result.
Andreas Gamped58342c2014-06-05 14:18:08 -07001696 .cfi_adjust_cfa_offset 16
1697 str d0, [sp, #8] // Save floating-point result.
1698
Andreas Gamped58342c2014-06-05 14:18:08 -07001699 add x1, sp, #16 // Pass SP.
1700 mov x2, x0 // Pass integer result.
1701 fmov x3, d0 // Pass floating-point result.
Sebastien Hertz70f8d4b2014-06-19 11:51:41 +02001702 mov x0, xSELF // Pass Thread.
Andreas Gamped58342c2014-06-05 14:18:08 -07001703 bl artInstrumentationMethodExitFromCode // (Thread*, SP, gpr_res, fpr_res)
1704
Zheng Xub551fdc2014-07-25 11:49:42 +08001705 mov xIP0, x0 // Return address from instrumentation call.
Andreas Gamped58342c2014-06-05 14:18:08 -07001706 mov xLR, x1 // x1 holds the link register if we're to bounce to deoptimize
1707
1708 ldr d0, [sp, #8] // Restore floating-point result.
1709 ldr x0, [sp], 16 // Restore integer result, and drop stack area.
1710 .cfi_adjust_cfa_offset 16
1711
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001712 POP_REFS_ONLY_CALLEE_SAVE_FRAME
Andreas Gamped58342c2014-06-05 14:18:08 -07001713
Zheng Xub551fdc2014-07-25 11:49:42 +08001714 br xIP0 // Tail-call out.
Andreas Gamped58342c2014-06-05 14:18:08 -07001715END art_quick_instrumentation_exit
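    /*
     * Together the two stubs implement a detour: the entry stub asks the runtime where to
     * really go and tail-calls that address with LR pointed at the exit stub; when the
     * instrumented method returns, the exit stub reports the results and receives the real
     * return address (x0) and the link register to use for a possible deoptimization (x1).
     * A schematic C sketch; the two-word return struct is an assumption, the argument lists
     * follow the comments above:
     *
     *   #include <stdint.h>
     *
     *   typedef struct Thread Thread;
     *   typedef struct ArtMethod ArtMethod;
     *   typedef struct Object Object;
     *
     *   // Entry: returns the code address to execute for the instrumented method.
     *   extern uintptr_t artInstrumentationMethodEntryFromCode(ArtMethod* method,
     *                                                          Object* receiver,
     *                                                          Thread* self, uintptr_t lr);
     *
     *   // Exit: x0 = address to resume at, x1 = link register for the deoptimization bounce.
     *   typedef struct { uintptr_t resume_pc; uintptr_t deopt_lr; } InstrumentationExitSketch;
     *   extern InstrumentationExitSketch artInstrumentationMethodExitFromCode(Thread* self,
     *                                                                         void* sp,
     *                                                                         uint64_t gpr_res,
     *                                                                         uint64_t fpr_res);
     *
     *   static void instrumentation_detour_sketch(ArtMethod* method, Object* receiver,
     *                                             Thread* self, uintptr_t caller_lr) {
     *       uintptr_t target =
     *           artInstrumentationMethodEntryFromCode(method, receiver, self, caller_lr);
     *       (void)target;   // the stub tail-calls 'target' with LR = art_quick_instrumentation_exit
     *   }
     */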
1716
1717 /*
1718 * Instrumentation has requested that we deoptimize into the interpreter. The deoptimization
1719 * will long jump to the upcall with a special exception of -1.
1720 */
1721 .extern artDeoptimize
1722ENTRY art_quick_deoptimize
1723 SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
1724 mov x0, xSELF // Pass thread.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001725 bl artDeoptimize // artDeoptimize(Thread*)
Serban Constantinescu86797a72014-06-19 16:17:56 +01001726 brk 0
Andreas Gamped58342c2014-06-05 14:18:08 -07001727END art_quick_deoptimize
1728
1729
Serban Constantinescu169489b2014-06-11 16:43:35 +01001730 /*
1731 * String's indexOf.
1732 *
1733 * TODO: Not very optimized.
1734 * On entry:
1735 * x0: string object (known non-null)
1736 * w1: char to match (known <= 0xFFFF)
1737 * w2: Starting offset in string data
1738 */
1739ENTRY art_quick_indexof
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001740 ldr w3, [x0, #MIRROR_STRING_COUNT_OFFSET]
Jeff Hao848f70a2014-01-15 13:49:50 -08001741 add x0, x0, #MIRROR_STRING_VALUE_OFFSET
Serban Constantinescu169489b2014-06-11 16:43:35 +01001742
1743 /* Clamp start to [0..count] */
1744 cmp w2, #0
1745 csel w2, wzr, w2, lt
1746 cmp w2, w3
1747 csel w2, w3, w2, gt
1748
Serban Constantinescu169489b2014-06-11 16:43:35 +01001749 /* Save a copy to compute result */
1750 mov x5, x0
1751
1752 /* Build pointer to start of data to compare and pre-bias */
1753 add x0, x0, x2, lsl #1
1754 sub x0, x0, #2
1755
1756 /* Compute iteration count */
1757 sub w2, w3, w2
1758
1759 /*
1760 * At this point we have:
1761 * x0: start of the data to test
1762 * w1: char to compare
1763 * w2: iteration count
1764 * x5: original start of string data
1765 */
1766
1767 subs w2, w2, #4
1768 b.lt .Lindexof_remainder
1769
1770.Lindexof_loop4:
1771 ldrh w6, [x0, #2]!
1772 ldrh w7, [x0, #2]!
Zheng Xub551fdc2014-07-25 11:49:42 +08001773 ldrh wIP0, [x0, #2]!
1774 ldrh wIP1, [x0, #2]!
Serban Constantinescu169489b2014-06-11 16:43:35 +01001775 cmp w6, w1
1776 b.eq .Lmatch_0
1777 cmp w7, w1
1778 b.eq .Lmatch_1
Zheng Xub551fdc2014-07-25 11:49:42 +08001779 cmp wIP0, w1
Serban Constantinescu169489b2014-06-11 16:43:35 +01001780 b.eq .Lmatch_2
Zheng Xub551fdc2014-07-25 11:49:42 +08001781 cmp wIP1, w1
Serban Constantinescu169489b2014-06-11 16:43:35 +01001782 b.eq .Lmatch_3
1783 subs w2, w2, #4
1784 b.ge .Lindexof_loop4
1785
1786.Lindexof_remainder:
1787 adds w2, w2, #4
1788 b.eq .Lindexof_nomatch
1789
1790.Lindexof_loop1:
1791 ldrh w6, [x0, #2]!
1792 cmp w6, w1
1793 b.eq .Lmatch_3
1794 subs w2, w2, #1
1795 b.ne .Lindexof_loop1
1796
1797.Lindexof_nomatch:
1798 mov x0, #-1
1799 ret
1800
1801.Lmatch_0:
1802 sub x0, x0, #6
1803 sub x0, x0, x5
1804 asr x0, x0, #1
1805 ret
1806.Lmatch_1:
1807 sub x0, x0, #4
1808 sub x0, x0, x5
1809 asr x0, x0, #1
1810 ret
1811.Lmatch_2:
1812 sub x0, x0, #2
1813 sub x0, x0, x5
1814 asr x0, x0, #1
1815 ret
1816.Lmatch_3:
1817 sub x0, x0, x5
1818 asr x0, x0, #1
1819 ret
1820END art_quick_indexof
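    /*
     * The routine above is a linear scan over the UTF-16 data with a four-way unrolled inner
     * loop. For reference, an un-unrolled C version of the same algorithm, assuming a simple
     * (length, 16-bit data) string layout:
     *
     *   #include <stdint.h>
     *
     *   // Clamp the start offset to [0, count], then scan for 'ch'; -1 if not found.
     *   static int32_t string_index_of_sketch(const uint16_t* data, int32_t count,
     *                                         uint16_t ch, int32_t start) {
     *       if (start < 0) {
     *           start = 0;          // csel w2, wzr, w2, lt
     *       }
     *       if (start > count) {
     *           start = count;      // csel w2, w3, w2, gt
     *       }
     *       for (int32_t i = start; i < count; ++i) {
     *           if (data[i] == ch) {
     *               return i;       // the asm recovers the index from the data pointer
     *           }
     *       }
     *       return -1;              // .Lindexof_nomatch
     *   }
     */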
Andreas Gampe266340d2014-05-02 07:55:24 -07001821
1822 /*
1823 * String's compareTo.
1824 *
1825 * TODO: Not very optimized.
1826 *
1827 * On entry:
1828 * x0: this object pointer
1829 * x1: comp object pointer
1830 *
1831 */
Serban Constantinescu86797a72014-06-19 16:17:56 +01001832 .extern __memcmp16
Andreas Gampe266340d2014-05-02 07:55:24 -07001833ENTRY art_quick_string_compareto
1834 mov x2, x0 // x0 is return, use x2 for first input.
1835 sub x0, x2, x1 // Same string object?
1836 cbnz x0, 1f
1837 ret
18381: // Different string objects.
1839
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001840 ldr w4, [x2, #MIRROR_STRING_COUNT_OFFSET]
1841 ldr w3, [x1, #MIRROR_STRING_COUNT_OFFSET]
Jeff Hao848f70a2014-01-15 13:49:50 -08001842 add x2, x2, #MIRROR_STRING_VALUE_OFFSET
1843 add x1, x1, #MIRROR_STRING_VALUE_OFFSET
Andreas Gampe266340d2014-05-02 07:55:24 -07001844
1845 /*
Jeff Hao848f70a2014-01-15 13:49:50 -08001846 * Now: Data* Count
1847 * first arg x2 w4
1848 * second arg x1 w3
Andreas Gampe266340d2014-05-02 07:55:24 -07001849 */
1850
1851 // x0 := str1.length(w4) - str2.length(w3). ldr zero-extended w3/w4 into x3/x4.
1852 subs x0, x4, x3
1853 // Min(count1, count2) into w3.
1854 csel x3, x3, x4, ge
1855
Serban Constantinescu169489b2014-06-11 16:43:35 +01001856 // TODO: Tune this value.
Andreas Gampe266340d2014-05-02 07:55:24 -07001857 // Check for long string, do memcmp16 for them.
1858 cmp w3, #28 // Constant from arm32.
1859 bgt .Ldo_memcmp16
1860
1861 /*
1862 * Now:
1863 * x2: *first string data
1864 * x1: *second string data
1865 * w3: iteration count
1866 * x0: return value if comparison equal
1867 * x4, x5, x6, x7: free
1868 */
1869
1870 // Do a simple unrolled loop.
1871.Lloop:
1872 // At least two more elements?
1873 subs w3, w3, #2
1874 b.lt .Lremainder_or_done
1875
1876 ldrh w4, [x2], #2
1877 ldrh w5, [x1], #2
1878
1879 ldrh w6, [x2], #2
1880 ldrh w7, [x1], #2
1881
1882 subs w4, w4, w5
1883 b.ne .Lw4_result
1884
1885 subs w6, w6, w7
1886 b.ne .Lw6_result
1887
1888 b .Lloop
1889
1890.Lremainder_or_done:
1891 adds w3, w3, #1
1892 b.eq .Lremainder
1893 ret
1894
1895.Lremainder:
1896 ldrh w4, [x2], #2
1897 ldrh w5, [x1], #2
1898 subs w4, w4, w5
1899 b.ne .Lw4_result
1900 ret
1901
1902// Result is in w4
1903.Lw4_result:
1904 sxtw x0, w4
1905 ret
1906
1907// Result is in w6
1908.Lw6_result:
1909 sxtw x0, w6
1910 ret
1911
1912.Ldo_memcmp16:
Zheng Xu62ddb322014-08-12 17:19:12 +08001913 mov x14, x0 // Save x0 and LR. __memcmp16 does not use these temps.
1914 mov x15, xLR // TODO: Codify and check that?
Andreas Gampe266340d2014-05-02 07:55:24 -07001915
1916 mov x0, x2
1917 uxtw x2, w3
Serban Constantinescu86797a72014-06-19 16:17:56 +01001918 bl __memcmp16
Andreas Gampe266340d2014-05-02 07:55:24 -07001919
Zheng Xu62ddb322014-08-12 17:19:12 +08001920 mov xLR, x15 // Restore LR.
Andreas Gampe266340d2014-05-02 07:55:24 -07001921
Serban Constantinescu86797a72014-06-19 16:17:56 +01001922 cmp x0, #0 // Check the memcmp difference.
Zheng Xu62ddb322014-08-12 17:19:12 +08001923 csel x0, x0, x14, ne // x0 := (x0 != 0) ? x0 (memcmp diff) : x14 (length diff).
Andreas Gampe266340d2014-05-02 07:55:24 -07001924 ret
1925END art_quick_string_compareto
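    /*
     * Ignoring the identical-object shortcut and the short/long string split, the comparison
     * above is "compare the common prefix, then fall back to the length difference". A
     * compact C rendering of that logic, again assuming a (length, 16-bit data) layout:
     *
     *   #include <stdint.h>
     *
     *   // Returns the difference of the first differing UTF-16 units, or count1 - count2
     *   // when one string is a prefix of the other. The assembly uses __memcmp16 once the
     *   // common length exceeds 28 units.
     *   static int32_t string_compare_to_sketch(const uint16_t* data1, int32_t count1,
     *                                           const uint16_t* data2, int32_t count2) {
     *       int32_t min_count = (count1 < count2) ? count1 : count2;
     *       for (int32_t i = 0; i < min_count; ++i) {
     *           int32_t diff = (int32_t)data1[i] - (int32_t)data2[i];
     *           if (diff != 0) {
     *               return diff;        // .Lw4_result / .Lw6_result
     *           }
     *       }
     *       return count1 - count2;     // the pre-computed length difference in x0
     *   }
     */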
Zheng Xu0210d112014-06-17 12:25:48 +08001926
1927// Macro to facilitate adding new entrypoints which call to native function directly.
1928// Currently, xSELF is the only thing we need to take care of between managed code and AAPCS.
1929// But we might introduce more differences.
1930.macro NATIVE_DOWNCALL name, entrypoint
1931 .extern \entrypoint
1932ENTRY \name
Serban Constantinescu86797a72014-06-19 16:17:56 +01001933 stp xSELF, xLR, [sp, #-16]!
Zheng Xu0210d112014-06-17 12:25:48 +08001934 bl \entrypoint
Serban Constantinescu86797a72014-06-19 16:17:56 +01001935 ldp xSELF, xLR, [sp], #16
Zheng Xu0210d112014-06-17 12:25:48 +08001936 ret
1937END \name
1938.endm
1939
1940NATIVE_DOWNCALL art_quick_fmod fmod
1941NATIVE_DOWNCALL art_quick_fmodf fmodf
1942NATIVE_DOWNCALL art_quick_memcpy memcpy
Serban Constantinescu86797a72014-06-19 16:17:56 +01001943NATIVE_DOWNCALL art_quick_assignable_from_code artIsAssignableFromCode