/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_arm64.S"

#include "arch/quick_alloc_entrypoints.S"


    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveAll)
     */
.macro SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    adrp xIP0, :got:_ZN3art7Runtime9instance_E
    ldr xIP0, [xIP0, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr xIP0, [xIP0]                // xIP0 = art::Runtime::instance_ (an art::Runtime*).

    // xIP0 = (ArtMethod*) Runtime.instance_.callee_save_methods[kSaveAll] .
    // Loads appropriate callee-save-method.
    ldr xIP0, [xIP0, RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET]

    sub sp, sp, #176
    .cfi_adjust_cfa_offset 176

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVE != 176)
#error "SAVE_ALL_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif
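
    // Frame layout (176 bytes), as spilled below:
    //   [sp, #168]  LR (x30)
    //   [sp, #160]  x29
    //   [sp, #80]   x19..x28 (pairs, up to #152)
    //   [sp, #16]   d8..d15 (pairs, up to #72)
    //   [sp, #8]    alignment filler
    //   [sp, #0]    ArtMethod* (stored at the end of this macro)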

    // Stack alignment filler [sp, #8].
    // FP callee-saves.
    stp d8, d9,   [sp, #16]
    stp d10, d11, [sp, #32]
    stp d12, d13, [sp, #48]
    stp d14, d15, [sp, #64]

    // GP callee-saves
    stp x19, x20, [sp, #80]
    .cfi_rel_offset x19, 80
    .cfi_rel_offset x20, 88

    stp x21, x22, [sp, #96]
    .cfi_rel_offset x21, 96
    .cfi_rel_offset x22, 104

    stp x23, x24, [sp, #112]
    .cfi_rel_offset x23, 112
    .cfi_rel_offset x24, 120

    stp x25, x26, [sp, #128]
    .cfi_rel_offset x25, 128
    .cfi_rel_offset x26, 136

    stp x27, x28, [sp, #144]
    .cfi_rel_offset x27, 144
    .cfi_rel_offset x28, 152

    stp x29, xLR, [sp, #160]
    .cfi_rel_offset x29, 160
    .cfi_rel_offset x30, 168

    // Store ArtMethod* Runtime::callee_save_methods_[kSaveAll].
    str xIP0, [sp]
    // Place sp in Thread::Current()->top_quick_frame.
    mov xIP0, sp
    str xIP0, [xSELF, # THREAD_TOP_QUICK_FRAME_OFFSET]
.endm

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsOnly).
     */
.macro SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
    adrp xIP0, :got:_ZN3art7Runtime9instance_E
    ldr xIP0, [xIP0, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr xIP0, [xIP0]                // xIP0 = art::Runtime::instance_ (an art::Runtime*).

    // xIP0 = (ArtMethod*) Runtime.instance_.callee_save_methods[kRefsOnly] .
    // Loads appropriate callee-save-method.
    ldr xIP0, [xIP0, RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET]

    sub sp, sp, #96
    .cfi_adjust_cfa_offset 96

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE != 96)
#error "REFS_ONLY_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif
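
    // Frame layout (96 bytes), as spilled below:
    //   [sp, #88]  LR (x30)
    //   [sp, #80]  x29
    //   [sp, #16]  x21..x28 (pairs, up to #72)
    //   [sp, #8]   x20 (paired with the ArtMethod* slot)
    //   [sp, #0]   ArtMethod*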

    // GP callee-saves.
    // x20 paired with ArtMethod* - see below.
    stp x21, x22, [sp, #16]
    .cfi_rel_offset x21, 16
    .cfi_rel_offset x22, 24

    stp x23, x24, [sp, #32]
    .cfi_rel_offset x23, 32
    .cfi_rel_offset x24, 40

    stp x25, x26, [sp, #48]
    .cfi_rel_offset x25, 48
    .cfi_rel_offset x26, 56

    stp x27, x28, [sp, #64]
    .cfi_rel_offset x27, 64
    .cfi_rel_offset x28, 72

    stp x29, xLR, [sp, #80]
    .cfi_rel_offset x29, 80
    .cfi_rel_offset x30, 88

    // Store ArtMethod* Runtime::callee_save_methods_[kRefsOnly].
    stp xIP0, x20, [sp]
    .cfi_rel_offset x20, 8

    // Place sp in Thread::Current()->top_quick_frame.
    mov xIP0, sp
    str xIP0, [xSELF, # THREAD_TOP_QUICK_FRAME_OFFSET]
.endm

// TODO: Probably no need to restore registers preserved by aapcs64.
.macro RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
    // Callee-saves.
    ldr x20, [sp, #8]
    .cfi_restore x20

    ldp x21, x22, [sp, #16]
    .cfi_restore x21
    .cfi_restore x22

    ldp x23, x24, [sp, #32]
    .cfi_restore x23
    .cfi_restore x24

    ldp x25, x26, [sp, #48]
    .cfi_restore x25
    .cfi_restore x26

    ldp x27, x28, [sp, #64]
    .cfi_restore x27
    .cfi_restore x28

    ldp x29, xLR, [sp, #80]
    .cfi_restore x29
    .cfi_restore x30

    add sp, sp, #96
    .cfi_adjust_cfa_offset -96
.endm

.macro POP_REFS_ONLY_CALLEE_SAVE_FRAME
    add sp, sp, #96
    .cfi_adjust_cfa_offset -96
.endm

.macro RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
    ret
.endm


.macro SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
    sub sp, sp, #224
    .cfi_adjust_cfa_offset 224

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE != 224)
#error "REFS_AND_ARGS_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif

    // Stack alignment filler [sp, #8].
    // FP args.
    stp d0, d1, [sp, #16]
    stp d2, d3, [sp, #32]
    stp d4, d5, [sp, #48]
    stp d6, d7, [sp, #64]

    // Core args.
    stp x1, x2, [sp, #80]
    .cfi_rel_offset x1, 80
    .cfi_rel_offset x2, 88

    stp x3, x4, [sp, #96]
    .cfi_rel_offset x3, 96
    .cfi_rel_offset x4, 104

    stp x5, x6, [sp, #112]
    .cfi_rel_offset x5, 112
    .cfi_rel_offset x6, 120

    // x7, Callee-saves.
    stp x7, x20, [sp, #128]
    .cfi_rel_offset x7, 128
    .cfi_rel_offset x20, 136

    stp x21, x22, [sp, #144]
    .cfi_rel_offset x21, 144
    .cfi_rel_offset x22, 152

    stp x23, x24, [sp, #160]
    .cfi_rel_offset x23, 160
    .cfi_rel_offset x24, 168

    stp x25, x26, [sp, #176]
    .cfi_rel_offset x25, 176
    .cfi_rel_offset x26, 184

    stp x27, x28, [sp, #192]
    .cfi_rel_offset x27, 192
    .cfi_rel_offset x28, 200

    // x29 (callee-save) and LR.
    stp x29, xLR, [sp, #208]
    .cfi_rel_offset x29, 208
    .cfi_rel_offset x30, 216

.endm

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsAndArgs).
     *
     * TODO: This is probably too conservative - saving FP & LR.
     */
.macro SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME
    adrp xIP0, :got:_ZN3art7Runtime9instance_E
    ldr xIP0, [xIP0, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr xIP0, [xIP0]                // xIP0 = art::Runtime::instance_ (an art::Runtime*).

    // xIP0 = (ArtMethod*) Runtime.instance_.callee_save_methods[kRefsAndArgs] .
    ldr xIP0, [xIP0, RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET]

    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL

    str xIP0, [sp]                  // Store ArtMethod* Runtime::callee_save_methods_[kRefsAndArgs].
    // Place sp in Thread::Current()->top_quick_frame.
    mov xIP0, sp
    str xIP0, [xSELF, # THREAD_TOP_QUICK_FRAME_OFFSET]
.endm

.macro SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_X0
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
    str x0, [sp, #0]                // Store ArtMethod* to bottom of stack.
    // Place sp in Thread::Current()->top_quick_frame.
    mov xIP0, sp
    str xIP0, [xSELF, # THREAD_TOP_QUICK_FRAME_OFFSET]
.endm

// TODO: Probably no need to restore registers preserved by aapcs64.
.macro RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
    // FP args.
    ldp d0, d1, [sp, #16]
    ldp d2, d3, [sp, #32]
    ldp d4, d5, [sp, #48]
    ldp d6, d7, [sp, #64]

    // Core args.
    ldp x1, x2, [sp, #80]
    .cfi_restore x1
    .cfi_restore x2

    ldp x3, x4, [sp, #96]
    .cfi_restore x3
    .cfi_restore x4

    ldp x5, x6, [sp, #112]
    .cfi_restore x5
    .cfi_restore x6

    // x7, Callee-saves.
    ldp x7, x20, [sp, #128]
    .cfi_restore x7
    .cfi_restore x20

    ldp x21, x22, [sp, #144]
    .cfi_restore x21
    .cfi_restore x22

    ldp x23, x24, [sp, #160]
    .cfi_restore x23
    .cfi_restore x24

    ldp x25, x26, [sp, #176]
    .cfi_restore x25
    .cfi_restore x26

    ldp x27, x28, [sp, #192]
    .cfi_restore x27
    .cfi_restore x28

    // x29 (callee-save) and LR.
    ldp x29, xLR, [sp, #208]
    .cfi_restore x29
    .cfi_restore x30

    add sp, sp, #224
    .cfi_adjust_cfa_offset -224
.endm

.macro RETURN_IF_RESULT_IS_ZERO
    cbnz x0, 1f                     // result non-zero branch over
    ret                             // return
1:
.endm

.macro RETURN_IF_RESULT_IS_NON_ZERO
    cbz x0, 1f                      // result zero branch over
    ret                             // return
1:
.endm

    /*
     * Macro that sets up a call through to artDeliverPendingExceptionFromCode,
     * where the pending exception is Thread::Current()->exception_.
     */
.macro DELIVER_PENDING_EXCEPTION
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov x0, xSELF

    // Point of no return.
    b artDeliverPendingExceptionFromCode  // artDeliverPendingExceptionFromCode(Thread*)
    brk 0                                 // Unreached
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_REG reg
    ldr \reg, [xSELF, # THREAD_EXCEPTION_OFFSET]  // Get exception field.
    cbnz \reg, 1f
    ret
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION
    RETURN_OR_DELIVER_PENDING_EXCEPTION_REG xIP0
.endm

// Same as above with x1. This is helpful in stubs that want to avoid clobbering another register.
.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
    RETURN_OR_DELIVER_PENDING_EXCEPTION_REG x1
.endm

.macro RETURN_IF_W0_IS_ZERO_OR_DELIVER
    cbnz w0, 1f                     // result non-zero branch over
    ret                             // return
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro NO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context
    mov x0, xSELF                     // pass Thread::Current
    b \cxx_name                       // \cxx_name(Thread*)
END \c_name
.endm

.macro ONE_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context.
    mov x1, xSELF                     // pass Thread::Current.
    b \cxx_name                       // \cxx_name(arg, Thread*).
    brk 0
END \c_name
.endm

.macro TWO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context
    mov x2, xSELF                     // pass Thread::Current
    b \cxx_name                       // \cxx_name(arg1, arg2, Thread*)
    brk 0
END \c_name
.endm

    /*
     * Called by managed code, saves callee saves and then calls artThrowException
     * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode

    /*
     * Called by managed code to create and deliver a NullPointerException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode

    /*
     * Call installed by a signal handler to create and deliver a NullPointerException.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception_from_signal, artThrowNullPointerExceptionFromSignal

    /*
     * Called by managed code to create and deliver an ArithmeticException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero, artThrowDivZeroFromCode

    /*
     * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
     * index, arg2 holds limit.
     */
TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromCode

    /*
     * Called by managed code to create and deliver a StringIndexOutOfBoundsException
     * as if thrown from a call to String.charAt(). Arg1 holds index, arg2 holds limit.
     */
TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_string_bounds, artThrowStringBoundsFromCode

    /*
     * Called by managed code to create and deliver a StackOverflowError.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode

    /*
     * Called by managed code to create and deliver a NoSuchMethodError.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFromCode

    /*
     * All generated callsites for interface invokes and invocation slow paths will load arguments
     * as usual - except instead of loading arg0/x0 with the target Method*, arg0/x0 will contain
     * the method_idx. This wrapper will save arg1-arg3, and call the appropriate C helper.
     * NOTE: "this" is the first visible argument of the target, and so can be found in arg1/x1.
     *
     * The helper will attempt to locate the target and return a 128-bit result in x0/x1 consisting
     * of the target Method* in x0 and method->code_ in x1.
     *
     * If unsuccessful, the helper will return null/????. There will be a pending exception in the
     * thread and we branch to another stub to deliver it.
     *
     * On success this wrapper will restore arguments and *jump* to the target, leaving the lr
     * pointing back to the original caller.
     *
     * Adapted from ARM32 code.
     *
     * Clobbers xIP0.
     */
.macro INVOKE_TRAMPOLINE_BODY cxx_name
    .extern \cxx_name
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME  // save callee saves in case allocation triggers GC
    // Helper signature is always
    // (method_idx, *this_object, *caller_method, *self, sp)

    mov x2, xSELF                          // pass Thread::Current
    mov x3, sp
    bl \cxx_name                           // (method_idx, this, Thread*, SP)
    mov xIP0, x1                           // save Method*->code_
    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
    cbz x0, 1f                             // did we find the target? if not go to exception delivery
    br xIP0                                // tail call to target
1:
    DELIVER_PENDING_EXCEPTION
.endm
.macro INVOKE_TRAMPOLINE c_name, cxx_name
ENTRY \c_name
    INVOKE_TRAMPOLINE_BODY \cxx_name
END \c_name
.endm

INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck

INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck


.macro INVOKE_STUB_CREATE_FRAME

SAVE_SIZE=15*8   // x4, x5, x19, x20, x21, x22, x23, x24, x25, x26, x27, x28, SP, LR, FP saved.
SAVE_SIZE_AND_METHOD=SAVE_SIZE+8


    mov x9, sp                          // Save stack pointer.
    .cfi_register sp,x9

    add x10, x2, # SAVE_SIZE_AND_METHOD // calculate size of frame.
    sub x10, sp, x10                    // Calculate SP position - saves + ArtMethod* + args
    and x10, x10, # ~0xf                // Enforce 16 byte stack alignment.
    mov sp, x10                         // Set new SP.

    sub x10, x9, #SAVE_SIZE             // Calculate new FP (later). Done here as we must move SP
    .cfi_def_cfa_register x10           // before this.
    .cfi_adjust_cfa_offset SAVE_SIZE

    str x28, [x10, #112]
    .cfi_rel_offset x28, 112

    stp x26, x27, [x10, #96]
    .cfi_rel_offset x26, 96
    .cfi_rel_offset x27, 104

    stp x24, x25, [x10, #80]
    .cfi_rel_offset x24, 80
    .cfi_rel_offset x25, 88

    stp x22, x23, [x10, #64]
    .cfi_rel_offset x22, 64
    .cfi_rel_offset x23, 72

    stp x20, x21, [x10, #48]
    .cfi_rel_offset x20, 48
    .cfi_rel_offset x21, 56

    stp x9, x19, [x10, #32]             // Save old stack pointer and x19.
    .cfi_rel_offset sp, 32
    .cfi_rel_offset x19, 40

    stp x4, x5, [x10, #16]              // Save result and shorty addresses.
    .cfi_rel_offset x4, 16
    .cfi_rel_offset x5, 24

    stp xFP, xLR, [x10]                 // Store LR & FP.
    .cfi_rel_offset x29, 0
    .cfi_rel_offset x30, 8

    mov xFP, x10                        // Use xFP now, as it's callee-saved.
    .cfi_def_cfa_register x29
    mov xSELF, x3                       // Move thread pointer into SELF register.

    // Copy arguments into stack frame.
    // Use simple copy routine for now.
    // 4 bytes per slot.
    // X1 - source address
    // W2 - args length
    // X9 - destination address.
    // W10 - temporary
    add x9, sp, #8                      // Destination address is bottom of stack + null.

    // Copy parameters into the stack. Use numeric label as this is a macro and Clang's assembler
    // does not have unique-id variables.
1:
    cmp w2, #0
    beq 2f
    sub w2, w2, #4                      // Need 65536 bytes of range.
    ldr w10, [x1, x2]
    str w10, [x9, x2]

    b 1b

2:
    // Store null into ArtMethod* at bottom of frame.
    str xzr, [sp]
.endm

.macro INVOKE_STUB_CALL_AND_RETURN

    // load method-> METHOD_QUICK_CODE_OFFSET
    ldr x9, [x0, #ART_METHOD_QUICK_CODE_OFFSET_64]
    // Branch to method.
    blr x9

    // Restore return value address and shorty address.
    ldp x4, x5, [xFP, #16]
    .cfi_restore x4
    .cfi_restore x5

    ldr x28, [xFP, #112]
    .cfi_restore x28

    ldp x26, x27, [xFP, #96]
    .cfi_restore x26
    .cfi_restore x27

    ldp x24, x25, [xFP, #80]
    .cfi_restore x24
    .cfi_restore x25

    ldp x22, x23, [xFP, #64]
    .cfi_restore x22
    .cfi_restore x23

    ldp x20, x21, [xFP, #48]
    .cfi_restore x20
    .cfi_restore x21

    // Store result (w0/x0/s0/d0) appropriately, depending on resultType.
    ldrb w10, [x5]

    // Check the return type and store the correct register into the jvalue in memory.
    // Use numeric label as this is a macro and Clang's assembler does not have unique-id variables.

    // Don't set anything for a void type.
    cmp w10, #'V'
    beq 3f

    // Is it a double?
    cmp w10, #'D'
    bne 1f
    str d0, [x4]
    b 3f

1:  // Is it a float?
    cmp w10, #'F'
    bne 2f
    str s0, [x4]
    b 3f

2:  // Just store x0. Doesn't matter if it is 64 or 32 bits.
    str x0, [x4]

3:  // Finish up.
    ldp x2, x19, [xFP, #32]             // Restore stack pointer and x19.
    .cfi_restore x19
    mov sp, x2
    .cfi_restore sp

    ldp xFP, xLR, [xFP]                 // Restore old frame pointer and link register.
    .cfi_restore x29
    .cfi_restore x30

    ret

.endm


/*  extern"C" void art_quick_invoke_stub(ArtMethod *method,   x0
 *                                       uint32_t  *args,     x1
 *                                       uint32_t argsize,    w2
 *                                       Thread *self,        x3
 *                                       JValue *result,      x4
 *                                       char   *shorty);     x5
 *  +----------------------+
 *  |                      |
 *  |  C/C++ frame         |
 *  |       LR''           |
 *  |       FP''           | <- SP'
 *  +----------------------+
 *  +----------------------+
 *  |        x28           | <- TODO: Remove callee-saves.
 *  |         :            |
 *  |        x19           |
 *  |        SP'           |
 *  |        X5            |
 *  |        X4            |        Saved registers
 *  |        LR'           |
 *  |        FP'           | <- FP
 *  +----------------------+
 *  | uint32_t out[n-1]    |
 *  |    :      :          |        Outs
 *  | uint32_t out[0]      |
 *  | ArtMethod*           | <- SP  value=null
 *  +----------------------+
 *
 * Outgoing registers:
 *  x0    - Method*
 *  x1-x7 - integer parameters.
 *  d0-d7 - Floating point parameters.
 *  xSELF = self
 *  SP = & of ArtMethod*
 *  x1 = "this" pointer.
 *
 */
ENTRY art_quick_invoke_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr  x11, .LstoreW2
    adr  x12, .LstoreX2
    adr  x13, .LstoreS0
    adr  x14, .LstoreD0

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1                     // Load shorty address, plus one to skip return value.
    ldr w1, [x9],#4                     // Load "this" parameter, and increment arg pointer.
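
    // The shorty has one character per parameter after the leading return-type
    // character: 'F' is a float, 'D' a double, 'J' a long (the latter two take
    // two 4-byte arg slots, hence .Ladvance8); everything else, including
    // references, occupies a single 4-byte slot.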
695
696 // Loop to fill registers.
697.LfillRegisters:
698 ldrb w17, [x10], #1 // Load next character in signature, and increment.
699 cbz w17, .LcallFunction // Exit at end of signature. Shorty 0 terminated.
700
701 cmp w17, #'F' // is this a float?
702 bne .LisDouble
703
704 cmp x15, # 8*12 // Skip this load if all registers full.
Andreas Gampe03906cf2014-04-07 12:08:28 -0700705 beq .Ladvance4
Stuart Monteithb95a5342014-03-12 13:32:32 +0000706
707 add x17, x13, x15 // Calculate subroutine to jump to.
708 br x17
709
710.LisDouble:
711 cmp w17, #'D' // is this a double?
712 bne .LisLong
713
714 cmp x15, # 8*12 // Skip this load if all registers full.
Andreas Gampe03906cf2014-04-07 12:08:28 -0700715 beq .Ladvance8
Stuart Monteithb95a5342014-03-12 13:32:32 +0000716
717 add x17, x14, x15 // Calculate subroutine to jump to.
718 br x17
719
720.LisLong:
721 cmp w17, #'J' // is this a long?
722 bne .LisOther
723
Andreas Gampe9de65ff2014-03-21 17:25:57 -0700724 cmp x8, # 6*12 // Skip this load if all registers full.
Andreas Gampe03906cf2014-04-07 12:08:28 -0700725 beq .Ladvance8
Stuart Monteithb95a5342014-03-12 13:32:32 +0000726
727 add x17, x12, x8 // Calculate subroutine to jump to.
728 br x17
729
Stuart Monteithb95a5342014-03-12 13:32:32 +0000730.LisOther: // Everything else takes one vReg.
Andreas Gampe9de65ff2014-03-21 17:25:57 -0700731 cmp x8, # 6*12 // Skip this load if all registers full.
Andreas Gampe03906cf2014-04-07 12:08:28 -0700732 beq .Ladvance4
733
Stuart Monteithb95a5342014-03-12 13:32:32 +0000734 add x17, x11, x8 // Calculate subroutine to jump to.
735 br x17
736
Andreas Gampe03906cf2014-04-07 12:08:28 -0700737.Ladvance4:
738 add x9, x9, #4
739 b .LfillRegisters
740
741.Ladvance8:
742 add x9, x9, #8
743 b .LfillRegisters
744
Stuart Monteithb95a5342014-03-12 13:32:32 +0000745// Macro for loading a parameter into a register.
746// counter - the register with offset into these tables
747// size - the size of the register - 4 or 8 bytes.
748// register - the name of the register to be loaded.
749.macro LOADREG counter size register return
750 ldr \register , [x9], #\size
751 add \counter, \counter, 12
752 b \return
753.endm
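
// Each LOADREG expansion is three 4-byte instructions (ldr/add/b), i.e. 12
// bytes of code, so the counter (x8 or x15) advances by 12 per consumed
// register and `add x17, <table base>, <counter>; br x17` above dispatches to
// the entry for the next free register. The 8*12 and 6*12 (or 7*12 in the
// static stub) comparisons stop loading once all argument registers are used.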

// Store ints.
.LstoreW2:
    LOADREG x8 4 w2 .LfillRegisters
    LOADREG x8 4 w3 .LfillRegisters
    LOADREG x8 4 w4 .LfillRegisters
    LOADREG x8 4 w5 .LfillRegisters
    LOADREG x8 4 w6 .LfillRegisters
    LOADREG x8 4 w7 .LfillRegisters

// Store longs.
.LstoreX2:
    LOADREG x8 8 x2 .LfillRegisters
    LOADREG x8 8 x3 .LfillRegisters
    LOADREG x8 8 x4 .LfillRegisters
    LOADREG x8 8 x5 .LfillRegisters
    LOADREG x8 8 x6 .LfillRegisters
    LOADREG x8 8 x7 .LfillRegisters

// Store singles.
.LstoreS0:
    LOADREG x15 4 s0 .LfillRegisters
    LOADREG x15 4 s1 .LfillRegisters
    LOADREG x15 4 s2 .LfillRegisters
    LOADREG x15 4 s3 .LfillRegisters
    LOADREG x15 4 s4 .LfillRegisters
    LOADREG x15 4 s5 .LfillRegisters
    LOADREG x15 4 s6 .LfillRegisters
    LOADREG x15 4 s7 .LfillRegisters

// Store doubles.
.LstoreD0:
    LOADREG x15 8 d0 .LfillRegisters
    LOADREG x15 8 d1 .LfillRegisters
    LOADREG x15 8 d2 .LfillRegisters
    LOADREG x15 8 d3 .LfillRegisters
    LOADREG x15 8 d4 .LfillRegisters
    LOADREG x15 8 d5 .LfillRegisters
    LOADREG x15 8 d6 .LfillRegisters
    LOADREG x15 8 d7 .LfillRegisters


.LcallFunction:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_stub

/*  extern"C"
 *     void art_quick_invoke_static_stub(ArtMethod *method,   x0
 *                                       uint32_t  *args,     x1
 *                                       uint32_t argsize,    w2
 *                                       Thread *self,        x3
 *                                       JValue *result,      x4
 *                                       char   *shorty);     x5
 */
ENTRY art_quick_invoke_static_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr  x11, .LstoreW1_2
    adr  x12, .LstoreX1_2
    adr  x13, .LstoreS0_2
    adr  x14, .LstoreD0_2

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1                     // Load shorty address, plus one to skip return value.

    // Loop to fill registers.
.LfillRegisters2:
    ldrb w17, [x10], #1                 // Load next character in signature, and increment.
    cbz w17, .LcallFunction2            // Exit at end of signature. Shorty 0 terminated.

    cmp  w17, #'F'                      // is this a float?
    bne .LisDouble2

    cmp x15, # 8*12                     // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x13, x15                   // Calculate subroutine to jump to.
    br  x17

.LisDouble2:
    cmp w17, #'D'                       // is this a double?
    bne .LisLong2

    cmp x15, # 8*12                     // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x14, x15                   // Calculate subroutine to jump to.
    br x17

.LisLong2:
    cmp w17, #'J'                       // is this a long?
    bne .LisOther2

    cmp x8, # 7*12                      // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x12, x8                    // Calculate subroutine to jump to.
    br x17

.LisOther2:                             // Everything else takes one vReg.
    cmp x8, # 7*12                      // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x11, x8                    // Calculate subroutine to jump to.
    br x17

.Ladvance4_2:
    add x9, x9, #4
    b .LfillRegisters2

.Ladvance8_2:
    add x9, x9, #8
    b .LfillRegisters2

// Store ints.
.LstoreW1_2:
    LOADREG x8 4 w1 .LfillRegisters2
    LOADREG x8 4 w2 .LfillRegisters2
    LOADREG x8 4 w3 .LfillRegisters2
    LOADREG x8 4 w4 .LfillRegisters2
    LOADREG x8 4 w5 .LfillRegisters2
    LOADREG x8 4 w6 .LfillRegisters2
    LOADREG x8 4 w7 .LfillRegisters2

// Store longs.
.LstoreX1_2:
    LOADREG x8 8 x1 .LfillRegisters2
    LOADREG x8 8 x2 .LfillRegisters2
    LOADREG x8 8 x3 .LfillRegisters2
    LOADREG x8 8 x4 .LfillRegisters2
    LOADREG x8 8 x5 .LfillRegisters2
    LOADREG x8 8 x6 .LfillRegisters2
    LOADREG x8 8 x7 .LfillRegisters2

// Store singles.
.LstoreS0_2:
    LOADREG x15 4 s0 .LfillRegisters2
    LOADREG x15 4 s1 .LfillRegisters2
    LOADREG x15 4 s2 .LfillRegisters2
    LOADREG x15 4 s3 .LfillRegisters2
    LOADREG x15 4 s4 .LfillRegisters2
    LOADREG x15 4 s5 .LfillRegisters2
    LOADREG x15 4 s6 .LfillRegisters2
    LOADREG x15 4 s7 .LfillRegisters2

// Store doubles.
.LstoreD0_2:
    LOADREG x15 8 d0 .LfillRegisters2
    LOADREG x15 8 d1 .LfillRegisters2
    LOADREG x15 8 d2 .LfillRegisters2
    LOADREG x15 8 d3 .LfillRegisters2
    LOADREG x15 8 d4 .LfillRegisters2
    LOADREG x15 8 d5 .LfillRegisters2
    LOADREG x15 8 d6 .LfillRegisters2
    LOADREG x15 8 d7 .LfillRegisters2


.LcallFunction2:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_static_stub



/*  extern"C" void art_quick_osr_stub(void** stack,                x0
 *                                    size_t stack_size_in_bytes,  x1
 *                                    const uint8_t* native_pc,    x2
 *                                    JValue *result,              x3
 *                                    char   *shorty,              x4
 *                                    Thread *self)                x5
 */
ENTRY art_quick_osr_stub
SAVE_SIZE=15*8   // x3, x4, x19, x20, x21, x22, x23, x24, x25, x26, x27, x28, SP, LR, FP saved.
    mov x9, sp                          // Save stack pointer.
    .cfi_register sp,x9

    sub x10, sp, # SAVE_SIZE
    and x10, x10, # ~0xf                // Enforce 16 byte stack alignment.
    mov sp, x10                         // Set new SP.

    str x28, [sp, #112]
    stp x26, x27, [sp, #96]
    stp x24, x25, [sp, #80]
    stp x22, x23, [sp, #64]
    stp x20, x21, [sp, #48]
    stp x9, x19, [sp, #32]              // Save old stack pointer and x19.
    stp x3, x4, [sp, #16]               // Save result and shorty addresses.
    stp xFP, xLR, [sp]                  // Store LR & FP.
    mov xSELF, x5                       // Move thread pointer into SELF register.

    sub sp, sp, #16
    str xzr, [sp]                       // Store null for ArtMethod* slot
    // Branch to stub.
    bl .Losr_entry
    add sp, sp, #16

    // Restore return value address and shorty address.
    ldp x3, x4, [sp, #16]
    ldr x28, [sp, #112]
    ldp x26, x27, [sp, #96]
    ldp x24, x25, [sp, #80]
    ldp x22, x23, [sp, #64]
    ldp x20, x21, [sp, #48]

    // Store result (w0/x0/s0/d0) appropriately, depending on resultType.
    ldrb w10, [x4]

    // Check the return type and store the correct register into the jvalue in memory.

    // Don't set anything for a void type.
    cmp w10, #'V'
    beq .Losr_exit

    // Is it a double?
    cmp w10, #'D'
    bne .Lno_double
    str d0, [x3]
    b .Losr_exit

.Lno_double:  // Is it a float?
    cmp w10, #'F'
    bne .Lno_float
    str s0, [x3]
    b .Losr_exit

.Lno_float:  // Just store x0. Doesn't matter if it is 64 or 32 bits.
    str x0, [x3]

.Losr_exit:  // Finish up.
    ldp x2, x19, [sp, #32]              // Restore stack pointer and x19.
    ldp xFP, xLR, [sp]                  // Restore old frame pointer and link register.
    mov sp, x2
    ret

.Losr_entry:
    // Update stack pointer for the callee
    sub sp, sp, x1

    // Update link register slot expected by the callee.
    sub w1, w1, #8
    str lr, [sp, x1]
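    // The compiled method's frame keeps the saved LR in its top 8 bytes, so
    // writing the return address at offset frame_size - 8 makes the callee's
    // epilogue return back into this stub.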

    // Copy arguments into stack frame.
    // Use simple copy routine for now.
    // 4 bytes per slot.
    // X0 - source address
    // W1 - args length
    // SP - destination address.
    // W10 - temporary
.Losr_loop_entry:
    cmp w1, #0
    beq .Losr_loop_exit
    sub w1, w1, #4
    ldr w10, [x0, x1]
    str w10, [sp, x1]
    b .Losr_loop_entry

.Losr_loop_exit:
    // Branch to the OSR entry point.
    br x2

END art_quick_osr_stub

    /*
     * On entry x0 is uintptr_t* gprs_ and x1 is uint64_t* fprs_
     */

ENTRY art_quick_do_long_jump
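    // gprs_ layout (8 bytes per slot), as implied by the loads below:
    // slots 0-30 hold x0..x30, slot 31 holds SP, slot 32 is the unused XZR
    // placeholder, and slot 33 holds the PC to resume at.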
1033 // Load FPRs
1034 ldp d0, d1, [x1], #16
1035 ldp d2, d3, [x1], #16
1036 ldp d4, d5, [x1], #16
1037 ldp d6, d7, [x1], #16
1038 ldp d8, d9, [x1], #16
1039 ldp d10, d11, [x1], #16
1040 ldp d12, d13, [x1], #16
1041 ldp d14, d15, [x1], #16
1042 ldp d16, d17, [x1], #16
1043 ldp d18, d19, [x1], #16
1044 ldp d20, d21, [x1], #16
1045 ldp d22, d23, [x1], #16
1046 ldp d24, d25, [x1], #16
1047 ldp d26, d27, [x1], #16
1048 ldp d28, d29, [x1], #16
1049 ldp d30, d31, [x1]
1050
1051 // Load GPRs
1052 // TODO: lots of those are smashed, could optimize.
1053 add x0, x0, #30*8
Andreas Gampe639bdd12015-06-03 11:22:45 -07001054 ldp x30, x1, [x0], #-16 // LR & SP
Stuart Monteithb95a5342014-03-12 13:32:32 +00001055 ldp x28, x29, [x0], #-16
1056 ldp x26, x27, [x0], #-16
1057 ldp x24, x25, [x0], #-16
1058 ldp x22, x23, [x0], #-16
1059 ldp x20, x21, [x0], #-16
1060 ldp x18, x19, [x0], #-16
1061 ldp x16, x17, [x0], #-16
1062 ldp x14, x15, [x0], #-16
1063 ldp x12, x13, [x0], #-16
1064 ldp x10, x11, [x0], #-16
1065 ldp x8, x9, [x0], #-16
1066 ldp x6, x7, [x0], #-16
1067 ldp x4, x5, [x0], #-16
1068 ldp x2, x3, [x0], #-16
1069 mov sp, x1
1070
Andreas Gampe639bdd12015-06-03 11:22:45 -07001071 // Need to load PC, it's at the end (after the space for the unused XZR). Use x1.
1072 ldr x1, [x0, #33*8]
1073 // And the value of x0.
1074 ldr x0, [x0]
1075
1076 br x1
Stuart Monteithb95a5342014-03-12 13:32:32 +00001077END art_quick_do_long_jump
1078
Andreas Gampef4e910b2014-04-29 16:55:52 -07001079 /*
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001080 * Entry from managed code that calls artLockObjectFromCode, may block for GC. x0 holds the
1081 * possibly null object to lock.
1082 *
1083 * Derived from arm32 code.
1084 */
1085 .extern artLockObjectFromCode
1086ENTRY art_quick_lock_object
1087 cbz w0, .Lslow_lock
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001088 add x4, x0, #MIRROR_OBJECT_LOCK_WORD_OFFSET // exclusive load/store has no immediate anymore
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001089.Lretry_lock:
1090 ldr w2, [xSELF, #THREAD_ID_OFFSET] // TODO: Can the thread ID really change during the loop?
1091 ldxr w1, [x4]
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001092 mov x3, x1
1093 and w3, w3, #LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED // zero the read barrier bits
1094 cbnz w3, .Lnot_unlocked // already thin locked
1095 // unlocked case - x1: original lock word that's zero except for the read barrier bits.
1096 orr x2, x1, x2 // x2 holds thread id with count of 0 with preserved read barrier bits
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001097 stxr w3, w2, [x4]
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001098 cbnz w3, .Llock_stxr_fail // store failed, retry
Andreas Gampe675967d2014-05-14 16:28:34 -07001099 dmb ishld // full (LoadLoad|LoadStore) memory barrier
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001100 ret
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001101.Lnot_unlocked: // x1: original lock word
1102 lsr w3, w1, LOCK_WORD_STATE_SHIFT
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001103 cbnz w3, .Lslow_lock // if either of the top two bits are set, go slow path
1104 eor w2, w1, w2 // lock_word.ThreadId() ^ self->ThreadId()
1105 uxth w2, w2 // zero top 16 bits
1106 cbnz w2, .Lslow_lock // lock word and self thread id's match -> recursive lock
1107 // else contention, go to slow path
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001108 mov x3, x1 // copy the lock word to check count overflow.
1109 and w3, w3, #LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED // zero the read barrier bits.
1110 add w2, w3, #LOCK_WORD_THIN_LOCK_COUNT_ONE // increment count in lock word placing in w2 to check overflow
1111 lsr w3, w2, LOCK_WORD_READ_BARRIER_STATE_SHIFT // if either of the upper two bits (28-29) are set, we overflowed.
1112 cbnz w3, .Lslow_lock // if we overflow the count go slow path
1113 add w2, w1, #LOCK_WORD_THIN_LOCK_COUNT_ONE // increment count for real
1114 stxr w3, w2, [x4]
1115 cbnz w3, .Llock_stxr_fail // store failed, retry
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001116 ret
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001117.Llock_stxr_fail:
1118 b .Lretry_lock // retry
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001119.Lslow_lock:
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001120 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case we block
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001121 mov x1, xSELF // pass Thread::Current
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001122 bl artLockObjectFromCode // (Object* obj, Thread*)
1123 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001124 RETURN_IF_W0_IS_ZERO_OR_DELIVER
1125END art_quick_lock_object
1126
Andreas Gampec7ed09b2016-04-25 20:08:55 -07001127ENTRY art_quick_lock_object_no_inline
1128 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case we block
1129 mov x1, xSELF // pass Thread::Current
1130 bl artLockObjectFromCode // (Object* obj, Thread*)
1131 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
1132 RETURN_IF_W0_IS_ZERO_OR_DELIVER
1133END art_quick_lock_object_no_inline
1134
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001135 /*
1136 * Entry from managed code that calls artUnlockObjectFromCode and delivers exception on failure.
1137 * x0 holds the possibly null object to lock.
1138 *
1139 * Derived from arm32 code.
1140 */
1141 .extern artUnlockObjectFromCode
1142ENTRY art_quick_unlock_object
1143 cbz x0, .Lslow_unlock
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001144 add x4, x0, #MIRROR_OBJECT_LOCK_WORD_OFFSET // exclusive load/store has no immediate anymore
1145.Lretry_unlock:
1146#ifndef USE_READ_BARRIER
1147 ldr w1, [x4]
1148#else
1149 ldxr w1, [x4] // Need to use atomic instructions for read barrier
1150#endif
1151 lsr w2, w1, LOCK_WORD_STATE_SHIFT
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001152 cbnz w2, .Lslow_unlock // if either of the top two bits are set, go slow path
1153 ldr w2, [xSELF, #THREAD_ID_OFFSET]
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001154 mov x3, x1 // copy lock word to check thread id equality
1155 and w3, w3, #LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED // zero the read barrier bits
1156 eor w3, w3, w2 // lock_word.ThreadId() ^ self->ThreadId()
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001157 uxth w3, w3 // zero top 16 bits
1158 cbnz w3, .Lslow_unlock // do lock word and self thread id's match?
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001159 mov x3, x1 // copy lock word to detect transition to unlocked
1160 and w3, w3, #LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED // zero the read barrier bits
1161 cmp w3, #LOCK_WORD_THIN_LOCK_COUNT_ONE
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001162 bpl .Lrecursive_thin_unlock
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001163 // transition to unlocked
1164 mov x3, x1
1165 and w3, w3, #LOCK_WORD_READ_BARRIER_STATE_MASK // w3: zero except for the preserved read barrier bits
Andreas Gampe675967d2014-05-14 16:28:34 -07001166 dmb ish // full (LoadStore|StoreStore) memory barrier
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001167#ifndef USE_READ_BARRIER
1168 str w3, [x4]
1169#else
1170 stxr w2, w3, [x4] // Need to use atomic instructions for read barrier
1171 cbnz w2, .Lunlock_stxr_fail // store failed, retry
1172#endif
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001173 ret
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001174.Lrecursive_thin_unlock: // w1: original lock word
1175 sub w1, w1, #LOCK_WORD_THIN_LOCK_COUNT_ONE // decrement count
1176#ifndef USE_READ_BARRIER
1177 str w1, [x4]
1178#else
1179 stxr w2, w1, [x4] // Need to use atomic instructions for read barrier
1180 cbnz w2, .Lunlock_stxr_fail // store failed, retry
1181#endif
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001182 ret
Hiroshi Yamauchie15ea082015-02-09 17:11:42 -08001183.Lunlock_stxr_fail:
1184 b .Lretry_unlock // retry
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001185.Lslow_unlock:
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001186 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case exception allocation triggers GC
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001187 mov x1, xSELF // pass Thread::Current
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001188 bl artUnlockObjectFromCode // (Object* obj, Thread*)
1189 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
Andreas Gampe4fc046e2014-05-06 16:56:39 -07001190 RETURN_IF_W0_IS_ZERO_OR_DELIVER
1191END art_quick_unlock_object
Andreas Gampe525cde22014-04-22 15:44:50 -07001192
Andreas Gampec7ed09b2016-04-25 20:08:55 -07001193ENTRY art_quick_unlock_object_no_inline
1194 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case exception allocation triggers GC
1195 mov x1, xSELF // pass Thread::Current
1196 bl artUnlockObjectFromCode // (Object* obj, Thread*)
1197 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
1198 RETURN_IF_W0_IS_ZERO_OR_DELIVER
1199END art_quick_unlock_object_no_inline
1200
Andreas Gampe525cde22014-04-22 15:44:50 -07001201 /*
1202 * Entry from managed code that calls artIsAssignableFromCode and on failure calls
1203 * artThrowClassCastException.
1204 */
1205 .extern artThrowClassCastException
1206ENTRY art_quick_check_cast
1207 // Store arguments and link register
Serban Constantinescu9bd88b02015-04-22 16:24:46 +01001208 // Stack needs to be 16B aligned on calls.
1209 stp x0, x1, [sp,#-32]!
Andreas Gampe525cde22014-04-22 15:44:50 -07001210 .cfi_adjust_cfa_offset 32
Andreas Gampe525cde22014-04-22 15:44:50 -07001211 .cfi_rel_offset x0, 0
1212 .cfi_rel_offset x1, 8
Serban Constantinescu9bd88b02015-04-22 16:24:46 +01001213 str xLR, [sp, #24]
Andreas Gampe525cde22014-04-22 15:44:50 -07001214 .cfi_rel_offset x30, 24
1215
1216 // Call runtime code
1217 bl artIsAssignableFromCode
1218
1219 // Check for exception
1220 cbz x0, .Lthrow_class_cast_exception
1221
1222 // Restore and return
Serban Constantinescu9bd88b02015-04-22 16:24:46 +01001223 ldr xLR, [sp, #24]
1224 .cfi_restore x30
1225 ldp x0, x1, [sp], #32
Andreas Gampe525cde22014-04-22 15:44:50 -07001226 .cfi_restore x0
1227 .cfi_restore x1
Andreas Gampe525cde22014-04-22 15:44:50 -07001228 .cfi_adjust_cfa_offset -32
1229 ret
1230
Andreas Gampe6b90d422015-06-26 19:49:24 -07001231 .cfi_adjust_cfa_offset 32 // Reset unwind info so following code unwinds.
1232
Andreas Gampe525cde22014-04-22 15:44:50 -07001233.Lthrow_class_cast_exception:
1234 // Restore
Serban Constantinescu9bd88b02015-04-22 16:24:46 +01001235 ldr xLR, [sp, #24]
1236 .cfi_restore x30
1237 ldp x0, x1, [sp], #32
Andreas Gampe525cde22014-04-22 15:44:50 -07001238 .cfi_restore x0
1239 .cfi_restore x1
Andreas Gampe525cde22014-04-22 15:44:50 -07001240 .cfi_adjust_cfa_offset -32
1241
1242 SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context
1243 mov x2, xSELF // pass Thread::Current
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001244 b artThrowClassCastException // (Class*, Class*, Thread*)
Andreas Gampe525cde22014-04-22 15:44:50 -07001245 brk 0 // We should not return here...
1246END art_quick_check_cast
1247
Man Cao1aee9002015-07-14 22:31:42 -07001248// Restore xReg's value from [sp, #offset] if xReg is not the same as xExclude.
1249.macro POP_REG_NE xReg, offset, xExclude
1250 .ifnc \xReg, \xExclude
1251 ldr \xReg, [sp, #\offset] // restore xReg
1252 .cfi_restore \xReg
1253 .endif
1254.endm

    /*
     * Macro to insert a read barrier, only used in art_quick_aput_obj.
     * xDest, wDest and xObj are registers, offset is a defined literal such as
     * MIRROR_OBJECT_CLASS_OFFSET. Dest needs both x and w versions of the same register to handle
     * the name mismatch between instructions. This macro uses the lower 32b of a register when
     * possible.
     * TODO: When the read barrier has a fast path, add heap unpoisoning support for the fast path.
     */
.macro READ_BARRIER xDest, wDest, xObj, offset
#ifdef USE_READ_BARRIER
    // Store registers used in art_quick_aput_obj (x0-x4, LR), stack is 16B aligned.
    stp x0, x1, [sp, #-48]!
    .cfi_adjust_cfa_offset 48
    .cfi_rel_offset x0, 0
    .cfi_rel_offset x1, 8
    stp x2, x3, [sp, #16]
    .cfi_rel_offset x2, 16
    .cfi_rel_offset x3, 24
    stp x4, xLR, [sp, #32]
    .cfi_rel_offset x4, 32
    .cfi_rel_offset x30, 40

    // mov x0, \xRef               // pass ref in x0 (no-op for now since parameter ref is unused)
    .ifnc \xObj, x1
        mov x1, \xObj              // pass xObj
    .endif
    mov w2, #\offset               // pass offset
    bl artReadBarrierSlow          // artReadBarrierSlow(ref, xObj, offset)
    // No need to unpoison return value in w0, artReadBarrierSlow() would do the unpoisoning.
    .ifnc \wDest, w0
        mov \wDest, w0             // save return value in wDest
    .endif

    // Conditionally restore saved registers
    POP_REG_NE x0, 0, \xDest
    POP_REG_NE x1, 8, \xDest
    POP_REG_NE x2, 16, \xDest
    POP_REG_NE x3, 24, \xDest
    POP_REG_NE x4, 32, \xDest
    ldr xLR, [sp, #40]
    .cfi_restore x30
    add sp, sp, #48
    .cfi_adjust_cfa_offset -48
#else
    ldr \wDest, [\xObj, #\offset]  // Heap reference = 32b. This also zero-extends to \xDest.
    UNPOISON_HEAP_REF \wDest
#endif  // USE_READ_BARRIER
.endm
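
    /*
     * What one expansion computes, as a C sketch (illustrative; the real
     * declaration of artReadBarrierSlow lives in the runtime):
     *
     *   uint32_t dest;                                // compressed heap reference
     *   #ifdef USE_READ_BARRIER
     *   dest = (uint32_t)(uintptr_t)artReadBarrierSlow(NULL, obj, offset);  // ref arg unused
     *   #else
     *   dest = *(uint32_t*)((uint8_t*)obj + offset);  // plain 32-bit load, then unpoison
     *   #endif
     */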

    /*
     * Entry from managed code for array put operations of objects where the value being stored
     * needs to be checked for compatibility.
     * x0 = array, x1 = index, x2 = value
     *
     * Currently all values should fit into w0/w1/w2, and w1 always will as indices are 32b. We
     * assume, though, that the upper 32b are zeroed out. At least for x1/w1 we can do better by
     * using index-zero-extension in load/stores.
     *
     * Temporaries: x3, x4
     * TODO: x4 OK? ip seems wrong here.
     */
ENTRY art_quick_aput_obj_with_null_and_bound_check
    tst x0, x0
    bne art_quick_aput_obj_with_bound_check
    b art_quick_throw_null_pointer_exception
END art_quick_aput_obj_with_null_and_bound_check

ENTRY art_quick_aput_obj_with_bound_check
    ldr w3, [x0, #MIRROR_ARRAY_LENGTH_OFFSET]
    cmp w3, w1
    bhi art_quick_aput_obj
    mov x0, x1
    mov x1, x3
    b art_quick_throw_array_bounds
END art_quick_aput_obj_with_bound_check
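
    /*
     * Equivalent logic in C (a sketch): the single unsigned compare above
     * covers both a negative index and index >= length, because a negative
     * index reinterpreted as uint32_t is larger than any valid array length.
     *
     *   if (array == NULL) art_quick_throw_null_pointer_exception();
     *   if ((uint32_t)array->length <= (uint32_t)index)
     *     art_quick_throw_array_bounds(index, array->length);
     *   art_quick_aput_obj(array, index, value);
     */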

#ifdef USE_READ_BARRIER
    .extern artReadBarrierSlow
#endif
ENTRY art_quick_aput_obj
    cbz x2, .Ldo_aput_null
    READ_BARRIER x3, w3, x0, MIRROR_OBJECT_CLASS_OFFSET          // Heap reference = 32b
                                                                 // This also zero-extends to x3
    READ_BARRIER x4, w4, x2, MIRROR_OBJECT_CLASS_OFFSET          // Heap reference = 32b
                                                                 // This also zero-extends to x4
    READ_BARRIER x3, w3, x3, MIRROR_CLASS_COMPONENT_TYPE_OFFSET  // Heap reference = 32b
                                                                 // This also zero-extends to x3
    cmp w3, w4  // value's type == array's component type - trivial assignability
    bne .Lcheck_assignability
.Ldo_aput:
    add x3, x0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET
                                                                 // "Compress" = do nothing
    POISON_HEAP_REF w2
    str w2, [x3, x1, lsl #2]                                     // Heap reference = 32b
    ldr x3, [xSELF, #THREAD_CARD_TABLE_OFFSET]
    lsr x0, x0, #7
    strb w3, [x3, x0]
    ret
.Ldo_aput_null:
    add x3, x0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET
                                                                 // "Compress" = do nothing
    str w2, [x3, x1, lsl #2]                                     // Heap reference = 32b
    ret
.Lcheck_assignability:
    // Store arguments and link register
    stp x0, x1, [sp, #-32]!
    .cfi_adjust_cfa_offset 32
    .cfi_rel_offset x0, 0
    .cfi_rel_offset x1, 8
    stp x2, xLR, [sp, #16]
    .cfi_rel_offset x2, 16
    .cfi_rel_offset x30, 24

    // Call runtime code
    mov x0, x3    // Heap reference, 32b, "uncompress" = do nothing, already zero-extended
    mov x1, x4    // Heap reference, 32b, "uncompress" = do nothing, already zero-extended
    bl artIsAssignableFromCode

    // Check for exception
    cbz x0, .Lthrow_array_store_exception

    // Restore
    ldp x2, x30, [sp, #16]
    .cfi_restore x2
    .cfi_restore x30
    ldp x0, x1, [sp], #32
    .cfi_restore x0
    .cfi_restore x1
    .cfi_adjust_cfa_offset -32

    add x3, x0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET
                                                                 // "Compress" = do nothing
    POISON_HEAP_REF w2
    str w2, [x3, x1, lsl #2]                                     // Heap reference = 32b
    ldr x3, [xSELF, #THREAD_CARD_TABLE_OFFSET]
    lsr x0, x0, #7
    strb w3, [x3, x0]
    ret
    .cfi_adjust_cfa_offset 32  // 4 restores after cbz for unwinding.
.Lthrow_array_store_exception:
    ldp x2, x30, [sp, #16]
    .cfi_restore x2
    .cfi_restore x30
    ldp x0, x1, [sp], #32
    .cfi_restore x0
    .cfi_restore x1
    .cfi_adjust_cfa_offset -32

    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov x1, x2                     // Pass value.
    mov x2, xSELF                  // Pass Thread::Current.
    b artThrowArrayStoreException  // (Object*, Object*, Thread*).
    brk 0                          // Unreached.
END art_quick_aput_obj
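
    /*
     * The store plus card mark above, as a C sketch (illustrative names).
     * ART's card table is biased so that the low byte of its base address is
     * the dirty-card value; that lets x3 serve as both the base pointer and
     * the byte that gets stored.
     *
     *   ((uint32_t*)((uint8_t*)array + MIRROR_OBJECT_ARRAY_DATA_OFFSET))[index] = value;
     *   uint8_t* card_table = self->card_table;
     *   card_table[(uintptr_t)array >> 7] = (uint8_t)(uintptr_t)card_table;
     */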

// Macro to facilitate adding new allocation entrypoints.
.macro ONE_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    mov x1, xSELF                      // pass Thread::Current
    bl \entrypoint                     // (uint32_t arg0, Thread*)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
    \return
END \name
.endm

// Macro to facilitate adding new allocation entrypoints.
.macro TWO_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    mov x2, xSELF                      // pass Thread::Current
    bl \entrypoint                     // (uint32_t type_idx, Method* method, Thread*)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
    \return
END \name
.endm

// Macro to facilitate adding new allocation entrypoints.
.macro THREE_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    mov x3, xSELF                      // pass Thread::Current
    bl \entrypoint
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
    \return
END \name
.endm

// Macro to facilitate adding new allocation entrypoints.
.macro FOUR_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    mov x4, xSELF                      // pass Thread::Current
    bl \entrypoint
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
    \return
    DELIVER_PENDING_EXCEPTION
END \name
.endm

// Macros taking opportunity of code similarities for downcalls with referrer.
.macro ONE_ARG_REF_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME                // save callee saves in case of GC
    ldr x1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE]  // Load referrer
    mov x2, xSELF                                    // pass Thread::Current
    bl \entrypoint                                   // (uint32_t field_idx, ArtMethod* referrer, Thread*)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
    \return
END \name
.endm

.macro TWO_ARG_REF_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME                // save callee saves in case of GC
    ldr x2, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE]  // Load referrer
    mov x3, xSELF                                    // pass Thread::Current
    bl \entrypoint
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
    \return
END \name
.endm

.macro THREE_ARG_REF_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME                // save callee saves in case of GC
    ldr x3, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE]  // Load referrer
    mov x4, xSELF                                    // pass Thread::Current
    bl \entrypoint
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
    \return
END \name
.endm

.macro RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
    cbz w0, 1f  // result zero branch over
    ret         // return
1:
    DELIVER_PENDING_EXCEPTION
.endm
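
    /*
     * The shared downcall shape in C (a sketch): every macro above expands to
     * "save frame, call the entrypoint with Thread::Current appended, restore
     * frame, then apply the \return macro", e.g. for the non-zero-result case:
     *
     *   uintptr_t result = entrypoint(..., Thread::Current());
     *   if (result != 0) return result;  // RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
     *   DeliverPendingException();       // long-jumps; does not return
     */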

    /*
     * Entry from managed code that calls artHandleFillArrayDataFromCode and delivers an exception
     * on failure.
     */
TWO_ARG_REF_DOWNCALL art_quick_handle_fill_data, artHandleFillArrayDataFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER

    /*
     * Entry from managed code when static storage is uninitialized; this stub will run the class
     * initializer and deliver the exception on error. On success the static storage base is
     * returned.
     */
ONE_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER

ONE_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
ONE_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER

ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1

TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1

TWO_ARG_REF_DOWNCALL art_quick_set8_static, artSet8StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
TWO_ARG_REF_DOWNCALL art_quick_set16_static, artSet16StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER

THREE_ARG_REF_DOWNCALL art_quick_set8_instance, artSet8InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
THREE_ARG_REF_DOWNCALL art_quick_set16_instance, artSet16InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
THREE_ARG_REF_DOWNCALL art_quick_set64_instance, artSet64InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER

// This is separated out as the argument order is different.
    .extern artSet64StaticFromCode
ENTRY art_quick_set64_static
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME                // save callee saves in case of GC
    ldr x1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE]  // Load referrer
                                                     // x2 contains the parameter
    mov x3, xSELF                                    // pass Thread::Current
    bl artSet64StaticFromCode
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_W0_IS_ZERO_OR_DELIVER
END art_quick_set64_static
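
    /*
     * Assumed C signature (illustrative) that explains the register assignment
     * above: the 64-bit value is the third parameter, so it stays in x2 and the
     * referrer goes into x1, unlike the generic THREE_ARG_REF_DOWNCALL order
     * where x2 would hold the referrer.
     *
     *   int artSet64StaticFromCode(uint32_t field_idx, ArtMethod* referrer,
     *                              uint64_t new_value, Thread* self);
     */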

    /*
     * Entry from managed code to resolve a string; this stub will allocate a String and deliver an
     * exception on error. On success the String is returned. w0 holds the string index. The fast
     * path check for a hit in the strings cache has already been performed.
     */
ONE_ARG_DOWNCALL art_quick_resolve_string, artResolveStringFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER

// Generate the allocation entrypoints for each allocator.
GENERATE_ALLOC_ENTRYPOINTS_FOR_EACH_ALLOCATOR

// A hand-written override for GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_rosalloc, RosAlloc).
ENTRY art_quick_alloc_object_rosalloc
    // Fast path rosalloc allocation.
    // x0: type_idx/return value, x1: ArtMethod*, xSELF(x19): Thread::Current
    // x2-x7: free.
    ldr x2, [x1, #ART_METHOD_DEX_CACHE_TYPES_OFFSET_64]    // Load dex cache resolved types array
                                                           // Load the class (x2)
    ldr w2, [x2, x0, lsl #COMPRESSED_REFERENCE_SIZE_SHIFT]
    cbz x2, .Lart_quick_alloc_object_rosalloc_slow_path    // Check null class
                                                           // Check class status.
    ldr w3, [x2, #MIRROR_CLASS_STATUS_OFFSET]
    cmp x3, #MIRROR_CLASS_STATUS_INITIALIZED
    bne .Lart_quick_alloc_object_rosalloc_slow_path
                                                           // Add a fake dependence from the
                                                           // following access flag and size
                                                           // loads to the status load.
                                                           // This is to prevent those loads
                                                           // from being reordered above the
                                                           // status load and reading wrong
                                                           // values (an alternative is to use
                                                           // a load-acquire for the status).
    eor x3, x3, x3
    add x2, x2, x3
                                                           // Check access flags has
                                                           // kAccClassIsFinalizable
    ldr w3, [x2, #MIRROR_CLASS_ACCESS_FLAGS_OFFSET]
    tst x3, #ACCESS_FLAGS_CLASS_IS_FINALIZABLE
    bne .Lart_quick_alloc_object_rosalloc_slow_path
    ldr x3, [xSELF, #THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET]  // Check if the thread local
                                                           // allocation stack has room.
                                                           // ldp won't work due to large offset.
    ldr x4, [xSELF, #THREAD_LOCAL_ALLOC_STACK_END_OFFSET]
    cmp x3, x4
    bhs .Lart_quick_alloc_object_rosalloc_slow_path
    ldr w3, [x2, #MIRROR_CLASS_OBJECT_SIZE_OFFSET]         // Load the object size (x3)
    cmp x3, #ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE        // Check if the size is for a thread
                                                           // local allocation
    bhs .Lart_quick_alloc_object_rosalloc_slow_path
                                                           // Compute the rosalloc bracket index
                                                           // from the size: round the size up
                                                           // to the rosalloc bracket quantum,
                                                           // divide by the quantum, and
                                                           // subtract 1. This code is a shorter
                                                           // but equivalent version.
    sub x3, x3, #1
    lsr x3, x3, #ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT
                                                           // Load the rosalloc run (x4)
    add x4, xSELF, x3, lsl #POINTER_SIZE_SHIFT
    ldr x4, [x4, #THREAD_ROSALLOC_RUNS_OFFSET]
                                                           // Load the free list head (x3). This
                                                           // will be the return val.
    ldr x3, [x4, #(ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)]
    cbz x3, .Lart_quick_alloc_object_rosalloc_slow_path
    // "Point of no slow path". Won't go to the slow path from here on. OK to clobber x0 and x1.
    ldr x1, [x3, #ROSALLOC_SLOT_NEXT_OFFSET]               // Load the next pointer of the head
                                                           // and update the list head with the
                                                           // next pointer.
    str x1, [x4, #(ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)]
                                                           // Store the class pointer in the
                                                           // header. This also overwrites the
                                                           // next pointer. The offsets are
                                                           // asserted to match.
#if ROSALLOC_SLOT_NEXT_OFFSET != MIRROR_OBJECT_CLASS_OFFSET
#error "Class pointer needs to overwrite next pointer."
#endif
    POISON_HEAP_REF w2
    str w2, [x3, #MIRROR_OBJECT_CLASS_OFFSET]
                                                           // Push the new object onto the thread
                                                           // local allocation stack and
                                                           // increment the thread local
                                                           // allocation stack top.
    ldr x1, [xSELF, #THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET]
    str w3, [x1], #COMPRESSED_REFERENCE_SIZE               // (Increment x1 as a side effect.)
    str x1, [xSELF, #THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET]
                                                           // Decrement the size of the free list
    ldr w1, [x4, #(ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)]
    sub x1, x1, #1
                                                           // TODO: consider combining this store
                                                           // and the list head store above using
                                                           // strd.
    str w1, [x4, #(ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)]
                                                           // Fence. This is "ish" not "ishst" so
                                                           // that the code after this allocation
                                                           // site will see the right values in
                                                           // the fields of the class.
                                                           // (Alternatively we could use "ishst"
                                                           // if we used a load-acquire for the
                                                           // class status load.)
    dmb ish
    mov x0, x3                                             // Set the return value and return.
    ret
.Lart_quick_alloc_object_rosalloc_slow_path:
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  // save callee saves in case of GC
    mov x2, xSELF                      // pass Thread::Current
    bl artAllocObjectFromCodeRosAlloc  // (uint32_t type_idx, Method* method, Thread*)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
END art_quick_alloc_object_rosalloc
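
    /*
     * The fast path above as a C sketch (illustrative field names; the real
     * structures are RosAlloc's runs and free lists inside the runtime):
     *
     *   size_t idx = (size - 1) >> ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT;  // bracket index
     *   Run* run = self->rosalloc_runs[idx];
     *   Slot* slot = run->free_list.head;
     *   if (slot == NULL) goto slow_path;
     *   run->free_list.head = slot->next;    // pop the free list head
     *   slot->klass = klass;                 // class pointer overwrites 'next'
     *   *self->thread_local_alloc_stack_top++ = (uint32_t)(uintptr_t)slot;
     *   run->free_list.size--;
     *   dmb_ish();                           // publish initialized fields
     *   return (Object*)slot;
     */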

// The common fast path code for art_quick_alloc_object_tlab and art_quick_alloc_object_region_tlab.
//
// x0: type_idx/return value, x1: ArtMethod*, x2: Class*, xSELF(x19): Thread::Current
// x3-x7: free.
// Need to preserve x0 and x1 for the slow path.
.macro ALLOC_OBJECT_TLAB_FAST_PATH slowPathLabel
    cbz x2, \slowPathLabel                                 // Check null class
                                                           // Check class status.
    ldr w3, [x2, #MIRROR_CLASS_STATUS_OFFSET]
    cmp x3, #MIRROR_CLASS_STATUS_INITIALIZED
    bne \slowPathLabel
                                                           // Add a fake dependence from the
                                                           // following access flag and size
                                                           // loads to the status load.
                                                           // This is to prevent those loads
                                                           // from being reordered above the
                                                           // status load and reading wrong
                                                           // values (an alternative is to use
                                                           // a load-acquire for the status).
    eor x3, x3, x3
    add x2, x2, x3
                                                           // Check access flags has
                                                           // kAccClassIsFinalizable.
    ldr w3, [x2, #MIRROR_CLASS_ACCESS_FLAGS_OFFSET]
    tbnz x3, #ACCESS_FLAGS_CLASS_IS_FINALIZABLE_BIT, \slowPathLabel
                                                           // Load thread_local_pos (x4) and
                                                           // thread_local_end (x5).
    ldr x4, [xSELF, #THREAD_LOCAL_POS_OFFSET]
    ldr x5, [xSELF, #THREAD_LOCAL_END_OFFSET]
    sub x6, x5, x4                                         // Compute the remaining buf size.
    ldr w7, [x2, #MIRROR_CLASS_OBJECT_SIZE_OFFSET]         // Load the object size (x7).
    cmp x7, x6                                             // Check if it fits. OK to do this
                                                           // before rounding up the object size
                                                           // assuming the buf size alignment.
    bhi \slowPathLabel
    // "Point of no slow path". Won't go to the slow path from here on. OK to clobber x0 and x1.
                                                           // Round up the object size by the
                                                           // object alignment: (size + 7) & ~7.
    add x7, x7, #OBJECT_ALIGNMENT_MASK
    and x7, x7, #OBJECT_ALIGNMENT_MASK_TOGGLED
                                                           // Move old thread_local_pos to x0
                                                           // for the return value.
    mov x0, x4
    add x5, x0, x7
    str x5, [xSELF, #THREAD_LOCAL_POS_OFFSET]              // Store new thread_local_pos.
    ldr x5, [xSELF, #THREAD_LOCAL_OBJECTS_OFFSET]          // Increment thread_local_objects.
    add x5, x5, #1
    str x5, [xSELF, #THREAD_LOCAL_OBJECTS_OFFSET]
    POISON_HEAP_REF w2
    str w2, [x0, #MIRROR_OBJECT_CLASS_OFFSET]              // Store the class pointer.
                                                           // Fence. This is "ish" not "ishst" so
                                                           // that the code after this allocation
                                                           // site will see the right values in
                                                           // the fields of the class.
                                                           // (Alternatively we could use "ishst"
                                                           // if we used a load-acquire for the
                                                           // class status load.)
    dmb ish
    ret
.endm
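
    /*
     * The bump-pointer fast path above as a C sketch (illustrative names):
     *
     *   size_t size = klass->object_size;
     *   if (size > (size_t)(self->thread_local_end - self->thread_local_pos))
     *     goto slow_path;                    // checked before rounding; the
     *                                        // buffer end is suitably aligned
     *   size = (size + OBJECT_ALIGNMENT_MASK) & OBJECT_ALIGNMENT_MASK_TOGGLED;
     *   Object* obj = (Object*)self->thread_local_pos;
     *   self->thread_local_pos += size;
     *   self->thread_local_objects++;
     *   obj->klass = klass;
     *   dmb_ish();                           // publish before returning
     *   return obj;
     */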

// A hand-written override for GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_tlab, TLAB).
ENTRY art_quick_alloc_object_tlab
    // Fast path tlab allocation.
    // x0: type_idx/return value, x1: ArtMethod*, xSELF(x19): Thread::Current
    // x2-x7: free.
#if defined(USE_READ_BARRIER)
    mvn x0, xzr                                            // Read barrier not supported here.
    ret                                                    // Return -1.
#endif
    ldr x2, [x1, #ART_METHOD_DEX_CACHE_TYPES_OFFSET_64]    // Load dex cache resolved types array
                                                           // Load the class (x2)
    ldr w2, [x2, x0, lsl #COMPRESSED_REFERENCE_SIZE_SHIFT]
    ALLOC_OBJECT_TLAB_FAST_PATH .Lart_quick_alloc_object_tlab_slow_path
.Lart_quick_alloc_object_tlab_slow_path:
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  // Save callee saves in case of GC.
    mov x2, xSELF                      // Pass Thread::Current.
    bl artAllocObjectFromCodeTLAB      // (uint32_t type_idx, Method* method, Thread*)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
END art_quick_alloc_object_tlab

// A hand-written override for GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_region_tlab, RegionTLAB).
ENTRY art_quick_alloc_object_region_tlab
    // Fast path region tlab allocation.
    // x0: type_idx/return value, x1: ArtMethod*, xSELF(x19): Thread::Current
    // x2-x7: free.
#if !defined(USE_READ_BARRIER)
    mvn x0, xzr                                            // Read barrier must be enabled here.
    ret                                                    // Return -1.
#endif
    ldr x2, [x1, #ART_METHOD_DEX_CACHE_TYPES_OFFSET_64]    // Load dex cache resolved types array
                                                           // Load the class (x2)
    ldr w2, [x2, x0, lsl #COMPRESSED_REFERENCE_SIZE_SHIFT]
                                                           // Read barrier for class load.
    ldr w3, [xSELF, #THREAD_IS_GC_MARKING_OFFSET]
    cbnz x3, .Lart_quick_alloc_object_region_tlab_class_load_read_barrier_slow_path
.Lart_quick_alloc_object_region_tlab_class_load_read_barrier_slow_path_exit:
    ALLOC_OBJECT_TLAB_FAST_PATH .Lart_quick_alloc_object_region_tlab_slow_path
.Lart_quick_alloc_object_region_tlab_class_load_read_barrier_slow_path:
                                                           // The read barrier slow path. Mark
                                                           // the class.
    stp x0, x1, [sp, #-32]!                                // Save registers (x0, x1, lr).
    str xLR, [sp, #16]                                     // Align sp by 16 bytes.
    mov x0, x2                                             // Pass the class as the first param.
    bl artReadBarrierMark
    mov x2, x0                                             // Get the (marked) class back.
    ldp x0, x1, [sp, #0]                                   // Restore registers.
    ldr xLR, [sp, #16]
    add sp, sp, #32
    b .Lart_quick_alloc_object_region_tlab_class_load_read_barrier_slow_path_exit
.Lart_quick_alloc_object_region_tlab_slow_path:
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME   // Save callee saves in case of GC.
    mov x2, xSELF                       // Pass Thread::Current.
    bl artAllocObjectFromCodeRegionTLAB // (uint32_t type_idx, Method* method, Thread*)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
END art_quick_alloc_object_region_tlab
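
    /*
     * A C sketch (illustrative) of the class-load read barrier above: with the
     * concurrent copying collector, the class reference loaded from the dex
     * cache must be marked while the GC is in its marking phase.
     *
     *   mirror::Class* klass = method->dex_cache_resolved_types[type_idx];
     *   if (self->is_gc_marking) {
     *     klass = artReadBarrierMark(klass);  // returns the to-space reference
     *   }
     */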

    /*
     * Called by managed code when the thread has been asked to suspend.
     */
    .extern artTestSuspendFromCode
ENTRY art_quick_test_suspend
    ldrh w0, [xSELF, #THREAD_FLAGS_OFFSET]  // get xSELF->state_and_flags.as_struct.flags
    cbnz w0, .Lneed_suspend                 // check flags == 0
    ret                                     // return if flags == 0
.Lneed_suspend:
    mov x0, xSELF
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME       // save callee saves for stack crawl
    bl artTestSuspendFromCode               // (Thread*)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
END art_quick_test_suspend

ENTRY art_quick_implicit_suspend
    mov x0, xSELF
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME       // save callee saves for stack crawl
    bl artTestSuspendFromCode               // (Thread*)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
END art_quick_implicit_suspend
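
    /*
     * Equivalent logic in C (a sketch; field names follow the comment above):
     * the fast path is a single halfword load and a compare against zero.
     *
     *   if (self->state_and_flags.as_struct.flags != 0) {
     *     artTestSuspendFromCode(self);  // may suspend the thread at a safepoint
     *   }
     */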

    /*
     * Called by managed code that is attempting to call a method on a proxy class. On entry
     * x0 holds the proxy method and x1 holds the receiver. The frame size of the invoked proxy
     * method agrees with a ref and args callee save frame.
     */
    .extern artQuickProxyInvokeHandler
ENTRY art_quick_proxy_invoke_handler
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_X0
    mov x2, xSELF                            // pass Thread::Current
    mov x3, sp                               // pass SP
    bl artQuickProxyInvokeHandler            // (Method* proxy method, receiver, Thread*, SP)
    ldr x2, [xSELF, THREAD_EXCEPTION_OFFSET]
    cbnz x2, .Lexception_in_proxy            // success if no exception is pending
    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME  // Restore frame
    fmov d0, x0                              // Store result in d0 in case it was float or double
    ret                                      // return on success
.Lexception_in_proxy:
    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_proxy_invoke_handler

    /*
     * Called to resolve an imt conflict.
     * x0 is the conflict ArtMethod.
     * xIP1 is a hidden argument that holds the target interface method's dex method index.
     *
     * Note that this stub writes to xIP0, xIP1, and x0.
     */
    .extern artInvokeInterfaceTrampoline
ENTRY art_quick_imt_conflict_trampoline
    ldr xIP0, [sp, #0]                                         // Load referrer
    ldr xIP0, [xIP0, #ART_METHOD_DEX_CACHE_METHODS_OFFSET_64]  // Load dex cache methods array
    ldr xIP0, [xIP0, xIP1, lsl #POINTER_SIZE_SHIFT]            // Load interface method
    ldr xIP1, [x0, #ART_METHOD_JNI_OFFSET_64]                  // Load ImtConflictTable
    ldr x0, [xIP1]                                             // Load first entry in ImtConflictTable.
.Limt_table_iterate:
    cmp x0, xIP0
    // Branch if found. Benchmarks have shown doing a branch here is better.
    beq .Limt_table_found
    // If the entry is null, the interface method is not in the ImtConflictTable.
    cbz x0, .Lconflict_trampoline
    // Iterate over the entries of the ImtConflictTable.
    ldr x0, [xIP1, #(2 * __SIZEOF_POINTER__)]!
    b .Limt_table_iterate
.Limt_table_found:
    // We successfully hit an entry in the table. Load the target method
    // and jump to it.
    ldr x0, [xIP1, #__SIZEOF_POINTER__]
    ldr xIP0, [x0, #ART_METHOD_QUICK_CODE_OFFSET_64]
    br xIP0
.Lconflict_trampoline:
    // Call the runtime stub to populate the ImtConflictTable and jump to the
    // resolved method.
    INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline
END art_quick_imt_conflict_trampoline
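
    /*
     * The table walk above in C (a sketch; assuming the ImtConflictTable is a
     * sequence of (interface method, implementation) pointer pairs ending with
     * a null interface method):
     *
     *   void** entry = (void**)conflict_method->jni_entrypoint;  // the table
     *   for (; entry[0] != NULL; entry += 2) {
     *     if (entry[0] == interface_method) {
     *       goto_quick_code(((ArtMethod*)entry[1])->quick_code); // tail call
     *     }
     *   }
     *   artInvokeInterfaceTrampoline(...);  // populate the table, then invoke
     */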

ENTRY art_quick_resolution_trampoline
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME
    mov x2, xSELF
    mov x3, sp
    bl artQuickResolutionTrampoline  // (called, receiver, Thread*, SP)
    cbz x0, 1f
    mov xIP0, x0      // Remember returned code pointer in xIP0.
    ldr x0, [sp, #0]  // artQuickResolutionTrampoline puts called method in *SP.
    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
    br xIP0
1:
    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_resolution_trampoline

/*
 * Generic JNI frame layout:
 *
 * #-------------------#
 * |                   |
 * | caller method...  |
 * #-------------------#    <--- SP on entry
 * | Return X30/LR     |
 * | X29/FP            |    callee save
 * | X28               |    callee save
 * | X27               |    callee save
 * | X26               |    callee save
 * | X25               |    callee save
 * | X24               |    callee save
 * | X23               |    callee save
 * | X22               |    callee save
 * | X21               |    callee save
 * | X20               |    callee save
 * | X19               |    callee save
 * | X7                |    arg7
 * | X6                |    arg6
 * | X5                |    arg5
 * | X4                |    arg4
 * | X3                |    arg3
 * | X2                |    arg2
 * | X1                |    arg1
 * | D7                |    float arg 8
 * | D6                |    float arg 7
 * | D5                |    float arg 6
 * | D4                |    float arg 5
 * | D3                |    float arg 4
 * | D2                |    float arg 3
 * | D1                |    float arg 2
 * | D0                |    float arg 1
 * | Method*           | <- X0
 * #-------------------#
 * | local ref cookie  | // 4B
 * | handle scope size | // 4B
 * #-------------------#
 * | JNI Call Stack    |
 * #-------------------#    <--- SP on native call
 * |                   |
 * | Stack for Regs    |    The trampoline assembly will pop these values
 * |                   |    into registers for native call
 * #-------------------#
 * | Native code ptr   |
 * #-------------------#
 * | Free scratch      |
 * #-------------------#
 * | Ptr to (1)        |    <--- SP
 * #-------------------#
 */
    /*
     * Called to do a generic JNI down-call
     */
ENTRY art_quick_generic_jni_trampoline
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_X0

    // Save SP, so we can have static CFI info.
    mov x28, sp
    .cfi_def_cfa_register x28

    // This looks the same, but is different: this will be updated to point to the bottom
    // of the frame when the handle scope is inserted.
    mov xFP, sp

    mov xIP0, #5120
    sub sp, sp, xIP0

    // prepare for artQuickGenericJniTrampoline call
    // (Thread*, SP)
    //    x0      x1   <= C calling convention
    //  xSELF    xFP   <= where they are

    mov x0, xSELF                    // Thread*
    mov x1, xFP
    bl artQuickGenericJniTrampoline  // (Thread*, sp)

    // The C call will have registered the complete save-frame on success.
    // The result of the call is:
    // x0: pointer to native code, 0 on error.
    // x1: pointer to the bottom of the used area of the alloca, can restore stack till there.

    // Check for error = 0.
    cbz x0, .Lexception_in_native

    // Release part of the alloca.
    mov sp, x1

    // Save the code pointer
    mov xIP0, x0

    // Load parameters from frame into registers.
    // TODO Check with artQuickGenericJniTrampoline.
    // Also, check again against AAPCS64 - the stack arguments are interleaved.
    ldp x0, x1, [sp]
    ldp x2, x3, [sp, #16]
    ldp x4, x5, [sp, #32]
    ldp x6, x7, [sp, #48]

    ldp d0, d1, [sp, #64]
    ldp d2, d3, [sp, #80]
    ldp d4, d5, [sp, #96]
    ldp d6, d7, [sp, #112]

    add sp, sp, #128

    blr xIP0        // native call.

    // result sign extension is handled in C code
    // prepare for artQuickGenericJniEndTrampoline call
    // (Thread*, result, result_f)
    //    x0       x1       x2        <= C calling convention
    mov x1, x0      // Result (from saved).
    mov x0, xSELF   // Thread register.
    fmov x2, d0     // d0 will contain floating point result, but needs to go into x2

    bl artQuickGenericJniEndTrampoline

    // Pending exceptions possible.
    ldr x2, [xSELF, THREAD_EXCEPTION_OFFSET]
    cbnz x2, .Lexception_in_native

    // Tear down the alloca.
    mov sp, x28
    .cfi_def_cfa_register sp

    // Tear down the callee-save frame.
    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME

    // store into fpr, for when it's a fpr return...
    fmov d0, x0
    ret

.Lexception_in_native:
    // Move to x1 then sp to please assembler.
    ldr x1, [xSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]
    mov sp, x1
    .cfi_def_cfa_register sp
    // This will create a new save-all frame, required by the runtime.
    DELIVER_PENDING_EXCEPTION
END art_quick_generic_jni_trampoline
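
    /*
     * The overall shape of the down-call above, sketched in C (illustrative;
     * the real plumbing returns the code pointer and the new SP as the x0/x1
     * pair described above):
     *
     *   void* code = artQuickGenericJniTrampoline(self, frame);  // marshal args into the alloca
     *   if (code == NULL) goto exception_in_native;
     *   uint64_t raw = ((uint64_t (*)())code)();                 // the blr above
     *   uint64_t res = artQuickGenericJniEndTrampoline(self, raw, fpr_result);
     *   if (self->exception != NULL) goto exception_in_native;
     *   return res;  // also copied into d0 in case of a floating-point return
     */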

/*
 * Called to bridge from the quick to interpreter ABI. On entry the arguments match those
 * of a quick call:
 * x0 = method being called/to bridge to.
 * x1..x7, d0..d7 = arguments to that method.
 */
ENTRY art_quick_to_interpreter_bridge
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME    // Set up frame and save arguments.

    // x0 will contain mirror::ArtMethod* method.
    mov x1, xSELF                            // How to get Thread::Current() ???
    mov x2, sp

    // uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
    //                                      mirror::ArtMethod** sp)
    bl artQuickToInterpreterBridge

    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME  // TODO: no need to restore arguments in this case.

    fmov d0, x0

    RETURN_OR_DELIVER_PENDING_EXCEPTION
END art_quick_to_interpreter_bridge


//
// Instrumentation-related stubs
//
    .extern artInstrumentationMethodEntryFromCode
ENTRY art_quick_instrumentation_entry
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME

    mov x20, x0                               // Preserve method reference in a callee-save.

    mov x2, xSELF
    mov x3, xLR
    bl artInstrumentationMethodEntryFromCode  // (Method*, Object*, Thread*, LR)

    mov xIP0, x0                              // x0 = result of call.
    mov x0, x20                               // Reload method reference.

    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME   // Note: will restore xSELF
    adr xLR, art_quick_instrumentation_exit
    br xIP0                                   // Tail-call method with lr set to art_quick_instrumentation_exit.
END art_quick_instrumentation_entry

    .extern artInstrumentationMethodExitFromCode
ENTRY art_quick_instrumentation_exit
    mov xLR, #0                               // Clobber LR for later checks.

    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME

    // We need to save x0 and d0. We could use a callee-save from SETUP_REFS_ONLY, but then
    // we would need to fully restore it. As there are a lot of callee-save registers, it seems
    // easier to have an extra small stack area.

    str x0, [sp, #-16]!                       // Save integer result.
    .cfi_adjust_cfa_offset 16
    str d0, [sp, #8]                          // Save floating-point result.

    add x1, sp, #16                           // Pass SP.
    mov x2, x0                                // Pass integer result.
    fmov x3, d0                               // Pass floating-point result.
    mov x0, xSELF                             // Pass Thread.
    bl artInstrumentationMethodExitFromCode   // (Thread*, SP, gpr_res, fpr_res)

    mov xIP0, x0                              // Return address from instrumentation call.
    mov xLR, x1                               // x1 holds the link register if we're to bounce to deoptimize.

    ldr d0, [sp, #8]                          // Restore floating-point result.
    ldr x0, [sp], #16                         // Restore integer result, and drop stack area.
    .cfi_adjust_cfa_offset 16

    POP_REFS_ONLY_CALLEE_SAVE_FRAME

    br xIP0                                   // Tail-call out.
END art_quick_instrumentation_exit

    /*
     * Instrumentation has requested that we deoptimize into the interpreter. The deoptimization
     * will long jump to the upcall with a special exception of -1.
     */
    .extern artDeoptimize
ENTRY art_quick_deoptimize
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov x0, xSELF     // Pass thread.
    bl artDeoptimize  // artDeoptimize(Thread*)
    brk 0
END art_quick_deoptimize

    /*
     * Compiled code has requested that we deoptimize into the interpreter. The deoptimization
     * will long jump to the upcall with a special exception of -1.
     */
    .extern artDeoptimizeFromCompiledCode
ENTRY art_quick_deoptimize_from_compiled_code
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov x0, xSELF                     // Pass thread.
    bl artDeoptimizeFromCompiledCode  // artDeoptimizeFromCompiledCode(Thread*)
    brk 0
END art_quick_deoptimize_from_compiled_code


    /*
     * String's indexOf.
     *
     * TODO: Not very optimized.
     * On entry:
     *    x0: string object (known non-null)
     *    w1: char to match (known <= 0xFFFF)
     *    w2: Starting offset in string data
     */
ENTRY art_quick_indexof
    ldr w3, [x0, #MIRROR_STRING_COUNT_OFFSET]
    add x0, x0, #MIRROR_STRING_VALUE_OFFSET

    /* Clamp start to [0..count] */
    cmp w2, #0
    csel w2, wzr, w2, lt
    cmp w2, w3
    csel w2, w3, w2, gt

    /* Save a copy to compute result */
    mov x5, x0

    /* Build pointer to start of data to compare and pre-bias */
    add x0, x0, x2, lsl #1
    sub x0, x0, #2

    /* Compute iteration count */
    sub w2, w3, w2

    /*
     * At this point we have:
     *    x0: start of the data to test
     *    w1: char to compare
     *    w2: iteration count
     *    x5: original start of string data
     */

    subs w2, w2, #4
    b.lt .Lindexof_remainder

.Lindexof_loop4:
    ldrh w6, [x0, #2]!
    ldrh w7, [x0, #2]!
    ldrh wIP0, [x0, #2]!
    ldrh wIP1, [x0, #2]!
    cmp w6, w1
    b.eq .Lmatch_0
    cmp w7, w1
    b.eq .Lmatch_1
    cmp wIP0, w1
    b.eq .Lmatch_2
    cmp wIP1, w1
    b.eq .Lmatch_3
    subs w2, w2, #4
    b.ge .Lindexof_loop4

.Lindexof_remainder:
    adds w2, w2, #4
    b.eq .Lindexof_nomatch

.Lindexof_loop1:
    ldrh w6, [x0, #2]!
    cmp w6, w1
    b.eq .Lmatch_3
    subs w2, w2, #1
    b.ne .Lindexof_loop1

.Lindexof_nomatch:
    mov x0, #-1
    ret

.Lmatch_0:
    sub x0, x0, #6
    sub x0, x0, x5
    asr x0, x0, #1
    ret
.Lmatch_1:
    sub x0, x0, #4
    sub x0, x0, x5
    asr x0, x0, #1
    ret
.Lmatch_2:
    sub x0, x0, #2
    sub x0, x0, x5
    asr x0, x0, #1
    ret
.Lmatch_3:
    sub x0, x0, x5
    asr x0, x0, #1
    ret
END art_quick_indexof
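
    /*
     * Reference behavior in C (a sketch of the algorithm above, minus the
     * 4-way unrolled main loop):
     *
     *   int32_t IndexOf(const uint16_t* data, int32_t count, uint16_t ch, int32_t start) {
     *     if (start < 0) start = 0;        // clamp start to [0..count]
     *     if (start > count) start = count;
     *     for (int32_t i = start; i < count; ++i) {
     *       if (data[i] == ch) return i;
     *     }
     *     return -1;
     *   }
     */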

    /*
     * Create a function `name` calling the ReadBarrier::Mark routine,
     * getting its argument and returning its result through register
     * `reg`, thus following a non-standard runtime calling convention:
     * - `reg` is used to pass the (sole) argument of this function
     *   (instead of W0);
     * - `reg` is used to return the result of this function (instead of W0);
     * - W0 is treated like a normal (non-argument) caller-save register;
     * - everything else is the same as in the standard runtime calling
     *   convention (e.g. same callee-save registers).
     */
.macro READ_BARRIER_MARK_REG name, reg
ENTRY \name
    str xLR, [sp, #-16]!   // Save return address and add padding (16B align stack).
    .cfi_adjust_cfa_offset 16
    .cfi_rel_offset x30, 0
    mov w0, \reg           // Pass arg1 - obj from `reg`.
    bl artReadBarrierMark  // artReadBarrierMark(obj)
    mov \reg, w0           // Return result into `reg`.
    ldr xLR, [sp], #16     // Restore return address and remove padding.
    .cfi_restore x30
    .cfi_adjust_cfa_offset -16
    ret
END \name
.endm
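
    /*
     * Per-register effect in C-like pseudocode (a sketch), e.g. for the stub
     * generated with reg = w5:
     *
     *   w5 = artReadBarrierMark(w5);  // w0 is clobbered like any other
     *                                 // caller-save register; the stub itself
     *                                 // only spills and reloads LR
     */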

READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg01, w1
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg02, w2
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg03, w3
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg04, w4
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg05, w5
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg06, w6
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg07, w7
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg08, w8
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg09, w9
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg10, w10
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg11, w11
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg12, w12
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg13, w13
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg14, w14
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg15, w15
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg16, w16
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg17, w17
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg18, w18
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg19, w19
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg20, w20
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg21, w21
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg22, w22
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg23, w23
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg24, w24
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg25, w25
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg26, w26
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg27, w27
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg28, w28
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg29, w29