/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_arm64.S"

#include "arch/quick_alloc_entrypoints.S"


    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveAll).
     */
.macro SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    adrp xIP0, :got:_ZN3art7Runtime9instance_E
    ldr xIP0, [xIP0, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr xIP0, [xIP0]  // xIP0 = & (art::Runtime * art::Runtime.instance_) .

    // xIP0 = (ArtMethod*) Runtime.instance_.callee_save_methods[kSaveAll] .
    // Loads appropriate callee-save-method.
    ldr xIP0, [xIP0, RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET ]

    sub sp, sp, #176
    .cfi_adjust_cfa_offset 176

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVE != 176)
#error "SAVE_ALL_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif

    // Stack alignment filler [sp, #8].
    // FP callee-saves.
    stp d8, d9, [sp, #16]
    stp d10, d11, [sp, #32]
    stp d12, d13, [sp, #48]
    stp d14, d15, [sp, #64]

    // GP callee-saves.
    stp x19, x20, [sp, #80]
    .cfi_rel_offset x19, 80
    .cfi_rel_offset x20, 88

    stp x21, x22, [sp, #96]
    .cfi_rel_offset x21, 96
    .cfi_rel_offset x22, 104

    stp x23, x24, [sp, #112]
    .cfi_rel_offset x23, 112
    .cfi_rel_offset x24, 120

    stp x25, x26, [sp, #128]
    .cfi_rel_offset x25, 128
    .cfi_rel_offset x26, 136

    stp x27, x28, [sp, #144]
    .cfi_rel_offset x27, 144
    .cfi_rel_offset x28, 152

    stp x29, xLR, [sp, #160]
    .cfi_rel_offset x29, 160
    .cfi_rel_offset x30, 168

    // Store ArtMethod* Runtime::callee_save_methods_[kSaveAll].
    str xIP0, [sp]
    // Place sp in Thread::Current()->top_quick_frame.
    mov xIP0, sp
    str xIP0, [xSELF, # THREAD_TOP_QUICK_FRAME_OFFSET]
.endm
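
// Layout of the 176-byte kSaveAll frame built by SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
// (offsets from the new sp, as implied by the stores above):
//   [sp, #0]   ArtMethod* for the save-all runtime method
//   [sp, #8]   stack alignment filler
//   [sp, #16]  d8 - d15  (FP callee-saves)
//   [sp, #80]  x19 - x28 (GP callee-saves)
//   [sp, #160] x29, x30  (FP and LR)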

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsOnly).
     */
.macro SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
    adrp xIP0, :got:_ZN3art7Runtime9instance_E
    ldr xIP0, [xIP0, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr xIP0, [xIP0]  // xIP0 = & (art::Runtime * art::Runtime.instance_) .

    // xIP0 = (ArtMethod*) Runtime.instance_.callee_save_methods[kRefsOnly] .
    // Loads appropriate callee-save-method.
    ldr xIP0, [xIP0, RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET ]

    sub sp, sp, #96
    .cfi_adjust_cfa_offset 96

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE != 96)
#error "REFS_ONLY_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif

    // GP callee-saves.
    // x20 paired with ArtMethod* - see below.
    stp x21, x22, [sp, #16]
    .cfi_rel_offset x21, 16
    .cfi_rel_offset x22, 24

    stp x23, x24, [sp, #32]
    .cfi_rel_offset x23, 32
    .cfi_rel_offset x24, 40

    stp x25, x26, [sp, #48]
    .cfi_rel_offset x25, 48
    .cfi_rel_offset x26, 56

    stp x27, x28, [sp, #64]
    .cfi_rel_offset x27, 64
    .cfi_rel_offset x28, 72

    stp x29, xLR, [sp, #80]
    .cfi_rel_offset x29, 80
    .cfi_rel_offset x30, 88

    // Store ArtMethod* Runtime::callee_save_methods_[kRefsOnly].
    stp xIP0, x20, [sp]
    .cfi_rel_offset x20, 8

    // Place sp in Thread::Current()->top_quick_frame.
    mov xIP0, sp
    str xIP0, [xSELF, # THREAD_TOP_QUICK_FRAME_OFFSET]
.endm
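
// The 96-byte kRefsOnly frame saves only callee-saved core registers (which may hold
// references across the runtime call): x20 shares the bottom stp with the ArtMethod* at
// [sp, #0], x21 - x28 follow, and x29/x30 sit at [sp, #80]; argument registers are not saved.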

// TODO: Probably no need to restore registers preserved by aapcs64.
.macro RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
    // Callee-saves.
    ldr x20, [sp, #8]
    .cfi_restore x20

    ldp x21, x22, [sp, #16]
    .cfi_restore x21
    .cfi_restore x22

    ldp x23, x24, [sp, #32]
    .cfi_restore x23
    .cfi_restore x24

    ldp x25, x26, [sp, #48]
    .cfi_restore x25
    .cfi_restore x26

    ldp x27, x28, [sp, #64]
    .cfi_restore x27
    .cfi_restore x28

    ldp x29, xLR, [sp, #80]
    .cfi_restore x29
    .cfi_restore x30

    add sp, sp, #96
    .cfi_adjust_cfa_offset -96
.endm

.macro POP_REFS_ONLY_CALLEE_SAVE_FRAME
    add sp, sp, #96
    .cfi_adjust_cfa_offset -96
.endm

.macro RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
    ret
.endm


.macro SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
    sub sp, sp, #224
    .cfi_adjust_cfa_offset 224

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE != 224)
#error "REFS_AND_ARGS_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif

    // Stack alignment filler [sp, #8].
    // FP args.
    stp d0, d1, [sp, #16]
    stp d2, d3, [sp, #32]
    stp d4, d5, [sp, #48]
    stp d6, d7, [sp, #64]

    // Core args.
    stp x1, x2, [sp, #80]
    .cfi_rel_offset x1, 80
    .cfi_rel_offset x2, 88

    stp x3, x4, [sp, #96]
    .cfi_rel_offset x3, 96
    .cfi_rel_offset x4, 104

    stp x5, x6, [sp, #112]
    .cfi_rel_offset x5, 112
    .cfi_rel_offset x6, 120

    // x7, Callee-saves.
    stp x7, x20, [sp, #128]
    .cfi_rel_offset x7, 128
    .cfi_rel_offset x20, 136

    stp x21, x22, [sp, #144]
    .cfi_rel_offset x21, 144
    .cfi_rel_offset x22, 152

    stp x23, x24, [sp, #160]
    .cfi_rel_offset x23, 160
    .cfi_rel_offset x24, 168

    stp x25, x26, [sp, #176]
    .cfi_rel_offset x25, 176
    .cfi_rel_offset x26, 184

    stp x27, x28, [sp, #192]
    .cfi_rel_offset x27, 192
    .cfi_rel_offset x28, 200

    // x29(callee-save) and LR.
    stp x29, xLR, [sp, #208]
    .cfi_rel_offset x29, 208
    .cfi_rel_offset x30, 216

.endm
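
// Layout of the 224-byte kRefsAndArgs frame built by the macro above
// (offsets from the new sp, as implied by the stores; the ArtMethod* slot at [sp, #0]
// is filled in by the callers of this macro):
//   [sp, #8]   stack alignment filler
//   [sp, #16]  d0 - d7   (FP argument registers)
//   [sp, #80]  x1 - x7   (core argument registers; x0 is not saved here)
//   [sp, #136] x20 - x28 (GP callee-saves)
//   [sp, #208] x29, x30  (FP and LR)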

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsAndArgs).
     *
     * TODO This is probably too conservative - saving FP & LR.
     */
.macro SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME
    adrp xIP0, :got:_ZN3art7Runtime9instance_E
    ldr xIP0, [xIP0, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr xIP0, [xIP0]  // xIP0 = & (art::Runtime * art::Runtime.instance_) .

    // xIP0 = (ArtMethod*) Runtime.instance_.callee_save_methods[kRefsAndArgs] .
    ldr xIP0, [xIP0, RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET ]

    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL

    str xIP0, [sp]  // Store ArtMethod* Runtime::callee_save_methods_[kRefsAndArgs].
    // Place sp in Thread::Current()->top_quick_frame.
    mov xIP0, sp
    str xIP0, [xSELF, # THREAD_TOP_QUICK_FRAME_OFFSET]
.endm

.macro SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_X0
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
    str x0, [sp, #0]  // Store ArtMethod* to bottom of stack.
    // Place sp in Thread::Current()->top_quick_frame.
    mov xIP0, sp
    str xIP0, [xSELF, # THREAD_TOP_QUICK_FRAME_OFFSET]
.endm

// TODO: Probably no need to restore registers preserved by aapcs64.
.macro RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
    // FP args.
    ldp d0, d1, [sp, #16]
    ldp d2, d3, [sp, #32]
    ldp d4, d5, [sp, #48]
    ldp d6, d7, [sp, #64]

    // Core args.
    ldp x1, x2, [sp, #80]
    .cfi_restore x1
    .cfi_restore x2

    ldp x3, x4, [sp, #96]
    .cfi_restore x3
    .cfi_restore x4

    ldp x5, x6, [sp, #112]
    .cfi_restore x5
    .cfi_restore x6

    // x7, Callee-saves.
    ldp x7, x20, [sp, #128]
    .cfi_restore x7
    .cfi_restore x20

    ldp x21, x22, [sp, #144]
    .cfi_restore x21
    .cfi_restore x22

    ldp x23, x24, [sp, #160]
    .cfi_restore x23
    .cfi_restore x24

    ldp x25, x26, [sp, #176]
    .cfi_restore x25
    .cfi_restore x26

    ldp x27, x28, [sp, #192]
    .cfi_restore x27
    .cfi_restore x28

    // x29(callee-save) and LR.
    ldp x29, xLR, [sp, #208]
    .cfi_restore x29
    .cfi_restore x30

    add sp, sp, #224
    .cfi_adjust_cfa_offset -224
.endm

.macro RETURN_IF_RESULT_IS_ZERO
    cbnz x0, 1f       // result non-zero branch over
    ret               // return
1:
.endm

.macro RETURN_IF_RESULT_IS_NON_ZERO
    cbz x0, 1f        // result zero branch over
    ret               // return
1:
.endm

    /*
     * Macro that sets up a call through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_.
     */
.macro DELIVER_PENDING_EXCEPTION
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov x0, xSELF

    // Point of no return.
    b artDeliverPendingExceptionFromCode  // artDeliverPendingExceptionFromCode(Thread*)
    brk 0                                 // Unreached
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_REG reg
    ldr \reg, [xSELF, # THREAD_EXCEPTION_OFFSET]  // Get exception field.
    cbnz \reg, 1f
    ret
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION
    RETURN_OR_DELIVER_PENDING_EXCEPTION_REG xIP0
.endm

// Same as above with x1. This is helpful in stubs that want to avoid clobbering another register.
.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
    RETURN_OR_DELIVER_PENDING_EXCEPTION_REG x1
.endm

.macro RETURN_IF_W0_IS_ZERO_OR_DELIVER
    cbnz w0, 1f  // result non-zero branch over
    ret          // return
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro NO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context
    mov x0, xSELF                     // pass Thread::Current
    b   \cxx_name                     // \cxx_name(Thread*)
END \c_name
.endm

.macro ONE_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context.
    mov x1, xSELF                     // pass Thread::Current.
    b   \cxx_name                     // \cxx_name(arg, Thread*).
    brk 0
END \c_name
.endm

.macro TWO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context
    mov x2, xSELF                     // pass Thread::Current
    b   \cxx_name                     // \cxx_name(arg1, arg2, Thread*)
    brk 0
END \c_name
.endm

    /*
     * Called by managed code, saves callee saves and then calls artThrowException
     * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode

    /*
     * Called by managed code to create and deliver a NullPointerException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode

    /*
     * Called by managed code to create and deliver an ArithmeticException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero, artThrowDivZeroFromCode

    /*
     * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
     * index, arg2 holds limit.
     */
TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromCode

    /*
     * Called by managed code to create and deliver a StackOverflowError.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode

    /*
     * Called by managed code to create and deliver a NoSuchMethodError.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFromCode

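// None of the throw entrypoints above return to managed code: each one branches into its C++
// helper, which uses the kSaveAll frame set up here ("basis for long jump context") to deliver
// the exception, so the trailing "brk 0" instructions are unreachable.
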
    /*
     * All generated callsites for interface invokes and invocation slow paths will load arguments
     * as usual - except instead of loading arg0/x0 with the target Method*, arg0/x0 will contain
     * the method_idx.  This wrapper will save arg1-arg3, and call the appropriate C helper.
     * NOTE: "this" is first visible argument of the target, and so can be found in arg1/x1.
     *
     * The helper will attempt to locate the target and return a 128-bit result in x0/x1 consisting
     * of the target Method* in x0 and method->code_ in x1.
     *
     * If unsuccessful, the helper will return null/????. There will be a pending exception in the
     * thread and we branch to another stub to deliver it.
     *
     * On success this wrapper will restore arguments and *jump* to the target, leaving the lr
     * pointing back to the original caller.
     *
     * Adapted from ARM32 code.
     *
     * Clobbers xIP0.
     */
.macro INVOKE_TRAMPOLINE_BODY cxx_name
    .extern \cxx_name
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME  // save callee saves in case allocation triggers GC
    // Helper signature is always
    // (method_idx, this_object, Thread*, sp)

    mov x2, xSELF                          // pass Thread::Current
    mov x3, sp
    bl  \cxx_name                          // (method_idx, this, Thread*, SP)
    mov xIP0, x1                           // save Method*->code_
    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
    cbz x0, 1f                             // did we find the target? if not go to exception delivery
    br  xIP0                               // tail call to target
1:
    DELIVER_PENDING_EXCEPTION
.endm
.macro INVOKE_TRAMPOLINE c_name, cxx_name
ENTRY \c_name
    INVOKE_TRAMPOLINE_BODY \cxx_name
END \c_name
.endm

INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck

INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck


.macro INVOKE_STUB_CREATE_FRAME

SAVE_SIZE=15*8   // x4, x5, x19, x20, x21, x22, x23, x24, x25, x26, x27, x28, SP, LR, FP saved.
SAVE_SIZE_AND_METHOD=SAVE_SIZE+8


    mov x9, sp                           // Save stack pointer.
    .cfi_register sp,x9

    add x10, x2, # SAVE_SIZE_AND_METHOD  // calculate size of frame.
    sub x10, sp, x10                     // Calculate SP position - saves + ArtMethod* + args
    and x10, x10, # ~0xf                 // Enforce 16 byte stack alignment.
    mov sp, x10                          // Set new SP.

    sub x10, x9, #SAVE_SIZE              // Calculate new FP (later). Done here as we must move SP
    .cfi_def_cfa_register x10            // before this.
    .cfi_adjust_cfa_offset SAVE_SIZE

    str x28, [x10, #112]
    .cfi_rel_offset x28, 112

    stp x26, x27, [x10, #96]
    .cfi_rel_offset x26, 96
    .cfi_rel_offset x27, 104

    stp x24, x25, [x10, #80]
    .cfi_rel_offset x24, 80
    .cfi_rel_offset x25, 88

    stp x22, x23, [x10, #64]
    .cfi_rel_offset x22, 64
    .cfi_rel_offset x23, 72

    stp x20, x21, [x10, #48]
    .cfi_rel_offset x20, 48
    .cfi_rel_offset x21, 56

    stp x9, x19, [x10, #32]              // Save old stack pointer and x19.
    .cfi_rel_offset sp, 32
    .cfi_rel_offset x19, 40

    stp x4, x5, [x10, #16]               // Save result and shorty addresses.
    .cfi_rel_offset x4, 16
    .cfi_rel_offset x5, 24

    stp xFP, xLR, [x10]                  // Store LR & FP.
    .cfi_rel_offset x29, 0
    .cfi_rel_offset x30, 8

    mov xFP, x10                         // Use xFP now, as it's callee-saved.
    .cfi_def_cfa_register x29
    mov xSELF, x3                        // Move thread pointer into SELF register.

    // Copy arguments into stack frame.
    // Use simple copy routine for now.
    // 4 bytes per slot.
    // X1 - source address
    // W2 - args length
    // X9 - destination address.
    // W10 - temporary
    add x9, sp, #8                       // Destination address is bottom of stack + null.

    // Copy parameters into the stack. Use numeric label as this is a macro and Clang's assembler
    // does not have unique-id variables.
1:
    cmp w2, #0
    beq 2f
    sub w2, w2, #4                       // Need 65536 bytes of range.
    ldr w10, [x1, x2]
    str w10, [x9, x2]

    b 1b

2:
    // Store null into ArtMethod* at bottom of frame.
    str xzr, [sp]
.endm
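
// Frame built by INVOKE_STUB_CREATE_FRAME, from low to high addresses:
//   [sp, #0]        null ArtMethod* marking the bottom of the managed frame
//   [sp, #8]        the copied argument slots (4 bytes each)
//   [xFP, #0..#119] saved FP/LR, x4/x5 (result and shorty pointers), the old SP, and x19 - x28
// xFP is kept pointing at the save area so INVOKE_STUB_CALL_AND_RETURN can restore from it.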

.macro INVOKE_STUB_CALL_AND_RETURN

    // load method-> METHOD_QUICK_CODE_OFFSET
    ldr x9, [x0, #ART_METHOD_QUICK_CODE_OFFSET_64]
    // Branch to method.
    blr x9

    // Restore return value address and shorty address.
    ldp x4, x5, [xFP, #16]
    .cfi_restore x4
    .cfi_restore x5

    ldr x28, [xFP, #112]
    .cfi_restore x28

    ldp x26, x27, [xFP, #96]
    .cfi_restore x26
    .cfi_restore x27

    ldp x24, x25, [xFP, #80]
    .cfi_restore x24
    .cfi_restore x25

    ldp x22, x23, [xFP, #64]
    .cfi_restore x22
    .cfi_restore x23

    ldp x20, x21, [xFP, #48]
    .cfi_restore x20
    .cfi_restore x21

    // Store result (w0/x0/s0/d0) appropriately, depending on resultType.
    ldrb w10, [x5]

    // Check the return type and store the correct register into the jvalue in memory.
    // Use numeric label as this is a macro and Clang's assembler does not have unique-id variables.

    // Don't set anything for a void type.
    cmp w10, #'V'
    beq 3f

    // Is it a double?
    cmp w10, #'D'
    bne 1f
    str d0, [x4]
    b 3f

1:  // Is it a float?
    cmp w10, #'F'
    bne 2f
    str s0, [x4]
    b 3f

2:  // Just store x0. Doesn't matter if it is 64 or 32 bits.
    str x0, [x4]

3:  // Finish up.
    ldp x2, x19, [xFP, #32]  // Restore stack pointer and x19.
    .cfi_restore x19
    mov sp, x2
    .cfi_restore sp

    ldp xFP, xLR, [xFP]      // Restore old frame pointer and link register.
    .cfi_restore x29
    .cfi_restore x30

    ret

.endm


/*
 *  extern"C" void art_quick_invoke_stub(ArtMethod *method,   x0
 *                                       uint32_t  *args,     x1
 *                                       uint32_t argsize,    w2
 *                                       Thread *self,        x3
 *                                       JValue *result,      x4
 *                                       char   *shorty);     x5
 *  +----------------------+
 *  |                      |
 *  |  C/C++ frame         |
 *  |       LR''           |
 *  |       FP''           | <- SP'
 *  +----------------------+
 *  +----------------------+
 *  |        x28           | <- TODO: Remove callee-saves.
 *  |         :            |
 *  |        x19           |
 *  |        SP'           |
 *  |        X5            |
 *  |        X4            |        Saved registers
 *  |        LR'           |
 *  |        FP'           | <- FP
 *  +----------------------+
 *  | uint32_t out[n-1]    |
 *  |    :      :          |        Outs
 *  | uint32_t out[0]      |
 *  | ArtMethod*           | <- SP  value=null
 *  +----------------------+
 *
 * Outgoing registers:
 *  x0    - Method*
 *  x1-x7 - integer parameters.
 *  d0-d7 - Floating point parameters.
 *  xSELF = self
 *  SP = & of ArtMethod*
 *  x1 = "this" pointer.
 *
 */
ENTRY art_quick_invoke_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr  x11, .LstoreW2
    adr  x12, .LstoreX2
    adr  x13, .LstoreS0
    adr  x14, .LstoreD0

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1           // Load shorty address, plus one to skip return value.
    ldr w1, [x9], #4          // Load "this" parameter, and increment arg pointer.

    // Loop to fill registers.
.LfillRegisters:
    ldrb w17, [x10], #1       // Load next character in signature, and increment.
    cbz w17, .LcallFunction   // Exit at end of signature. Shorty 0 terminated.

    cmp  w17, #'F'            // is this a float?
    bne .LisDouble

    cmp x15, # 8*12           // Skip this load if all registers full.
    beq .Ladvance4

    add x17, x13, x15         // Calculate subroutine to jump to.
    br  x17

.LisDouble:
    cmp w17, #'D'             // is this a double?
    bne .LisLong

    cmp x15, # 8*12           // Skip this load if all registers full.
    beq .Ladvance8

    add x17, x14, x15         // Calculate subroutine to jump to.
    br x17

.LisLong:
    cmp w17, #'J'             // is this a long?
    bne .LisOther

    cmp x8, # 6*12            // Skip this load if all registers full.
    beq .Ladvance8

    add x17, x12, x8          // Calculate subroutine to jump to.
    br x17

.LisOther:                    // Everything else takes one vReg.
    cmp x8, # 6*12            // Skip this load if all registers full.
    beq .Ladvance4

    add x17, x11, x8          // Calculate subroutine to jump to.
    br x17

.Ladvance4:
    add x9, x9, #4
    b .LfillRegisters

.Ladvance8:
    add x9, x9, #8
    b .LfillRegisters

// Macro for loading a parameter into a register.
//  counter - the register with offset into these tables
//  size - the size of the register - 4 or 8 bytes.
//  register - the name of the register to be loaded.
.macro LOADREG counter size register return
    ldr \register , [x9], #\size
    add \counter, \counter, 12
    b \return
.endm
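
// Each LOADREG expansion below is three 4-byte A64 instructions (ldr, add, b), which is why the
// running counters x8/x15 advance by 12: they double as byte offsets into the .Lstore* blocks,
// so "add x17, <table base>, <counter>; br x17" dispatches to the entry that fills the next
// free argument register.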

// Store ints.
.LstoreW2:
    LOADREG x8 4 w2 .LfillRegisters
    LOADREG x8 4 w3 .LfillRegisters
    LOADREG x8 4 w4 .LfillRegisters
    LOADREG x8 4 w5 .LfillRegisters
    LOADREG x8 4 w6 .LfillRegisters
    LOADREG x8 4 w7 .LfillRegisters

// Store longs.
.LstoreX2:
    LOADREG x8 8 x2 .LfillRegisters
    LOADREG x8 8 x3 .LfillRegisters
    LOADREG x8 8 x4 .LfillRegisters
    LOADREG x8 8 x5 .LfillRegisters
    LOADREG x8 8 x6 .LfillRegisters
    LOADREG x8 8 x7 .LfillRegisters

// Store singles.
.LstoreS0:
    LOADREG x15 4 s0 .LfillRegisters
    LOADREG x15 4 s1 .LfillRegisters
    LOADREG x15 4 s2 .LfillRegisters
    LOADREG x15 4 s3 .LfillRegisters
    LOADREG x15 4 s4 .LfillRegisters
    LOADREG x15 4 s5 .LfillRegisters
    LOADREG x15 4 s6 .LfillRegisters
    LOADREG x15 4 s7 .LfillRegisters

// Store doubles.
.LstoreD0:
    LOADREG x15 8 d0 .LfillRegisters
    LOADREG x15 8 d1 .LfillRegisters
    LOADREG x15 8 d2 .LfillRegisters
    LOADREG x15 8 d3 .LfillRegisters
    LOADREG x15 8 d4 .LfillRegisters
    LOADREG x15 8 d5 .LfillRegisters
    LOADREG x15 8 d6 .LfillRegisters
    LOADREG x15 8 d7 .LfillRegisters


.LcallFunction:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_stub

/*  extern"C"
 *     void art_quick_invoke_static_stub(ArtMethod *method,   x0
 *                                       uint32_t  *args,     x1
 *                                       uint32_t argsize,    w2
 *                                       Thread *self,        x3
 *                                       JValue *result,      x4
 *                                       char   *shorty);     x5
 */
ENTRY art_quick_invoke_static_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr  x11, .LstoreW1_2
    adr  x12, .LstoreX1_2
    adr  x13, .LstoreS0_2
    adr  x14, .LstoreD0_2

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1           // Load shorty address, plus one to skip return value.

    // Loop to fill registers.
.LfillRegisters2:
    ldrb w17, [x10], #1       // Load next character in signature, and increment.
    cbz w17, .LcallFunction2  // Exit at end of signature. Shorty 0 terminated.

    cmp  w17, #'F'            // is this a float?
    bne .LisDouble2

    cmp x15, # 8*12           // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x13, x15         // Calculate subroutine to jump to.
    br  x17

.LisDouble2:
    cmp w17, #'D'             // is this a double?
    bne .LisLong2

    cmp x15, # 8*12           // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x14, x15         // Calculate subroutine to jump to.
    br x17

.LisLong2:
    cmp w17, #'J'             // is this a long?
    bne .LisOther2

    cmp x8, # 7*12            // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x12, x8          // Calculate subroutine to jump to.
    br x17

.LisOther2:                   // Everything else takes one vReg.
    cmp x8, # 7*12            // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x11, x8          // Calculate subroutine to jump to.
    br x17

.Ladvance4_2:
    add x9, x9, #4
    b .LfillRegisters2

.Ladvance8_2:
    add x9, x9, #8
    b .LfillRegisters2

// Store ints.
.LstoreW1_2:
    LOADREG x8 4 w1 .LfillRegisters2
    LOADREG x8 4 w2 .LfillRegisters2
    LOADREG x8 4 w3 .LfillRegisters2
    LOADREG x8 4 w4 .LfillRegisters2
    LOADREG x8 4 w5 .LfillRegisters2
    LOADREG x8 4 w6 .LfillRegisters2
    LOADREG x8 4 w7 .LfillRegisters2

// Store longs.
.LstoreX1_2:
    LOADREG x8 8 x1 .LfillRegisters2
    LOADREG x8 8 x2 .LfillRegisters2
    LOADREG x8 8 x3 .LfillRegisters2
    LOADREG x8 8 x4 .LfillRegisters2
    LOADREG x8 8 x5 .LfillRegisters2
    LOADREG x8 8 x6 .LfillRegisters2
    LOADREG x8 8 x7 .LfillRegisters2

// Store singles.
.LstoreS0_2:
    LOADREG x15 4 s0 .LfillRegisters2
    LOADREG x15 4 s1 .LfillRegisters2
    LOADREG x15 4 s2 .LfillRegisters2
    LOADREG x15 4 s3 .LfillRegisters2
    LOADREG x15 4 s4 .LfillRegisters2
    LOADREG x15 4 s5 .LfillRegisters2
    LOADREG x15 4 s6 .LfillRegisters2
    LOADREG x15 4 s7 .LfillRegisters2

// Store doubles.
.LstoreD0_2:
    LOADREG x15 8 d0 .LfillRegisters2
    LOADREG x15 8 d1 .LfillRegisters2
    LOADREG x15 8 d2 .LfillRegisters2
    LOADREG x15 8 d3 .LfillRegisters2
    LOADREG x15 8 d4 .LfillRegisters2
    LOADREG x15 8 d5 .LfillRegisters2
    LOADREG x15 8 d6 .LfillRegisters2
    LOADREG x15 8 d7 .LfillRegisters2


.LcallFunction2:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_static_stub


    /*
     * On entry x0 is uintptr_t* gprs_ and x1 is uint64_t* fprs_
     */

ENTRY art_quick_do_long_jump
    // Load FPRs
    ldp d0, d1, [x1], #16
    ldp d2, d3, [x1], #16
    ldp d4, d5, [x1], #16
    ldp d6, d7, [x1], #16
    ldp d8, d9, [x1], #16
    ldp d10, d11, [x1], #16
    ldp d12, d13, [x1], #16
    ldp d14, d15, [x1], #16
    ldp d16, d17, [x1], #16
    ldp d18, d19, [x1], #16
    ldp d20, d21, [x1], #16
    ldp d22, d23, [x1], #16
    ldp d24, d25, [x1], #16
    ldp d26, d27, [x1], #16
    ldp d28, d29, [x1], #16
    ldp d30, d31, [x1]

    // Load GPRs
    // TODO: lots of those are smashed, could optimize.
    add x0, x0, #30*8
    ldp x30, x1, [x0], #-16  // LR & SP
    ldp x28, x29, [x0], #-16
    ldp x26, x27, [x0], #-16
    ldp x24, x25, [x0], #-16
    ldp x22, x23, [x0], #-16
    ldp x20, x21, [x0], #-16
    ldp x18, x19, [x0], #-16
    ldp x16, x17, [x0], #-16
    ldp x14, x15, [x0], #-16
    ldp x12, x13, [x0], #-16
    ldp x10, x11, [x0], #-16
    ldp x8, x9, [x0], #-16
    ldp x6, x7, [x0], #-16
    ldp x4, x5, [x0], #-16
    ldp x2, x3, [x0], #-16
    mov sp, x1

    // Need to load PC, it's at the end (after the space for the unused XZR). Use x1.
    ldr x1, [x0, #33*8]
    // And the value of x0.
    ldr x0, [x0]

    br x1
END art_quick_do_long_jump
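
// gprs_ layout consumed above, 8 bytes per slot (as implied by the offsets used):
// slots 0-30 hold x0-x30, slot 31 holds SP, slot 32 is the unused XZR placeholder, and
// slot 33 holds the PC that is finally branched to through x1.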

    /*
     * Entry from managed code that calls artLockObjectFromCode, may block for GC. x0 holds the
     * possibly null object to lock.
     *
     * Derived from arm32 code.
     */
    .extern artLockObjectFromCode
ENTRY art_quick_lock_object
    cbz w0, .Lslow_lock
    add x4, x0, #MIRROR_OBJECT_LOCK_WORD_OFFSET  // exclusive load/store has no immediate anymore
.Lretry_lock:
    ldr w2, [xSELF, #THREAD_ID_OFFSET]  // TODO: Can the thread ID really change during the loop?
    ldxr w1, [x4]
    mov x3, x1
    and w3, w3, #LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED  // zero the read barrier bits
    cbnz w3, .Lnot_unlocked             // already thin locked
    // unlocked case - x1: original lock word that's zero except for the read barrier bits.
    orr x2, x1, x2                      // x2 holds thread id with count of 0 with preserved read barrier bits
    stxr w3, w2, [x4]
    cbnz w3, .Llock_stxr_fail           // store failed, retry
    dmb ishld                           // full (LoadLoad|LoadStore) memory barrier
    ret
.Lnot_unlocked:  // x1: original lock word
    lsr  w3, w1, LOCK_WORD_STATE_SHIFT
    cbnz w3, .Lslow_lock                // if either of the top two bits are set, go slow path
    eor  w2, w1, w2                     // lock_word.ThreadId() ^ self->ThreadId()
    uxth w2, w2                         // zero top 16 bits
    cbnz w2, .Lslow_lock                // thread ids differ -> contention, go to slow path
                                        // else they match -> recursive lock, increment the count
    mov x3, x1                          // copy the lock word to check count overflow.
    and w3, w3, #LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED  // zero the read barrier bits.
    add w2, w3, #LOCK_WORD_THIN_LOCK_COUNT_ONE  // increment count in lock word placing in w2 to check overflow
    lsr w3, w2, LOCK_WORD_READ_BARRIER_STATE_SHIFT  // if either of the upper two bits (28-29) are set, we overflowed.
    cbnz w3, .Lslow_lock                // if we overflow the count go slow path
    add w2, w1, #LOCK_WORD_THIN_LOCK_COUNT_ONE  // increment count for real
    stxr w3, w2, [x4]
    cbnz w3, .Llock_stxr_fail           // store failed, retry
    ret
.Llock_stxr_fail:
    b .Lretry_lock                      // retry
.Lslow_lock:
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME   // save callee saves in case we block
    mov x1, xSELF                       // pass Thread::Current
    bl  artLockObjectFromCode           // (Object* obj, Thread*)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_W0_IS_ZERO_OR_DELIVER
END art_quick_lock_object
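
// Thin lock word layout relied on by the fast path above: the top two bits are the lock state
// (zero for thin/unlocked), bits 28-29 are the read barrier state, the owning thread id sits in
// the low 16 bits (hence the uxth), and the recursion count occupies the bits in between, which
// is why overflowing the count spills into bits 28-29.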

    /*
     * Entry from managed code that calls artUnlockObjectFromCode and delivers exception on failure.
     * x0 holds the possibly null object to unlock.
     *
     * Derived from arm32 code.
     */
    .extern artUnlockObjectFromCode
ENTRY art_quick_unlock_object
    cbz x0, .Lslow_unlock
    add x4, x0, #MIRROR_OBJECT_LOCK_WORD_OFFSET  // exclusive load/store has no immediate anymore
.Lretry_unlock:
#ifndef USE_READ_BARRIER
    ldr w1, [x4]
#else
    ldxr w1, [x4]                       // Need to use atomic instructions for read barrier
#endif
    lsr  w2, w1, LOCK_WORD_STATE_SHIFT
    cbnz w2, .Lslow_unlock              // if either of the top two bits are set, go slow path
    ldr w2, [xSELF, #THREAD_ID_OFFSET]
    mov x3, x1                          // copy lock word to check thread id equality
    and w3, w3, #LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED  // zero the read barrier bits
    eor  w3, w3, w2                     // lock_word.ThreadId() ^ self->ThreadId()
    uxth w3, w3                         // zero top 16 bits
    cbnz w3, .Lslow_unlock              // do lock word and self thread id's match?
    mov x3, x1                          // copy lock word to detect transition to unlocked
    and w3, w3, #LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED  // zero the read barrier bits
    cmp w3, #LOCK_WORD_THIN_LOCK_COUNT_ONE
    bpl .Lrecursive_thin_unlock
    // transition to unlocked
    mov x3, x1
    and w3, w3, #LOCK_WORD_READ_BARRIER_STATE_MASK  // w3: zero except for the preserved read barrier bits
    dmb ish                             // full (LoadStore|StoreStore) memory barrier
#ifndef USE_READ_BARRIER
    str w3, [x4]
#else
    stxr w2, w3, [x4]                   // Need to use atomic instructions for read barrier
    cbnz w2, .Lunlock_stxr_fail         // store failed, retry
#endif
    ret
.Lrecursive_thin_unlock:  // w1: original lock word
    sub w1, w1, #LOCK_WORD_THIN_LOCK_COUNT_ONE  // decrement count
#ifndef USE_READ_BARRIER
    str w1, [x4]
#else
    stxr w2, w1, [x4]                   // Need to use atomic instructions for read barrier
    cbnz w2, .Lunlock_stxr_fail         // store failed, retry
#endif
    ret
.Lunlock_stxr_fail:
    b .Lretry_unlock                    // retry
.Lslow_unlock:
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME   // save callee saves in case exception allocation triggers GC
    mov x1, xSELF                       // pass Thread::Current
    bl  artUnlockObjectFromCode         // (Object* obj, Thread*)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
    RETURN_IF_W0_IS_ZERO_OR_DELIVER
END art_quick_unlock_object

    /*
     * Entry from managed code that calls artIsAssignableFromCode and on failure calls
     * artThrowClassCastException.
     */
    .extern artThrowClassCastException
ENTRY art_quick_check_cast
    // Store arguments and link register
    // Stack needs to be 16B aligned on calls.
    stp x0, x1, [sp, #-32]!
    .cfi_adjust_cfa_offset 32
    .cfi_rel_offset x0, 0
    .cfi_rel_offset x1, 8
    str xLR, [sp, #24]
    .cfi_rel_offset x30, 24

    // Call runtime code
    bl artIsAssignableFromCode

    // Check for exception
    cbz x0, .Lthrow_class_cast_exception

    // Restore and return
    ldr xLR, [sp, #24]
    .cfi_restore x30
    ldp x0, x1, [sp], #32
    .cfi_restore x0
    .cfi_restore x1
    .cfi_adjust_cfa_offset -32
    ret

    .cfi_adjust_cfa_offset 32  // Reset unwind info so following code unwinds.

.Lthrow_class_cast_exception:
    // Restore
    ldr xLR, [sp, #24]
    .cfi_restore x30
    ldp x0, x1, [sp], #32
    .cfi_restore x0
    .cfi_restore x1
    .cfi_adjust_cfa_offset -32

    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // save all registers as basis for long jump context
    mov x2, xSELF                     // pass Thread::Current
    b artThrowClassCastException      // (Class*, Class*, Thread*)
    brk 0                             // We should not return here...
END art_quick_check_cast

// Restore xReg's value from [sp, #offset] if xReg is not the same as xExclude.
.macro POP_REG_NE xReg, offset, xExclude
    .ifnc \xReg, \xExclude
        ldr \xReg, [sp, #\offset]  // restore xReg
        .cfi_restore \xReg
    .endif
.endm

    /*
     * Macro to insert read barrier, only used in art_quick_aput_obj.
     * xDest, wDest and xObj are registers, offset is a defined literal such as
     * MIRROR_OBJECT_CLASS_OFFSET. Dest needs both x and w versions of the same register to handle
     * name mismatch between instructions. This macro uses the lower 32b of register when possible.
     * TODO: When read barrier has a fast path, add heap unpoisoning support for the fast path.
     */
.macro READ_BARRIER xDest, wDest, xObj, offset
#ifdef USE_READ_BARRIER
    // Store registers used in art_quick_aput_obj (x0-x4, LR), stack is 16B aligned.
    stp x0, x1, [sp, #-48]!
    .cfi_adjust_cfa_offset 48
    .cfi_rel_offset x0, 0
    .cfi_rel_offset x1, 8
    stp x2, x3, [sp, #16]
    .cfi_rel_offset x2, 16
    .cfi_rel_offset x3, 24
    stp x4, xLR, [sp, #32]
    .cfi_rel_offset x4, 32
    .cfi_rel_offset x30, 40

    // mov x0, \xRef            // pass ref in x0 (no-op for now since parameter ref is unused)
    .ifnc \xObj, x1
        mov x1, \xObj           // pass xObj
    .endif
    mov w2, #\offset            // pass offset
    bl artReadBarrierSlow       // artReadBarrierSlow(ref, xObj, offset)
    // No need to unpoison return value in w0, artReadBarrierSlow() would do the unpoisoning.
    .ifnc \wDest, w0
        mov \wDest, w0          // save return value in wDest
    .endif

    // Conditionally restore saved registers
    POP_REG_NE x0, 0, \xDest
    POP_REG_NE x1, 8, \xDest
    POP_REG_NE x2, 16, \xDest
    POP_REG_NE x3, 24, \xDest
    POP_REG_NE x4, 32, \xDest
    ldr xLR, [sp, #40]
    .cfi_restore x30
    add sp, sp, #48
    .cfi_adjust_cfa_offset -48
#else
    ldr \wDest, [\xObj, #\offset]  // Heap reference = 32b. This also zero-extends to \xDest.
    UNPOISON_HEAP_REF \wDest
#endif  // USE_READ_BARRIER
.endm

Andreas Gampef4e910b2014-04-29 16:55:52 -07001178 /*
1179 * Entry from managed code for array put operations of objects where the value being stored
1180 * needs to be checked for compatibility.
1181 * x0 = array, x1 = index, x2 = value
1182 *
1183 * Currently all values should fit into w0/w1/w2, and w1 always will as indices are 32b. We
1184 * assume, though, that the upper 32b are zeroed out. At least for x1/w1 we can do better by
1185 * using index-zero-extension in load/stores.
1186 *
1187 * Temporaries: x3, x4
1188 * TODO: x4 OK? ip seems wrong here.
1189 */
1190ENTRY art_quick_aput_obj_with_null_and_bound_check
1191 tst x0, x0
1192 bne art_quick_aput_obj_with_bound_check
1193 b art_quick_throw_null_pointer_exception
1194END art_quick_aput_obj_with_null_and_bound_check
1195
1196ENTRY art_quick_aput_obj_with_bound_check
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001197 ldr w3, [x0, #MIRROR_ARRAY_LENGTH_OFFSET]
Andreas Gampef4e910b2014-04-29 16:55:52 -07001198 cmp w3, w1
1199 bhi art_quick_aput_obj
1200 mov x0, x1
1201 mov x1, x3
1202 b art_quick_throw_array_bounds
1203END art_quick_aput_obj_with_bound_check
1204
Man Cao1aee9002015-07-14 22:31:42 -07001205#ifdef USE_READ_BARRIER
1206 .extern artReadBarrierSlow
1207#endif
Andreas Gampef4e910b2014-04-29 16:55:52 -07001208ENTRY art_quick_aput_obj
1209 cbz x2, .Ldo_aput_null
Man Cao1aee9002015-07-14 22:31:42 -07001210 READ_BARRIER x3, w3, x0, MIRROR_OBJECT_CLASS_OFFSET // Heap reference = 32b
Andreas Gampef4e910b2014-04-29 16:55:52 -07001211 // This also zero-extends to x3
Man Cao1aee9002015-07-14 22:31:42 -07001212 READ_BARRIER x4, w4, x2, MIRROR_OBJECT_CLASS_OFFSET // Heap reference = 32b
Andreas Gampef4e910b2014-04-29 16:55:52 -07001213 // This also zero-extends to x4
Man Cao1aee9002015-07-14 22:31:42 -07001214 READ_BARRIER x3, w3, x3, MIRROR_CLASS_COMPONENT_TYPE_OFFSET // Heap reference = 32b
Andreas Gampef4e910b2014-04-29 16:55:52 -07001215 // This also zero-extends to x3
1216 cmp w3, w4 // value's type == array's component type - trivial assignability
1217 bne .Lcheck_assignability
1218.Ldo_aput:
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001219 add x3, x0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET
Andreas Gampef4e910b2014-04-29 16:55:52 -07001220 // "Compress" = do nothing
Hiroshi Yamauchibfa5eb62015-05-29 15:04:41 -07001221 POISON_HEAP_REF w2
Andreas Gampef4e910b2014-04-29 16:55:52 -07001222 str w2, [x3, x1, lsl #2] // Heap reference = 32b
1223 ldr x3, [xSELF, #THREAD_CARD_TABLE_OFFSET]
1224 lsr x0, x0, #7
1225 strb w3, [x3, x0]
1226 ret
1227.Ldo_aput_null:
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001228 add x3, x0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET
Andreas Gampef4e910b2014-04-29 16:55:52 -07001229 // "Compress" = do nothing
1230 str w2, [x3, x1, lsl #2] // Heap reference = 32b
1231 ret
1232.Lcheck_assignability:
1233 // Store arguments and link register
Serban Constantinescu9bd88b02015-04-22 16:24:46 +01001234 stp x0, x1, [sp,#-32]!
1235 .cfi_adjust_cfa_offset 32
Andreas Gampef4e910b2014-04-29 16:55:52 -07001236 .cfi_rel_offset x0, 0
1237 .cfi_rel_offset x1, 8
Serban Constantinescu9bd88b02015-04-22 16:24:46 +01001238 stp x2, xLR, [sp, #16]
Andreas Gampef4e910b2014-04-29 16:55:52 -07001239 .cfi_rel_offset x2, 16
Serban Constantinescu9bd88b02015-04-22 16:24:46 +01001240 .cfi_rel_offset x30, 24
Andreas Gampef4e910b2014-04-29 16:55:52 -07001241
1242 // Call runtime code
1243 mov x0, x3 // Heap reference, 32b, "uncompress" = do nothing, already zero-extended
1244 mov x1, x4 // Heap reference, 32b, "uncompress" = do nothing, already zero-extended
1245 bl artIsAssignableFromCode
1246
1247 // Check for exception
1248 cbz x0, .Lthrow_array_store_exception
1249
1250 // Restore
Serban Constantinescu9bd88b02015-04-22 16:24:46 +01001251 ldp x2, x30, [sp, #16]
1252 .cfi_restore x2
1253 .cfi_restore x30
1254 ldp x0, x1, [sp], #32
Andreas Gampef4e910b2014-04-29 16:55:52 -07001255 .cfi_restore x0
1256 .cfi_restore x1
Serban Constantinescu9bd88b02015-04-22 16:24:46 +01001257 .cfi_adjust_cfa_offset -32
Andreas Gampef4e910b2014-04-29 16:55:52 -07001258
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001259 add x3, x0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET
Andreas Gampef4e910b2014-04-29 16:55:52 -07001260 // "Compress" = do nothing
Hiroshi Yamauchibfa5eb62015-05-29 15:04:41 -07001261 POISON_HEAP_REF w2
Andreas Gampef4e910b2014-04-29 16:55:52 -07001262 str w2, [x3, x1, lsl #2] // Heap reference = 32b
1263 ldr x3, [xSELF, #THREAD_CARD_TABLE_OFFSET]
1264 lsr x0, x0, #7
1265 strb w3, [x3, x0]
1266 ret
Mathieu Chartier27386392015-06-27 15:42:27 -07001267 .cfi_adjust_cfa_offset 32 // 4 restores after cbz for unwinding.
Andreas Gampef4e910b2014-04-29 16:55:52 -07001268.Lthrow_array_store_exception:
Serban Constantinescu9bd88b02015-04-22 16:24:46 +01001269 ldp x2, x30, [sp, #16]
1270 .cfi_restore x2
1271 .cfi_restore x30
1272 ldp x0, x1, [sp], #32
Andreas Gampef4e910b2014-04-29 16:55:52 -07001273 .cfi_restore x0
1274 .cfi_restore x1
Serban Constantinescu9bd88b02015-04-22 16:24:46 +01001275 .cfi_adjust_cfa_offset -32
Andreas Gampef4e910b2014-04-29 16:55:52 -07001276
1277 SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
1278 mov x1, x2 // Pass value.
1279 mov x2, xSELF // Pass Thread::Current.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001280 b artThrowArrayStoreException // (Object*, Object*, Thread*).
Andreas Gampef4e910b2014-04-29 16:55:52 -07001281 brk 0 // Unreached.
1282END art_quick_aput_obj
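    /*
     * Illustrative only: a C++-level sketch of the logic art_quick_aput_obj implements above.
     * The accessors and field names are stand-ins for the real offsets the assembly uses
     * (MIRROR_OBJECT_CLASS_OFFSET, MIRROR_CLASS_COMPONENT_TYPE_OFFSET, ...), not ART's API.
     *
     *   void AputObj(mirror::Object* array, int32_t index, mirror::Object* value, Thread* self) {
     *     if (value == nullptr) {
     *       StoreRef(array, index, nullptr);          // Null stores need no check and no card mark.
     *       return;
     *     }
     *     mirror::Class* component = GetClass(array)->component_type;
     *     if (component != GetClass(value) &&                       // Trivial assignability?
     *         artIsAssignableFromCode(component, GetClass(value)) == 0) {
     *       artThrowArrayStoreException(array, value, self);        // Does not return.
     *     }
     *     StoreRef(array, index, value);              // 32-bit (compressed) reference store.
     *     // Card mark: the card table is biased so a byte of its own base address is the
     *     // "dirty" value, hence strb w3, [x3, x0] with x0 = array >> 7 (the card shift).
     *     uint8_t* card_table = self->card_table;
     *     card_table[reinterpret_cast<uintptr_t>(array) >> 7] =
     *         static_cast<uint8_t>(reinterpret_cast<uintptr_t>(card_table));
     *   }
     */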
1283
Stuart Monteithb95a5342014-03-12 13:32:32 +00001284// Macro to facilitate adding new allocation entrypoints.
Vladimir Marko5ea536a2015-04-20 20:11:30 +01001285.macro ONE_ARG_DOWNCALL name, entrypoint, return
1286 .extern \entrypoint
1287ENTRY \name
Jeff Hao848f70a2014-01-15 13:49:50 -08001288 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC
Vladimir Marko5ea536a2015-04-20 20:11:30 +01001289 mov x1, xSELF // pass Thread::Current
1290 bl \entrypoint // (uint32_t type_idx, Thread*)
1291 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
1292 \return
1293END \name
1294.endm
1295
1296// Macro to facilitate adding new allocation entrypoints.
Stuart Monteithb95a5342014-03-12 13:32:32 +00001297.macro TWO_ARG_DOWNCALL name, entrypoint, return
1298 .extern \entrypoint
1299ENTRY \name
Jeff Hao848f70a2014-01-15 13:49:50 -08001300 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001301 mov x2, xSELF // pass Thread::Current
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001302 bl \entrypoint // (uint32_t type_idx, Method* method, Thread*)
1303 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001304 \return
Stuart Monteithb95a5342014-03-12 13:32:32 +00001305END \name
1306.endm
1307
Jeff Hao848f70a2014-01-15 13:49:50 -08001308// Macro to facilitate adding new allocation entrypoints.
Stuart Monteithb95a5342014-03-12 13:32:32 +00001309.macro THREE_ARG_DOWNCALL name, entrypoint, return
1310 .extern \entrypoint
1311ENTRY \name
Jeff Hao848f70a2014-01-15 13:49:50 -08001312 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001313 mov x3, xSELF // pass Thread::Current
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001314 bl \entrypoint
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001315 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
Andreas Gampe00c1e6d2014-04-25 15:47:13 -07001316 \return
Stuart Monteithb95a5342014-03-12 13:32:32 +00001317END \name
1318.endm
1319
Jeff Hao848f70a2014-01-15 13:49:50 -08001320// Macro to facilitate adding new allocation entrypoints.
1321.macro FOUR_ARG_DOWNCALL name, entrypoint, return
1322 .extern \entrypoint
1323ENTRY \name
1324 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC
1325 mov x4, xSELF // pass Thread::Current
1326 bl \entrypoint // (arg0, arg1, arg2, arg3, Thread*)
1327 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
1328 \return
1329 DELIVER_PENDING_EXCEPTION
1330END \name
1331.endm
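    /*
     * As a usage sketch, ONE_ARG_DOWNCALL art_quick_initialize_static_storage,
     * artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
     * (instantiated further below) expands to roughly:
     *
     *   ENTRY art_quick_initialize_static_storage
     *     SETUP_REFS_ONLY_CALLEE_SAVE_FRAME            // save callee saves in case of GC
     *     mov x1, xSELF                                // pass Thread::Current
     *     bl  artInitializeStaticStorageFromCode       // (uint32_t type_idx, Thread*)
     *     RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
     *     RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
     *   END art_quick_initialize_static_storage
     */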
1332
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001333// Macros that take advantage of code similarities for downcalls with a referrer argument.
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001334.macro ONE_ARG_REF_DOWNCALL name, entrypoint, return
1335 .extern \entrypoint
1336ENTRY \name
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001337 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC
Mathieu Chartiere401d142015-04-22 13:56:20 -07001338 ldr x1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001339 mov x2, xSELF // pass Thread::Current
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001340 bl \entrypoint // (uint32_t field_idx, ArtMethod* referrer, Thread*)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001341 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001342 \return
1343END \name
1344.endm
1345
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001346.macro TWO_ARG_REF_DOWNCALL name, entrypoint, return
1347 .extern \entrypoint
1348ENTRY \name
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001349 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC
Mathieu Chartiere401d142015-04-22 13:56:20 -07001350 ldr x2, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001351 mov x3, xSELF // pass Thread::Current
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001352 bl \entrypoint
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001353 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001354 \return
1355END \name
1356.endm
1357
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001358.macro THREE_ARG_REF_DOWNCALL name, entrypoint, return
1359 .extern \entrypoint
1360ENTRY \name
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001361 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC
Mathieu Chartiere401d142015-04-22 13:56:20 -07001362 ldr x3, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001363 mov x4, xSELF // pass Thread::Current
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001364 bl \entrypoint
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001365 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001366 \return
1367END \name
1368.endm
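    /*
     * The C entry points called through these macros follow this general shape (a sketch, not
     * the exact ART declarations). The referrer is the ArtMethod* spilled at the top of the
     * refs-only callee-save frame, which is why it is reloaded from
     * [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE]:
     *
     *   // ONE_ARG_REF_DOWNCALL, e.g. art_quick_get32_static:
     *   extern "C" int32_t artGet32StaticFromCode(uint32_t field_idx, ArtMethod* referrer,
     *                                             Thread* self);
     *   // TWO_ARG_REF_DOWNCALL, e.g. art_quick_get32_instance:
     *   extern "C" int32_t artGet32InstanceFromCode(uint32_t field_idx, mirror::Object* obj,
     *                                               ArtMethod* referrer, Thread* self);
     *   // THREE_ARG_REF_DOWNCALL, e.g. art_quick_set32_instance:
     *   extern "C" int artSet32InstanceFromCode(uint32_t field_idx, mirror::Object* obj,
     *                                           uint32_t new_value, ArtMethod* referrer,
     *                                           Thread* self);
     */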
1369
Andreas Gampe1cc7dba2014-12-17 18:43:01 -08001370.macro RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
1371 cbz w0, 1f // result zero branch over
1372 ret // return
13731:
1374 DELIVER_PENDING_EXCEPTION
1375.endm
1376
Matteo Franchindfd891a2014-04-30 12:17:17 +01001377 /*
Vladimir Marko3b370732014-10-09 18:34:28 +01001378 * Entry from managed code that calls artHandleFillArrayDataFromCode and delivers an exception on
1379 * failure.
1380 */
1381TWO_ARG_REF_DOWNCALL art_quick_handle_fill_data, artHandleFillArrayDataFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
1382
1383 /*
Matteo Franchindfd891a2014-04-30 12:17:17 +01001384 * Entry from managed code when static storage is uninitialized; this stub will run the class
1385 * initializer and deliver the exception on error. On success the static storage base is
1386 * returned.
1387 */
Vladimir Marko5ea536a2015-04-20 20:11:30 +01001388ONE_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
Matteo Franchindfd891a2014-04-30 12:17:17 +01001389
Vladimir Marko5ea536a2015-04-20 20:11:30 +01001390ONE_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
1391ONE_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
Matteo Franchindfd891a2014-04-30 12:17:17 +01001392
Fred Shih37f05ef2014-07-16 18:38:08 -07001393ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1394ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1395ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1396ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001397ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1398ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1399ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1400
Fred Shih37f05ef2014-07-16 18:38:08 -07001401TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1402TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1403TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1404TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001405TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1406TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1407TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
1408
Fred Shih37f05ef2014-07-16 18:38:08 -07001409TWO_ARG_REF_DOWNCALL art_quick_set8_static, artSet8StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
1410TWO_ARG_REF_DOWNCALL art_quick_set16_static, artSet16StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001411TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
1412TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
1413
Fred Shih37f05ef2014-07-16 18:38:08 -07001414THREE_ARG_REF_DOWNCALL art_quick_set8_instance, artSet8InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
1415THREE_ARG_REF_DOWNCALL art_quick_set16_instance, artSet16InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001416THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
Stephen Kyle0ff20d52014-10-22 15:23:46 +01001417THREE_ARG_REF_DOWNCALL art_quick_set64_instance, artSet64InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001418THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
1419
1420// This is separated out as the argument order is different.
1421 .extern artSet64StaticFromCode
1422ENTRY art_quick_set64_static
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001423 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC
Mathieu Chartiere401d142015-04-22 13:56:20 -07001424 ldr x1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer
Calin Juravlee460d1d2015-09-29 04:52:17 +01001425 // x2 contains the parameter
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001426 mov x3, xSELF // pass Thread::Current
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001427 bl artSet64StaticFromCode
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001428 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001429 RETURN_IF_W0_IS_ZERO_OR_DELIVER
1430END art_quick_set64_static
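    /*
     * The difference in argument order: for art_quick_set64_static the compiler passes the wide
     * new value in x2 (leaving x1 free for the referrer loaded above), because the entry point's
     * C signature - roughly artSet64StaticFromCode(uint32_t field_idx, ArtMethod* referrer,
     * uint64_t new_value, Thread* self) - takes the referrer before the value, unlike the other
     * set entry points handled by the generic macros. (Signature shown as a sketch.)
     */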
1431
Matteo Franchindfd891a2014-04-30 12:17:17 +01001432 /*
1433 * Entry from managed code to resolve a string; this stub will allocate a String and deliver an
Vladimir Marko5ea536a2015-04-20 20:11:30 +01001434 * exception on error. On success the String is returned. w0 holds the string index. The fast
1435 * path check for a hit in the strings cache has already been performed.
Matteo Franchindfd891a2014-04-30 12:17:17 +01001436 */
Vladimir Marko5ea536a2015-04-20 20:11:30 +01001437ONE_ARG_DOWNCALL art_quick_resolve_string, artResolveStringFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001438
Stuart Monteithb95a5342014-03-12 13:32:32 +00001439// Generate the allocation entrypoints for each allocator.
Hiroshi Yamauchi6f6244a2015-10-22 12:08:12 -07001440GENERATE_ALLOC_ENTRYPOINTS_FOR_EACH_ALLOCATOR
1441GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_tlab, TLAB)
1442// A hand-written override for GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_rosalloc, RosAlloc).
1443ENTRY art_quick_alloc_object_rosalloc
1444 // Fast path rosalloc allocation.
1445 // x0: type_idx/return value, x1: ArtMethod*, xSELF(x19): Thread::Current
1446 // x2-x7: free.
1447 ldr x2, [x1, #ART_METHOD_DEX_CACHE_TYPES_OFFSET_64] // Load dex cache resolved types array
1448 // Load the class (x2)
1449 ldr w2, [x2, x0, lsl #COMPRESSED_REFERENCE_SIZE_SHIFT]
1450 cbz x2, .Lart_quick_alloc_object_rosalloc_slow_path // Check null class
1451 // Check class status.
1452 ldr w3, [x2, #MIRROR_CLASS_STATUS_OFFSET]
1453 cmp x3, #MIRROR_CLASS_STATUS_INITIALIZED
1454 bne .Lart_quick_alloc_object_rosalloc_slow_path
1455 // Add a fake dependence from the
1456 // following access flag and size
1457 // loads to the status load.
1458 // This is to prevent those loads
1459 // from being reordered above the
1460 // status load and reading wrong
1461 // values (an alternative is to use
1462 // a load-acquire for the status).
1463 eor x3, x3, x3
1464 add x2, x2, x3
1465 // Check access flags has
1466 // kAccClassIsFinalizable
1467 ldr w3, [x2, #MIRROR_CLASS_ACCESS_FLAGS_OFFSET]
1468 tst x3, #ACCESS_FLAGS_CLASS_IS_FINALIZABLE
1469 bne .Lart_quick_alloc_object_rosalloc_slow_path
1470 ldr x3, [xSELF, #THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET] // Check if the thread local
1471 // allocation stack has room.
1472 // ldp won't work due to large offset.
1473 ldr x4, [xSELF, #THREAD_LOCAL_ALLOC_STACK_END_OFFSET]
1474 cmp x3, x4
1475 bhs .Lart_quick_alloc_object_rosalloc_slow_path
1476 ldr w3, [x2, #MIRROR_CLASS_OBJECT_SIZE_OFFSET] // Load the object size (x3)
1477 cmp x3, #ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE // Check if the size is for a thread
1478 // local allocation
1479 bhs .Lart_quick_alloc_object_rosalloc_slow_path
1480 // Compute the rosalloc bracket index
1481 // from the size.
1482 // Align up the size by the rosalloc
1483 // bracket quantum size and divide
1484 // by the quantum size and subtract
1485 // by 1. This code is a shorter but
1486 // equivalent version.
1487 sub x3, x3, #1
1488 lsr x3, x3, #ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT
1489 // Load the rosalloc run (x4)
1490 add x4, xSELF, x3, lsl #POINTER_SIZE_SHIFT
1491 ldr x4, [x4, #THREAD_ROSALLOC_RUNS_OFFSET]
1492 // Load the free list head (x3). This
1493 // will be the return val.
1494 ldr x3, [x4, #(ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)]
1495 cbz x3, .Lart_quick_alloc_object_rosalloc_slow_path
1496 // "Point of no slow path". Won't go to the slow path from here on. OK to clobber x0 and x1.
1497 ldr x1, [x3, #ROSALLOC_SLOT_NEXT_OFFSET] // Load the next pointer of the head
1498 // and update the list head with the
1499 // next pointer.
1500 str x1, [x4, #(ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)]
1501 // Store the class pointer in the
1502 // header. This also overwrites the
1503 // next pointer. The offsets are
1504 // asserted to match.
1505#if ROSALLOC_SLOT_NEXT_OFFSET != MIRROR_OBJECT_CLASS_OFFSET
1506#error "Class pointer needs to overwrite next pointer."
1507#endif
1508 POISON_HEAP_REF w2
1509 str w2, [x3, #MIRROR_OBJECT_CLASS_OFFSET]
1510 // Push the new object onto the thread
1511 // local allocation stack and
1512 // increment the thread local
1513 // allocation stack top.
1514 ldr x1, [xSELF, #THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET]
1515 str w3, [x1], #COMPRESSED_REFERENCE_SIZE // (Increment x1 as a side effect.)
1516 str x1, [xSELF, #THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET]
1517 // Decrement the size of the free list
1518 ldr w1, [x4, #(ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)]
1519 sub x1, x1, #1
1520 // TODO: consider combining this store
1521 // and the list head store above using
1522 // strd.
1523 str w1, [x4, #(ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)]
1524 // Fence. This is "ish" not "ishst" so
1525 // that the code after this allocation
1526 // site will see the right values in
1527 // the fields of the class.
1528 // Alternatively we could use "ishst"
1529 // if we use load-acquire for the
1530 // class status load.
1531 dmb ish
1532 mov x0, x3 // Set the return value and return.
1533 ret
1534.Lart_quick_alloc_object_rosalloc_slow_path:
1535 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC
1536 mov x2, xSELF // pass Thread::Current
1537 bl artAllocObjectFromCodeRosAlloc // (uint32_t type_idx, Method* method, Thread*)
1538 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
1539 RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
1540END art_quick_alloc_object_rosalloc
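    /*
     * Illustrative only: a C++-level sketch of the fast path above. Names such as Runs(),
     * AllocStackTop() and kRosAllocQuantumShift are stand-ins for the real thread offsets and
     * constants the assembly uses, not ART's API.
     *
     *   mirror::Object* AllocObjectRosAllocFast(uint32_t type_idx, ArtMethod* method, Thread* self) {
     *     mirror::Class* klass = method->dex_cache_resolved_types[type_idx];
     *     if (klass == nullptr ||
     *         klass->status != kStatusInitialized ||
     *         (klass->access_flags & kAccClassIsFinalizable) != 0 ||
     *         self->AllocStackTop() >= self->AllocStackEnd()) {   // Room to record the alloc?
     *       return nullptr;                                       // Take the slow path.
     *     }
     *     size_t size = klass->object_size;
     *     if (size >= kRosAllocMaxThreadLocalBracketSize) {
     *       return nullptr;
     *     }
     *     // "Align up by the quantum, divide by the quantum, subtract 1" is equivalent to
     *     // (size - 1) >> shift for size > 0, which is what the sub/lsr pair computes.
     *     size_t bracket = (size - 1) >> kRosAllocQuantumShift;
     *     Run* run = self->Runs()[bracket];
     *     Slot* slot = run->free_list.head;
     *     if (slot == nullptr) {
     *       return nullptr;
     *     }
     *     run->free_list.head = slot->next;            // Pop the head of the free list.
     *     SetClass(slot, klass);                       // Overwrites the 'next' pointer.
     *     self->PushOnThreadLocalAllocStack(slot);
     *     run->free_list.size--;
     *     MemoryBarrier();                             // The "dmb ish" in the assembly.
     *     return reinterpret_cast<mirror::Object*>(slot);
     *   }
     */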
Stuart Monteithb95a5342014-03-12 13:32:32 +00001541
Zheng Xu48241e72014-05-23 11:52:42 +08001542 /*
Zheng Xu69a50302015-04-14 20:04:41 +08001543 * Called by managed code when the thread has been asked to suspend.
Zheng Xu48241e72014-05-23 11:52:42 +08001544 */
1545 .extern artTestSuspendFromCode
1546ENTRY art_quick_test_suspend
1547 ldrh w0, [xSELF, #THREAD_FLAGS_OFFSET] // get xSELF->state_and_flags.as_struct.flags
Zheng Xu48241e72014-05-23 11:52:42 +08001548 cbnz w0, .Lneed_suspend // check flags == 0
1549 ret // return if flags == 0
1550.Lneed_suspend:
1551 mov x0, xSELF
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001552 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves for stack crawl
1553 bl artTestSuspendFromCode // (Thread*)
1554 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
Zheng Xu48241e72014-05-23 11:52:42 +08001555END art_quick_test_suspend
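    /*
     * Conceptually (sketch only; the flags field is the one at THREAD_FLAGS_OFFSET):
     *
     *   void TestSuspend(Thread* self) {
     *     if (self->state_and_flags.as_struct.flags != 0) {
     *       artTestSuspendFromCode(self);   // Runs the checkpoint/suspend request.
     *     }
     *   }
     */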
Stuart Monteithb95a5342014-03-12 13:32:32 +00001556
Stuart Monteithd5c78f42014-06-11 16:44:46 +01001557ENTRY art_quick_implicit_suspend
1558 mov x0, xSELF
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001559 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves for stack crawl
1560 bl artTestSuspendFromCode // (Thread*)
1561 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
Stuart Monteithd5c78f42014-06-11 16:44:46 +01001562END art_quick_implicit_suspend
1563
Andreas Gampee62a07e2014-03-26 14:53:21 -07001564 /*
1565 * Called by managed code that is attempting to call a method on a proxy class. On entry
1566 * x0 holds the proxy method and x1 holds the receiver; the frame size of the invoked proxy
1567 * method agrees with a ref and args callee save frame.
1568 */
1569 .extern artQuickProxyInvokeHandler
1570ENTRY art_quick_proxy_invoke_handler
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001571 SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_X0
Andreas Gampee62a07e2014-03-26 14:53:21 -07001572 mov x2, xSELF // pass Thread::Current
1573 mov x3, sp // pass SP
1574 bl artQuickProxyInvokeHandler // (Method* proxy method, receiver, Thread*, SP)
Serban Constantinescu9bd88b02015-04-22 16:24:46 +01001575 ldr x2, [xSELF, THREAD_EXCEPTION_OFFSET]
Andreas Gampee62a07e2014-03-26 14:53:21 -07001576 cbnz x2, .Lexception_in_proxy // bail out if an exception is pending
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001577 RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME // Restore frame
Andreas Gamped1e91672014-06-02 22:50:05 -07001578 fmov d0, x0 // Store result in d0 in case it was float or double
Andreas Gampee62a07e2014-03-26 14:53:21 -07001579 ret // return on success
1580.Lexception_in_proxy:
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001581 RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
Andreas Gampee62a07e2014-03-26 14:53:21 -07001582 DELIVER_PENDING_EXCEPTION
1583END art_quick_proxy_invoke_handler
Stuart Monteithb95a5342014-03-12 13:32:32 +00001584
Andreas Gampe51f76352014-05-21 08:28:48 -07001585 /*
Zheng Xub551fdc2014-07-25 11:49:42 +08001586 * Called to resolve an imt conflict. xIP1 is a hidden argument that holds the target method's
Andreas Gampe51f76352014-05-21 08:28:48 -07001587 * dex method index.
1588 */
Andreas Gampe3031c8d2015-07-13 20:11:06 -07001589 .extern artInvokeInterfaceTrampoline
Andreas Gampe51f76352014-05-21 08:28:48 -07001590ENTRY art_quick_imt_conflict_trampoline
Nicolas Geoffray8ea18d02015-05-26 16:29:08 +01001591 mov x0, xIP1
Andreas Gampe3031c8d2015-07-13 20:11:06 -07001592 INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline
Andreas Gampe51f76352014-05-21 08:28:48 -07001593END art_quick_imt_conflict_trampoline
Stuart Monteithb95a5342014-03-12 13:32:32 +00001594
1595ENTRY art_quick_resolution_trampoline
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001596 SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME
Stuart Monteithb95a5342014-03-12 13:32:32 +00001597 mov x2, xSELF
1598 mov x3, sp
1599 bl artQuickResolutionTrampoline // (called, receiver, Thread*, SP)
Matteo Franchindfd891a2014-04-30 12:17:17 +01001600 cbz x0, 1f
Zheng Xub551fdc2014-07-25 11:49:42 +08001601 mov xIP0, x0 // Remember returned code pointer in xIP0.
Mathieu Chartiere401d142015-04-22 13:56:20 -07001602 ldr x0, [sp, #0] // artQuickResolutionTrampoline puts called method in *SP.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001603 RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
Zheng Xub551fdc2014-07-25 11:49:42 +08001604 br xIP0
Stuart Monteithb95a5342014-03-12 13:32:32 +000016051:
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001606 RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
Stuart Monteithb95a5342014-03-12 13:32:32 +00001607 DELIVER_PENDING_EXCEPTION
1608END art_quick_resolution_trampoline
1609
1610/*
1611 * Generic JNI frame layout:
1612 *
1613 * #-------------------#
1614 * | |
1615 * | caller method... |
1616 * #-------------------# <--- SP on entry
1617 * | Return X30/LR |
1618 * | X29/FP | callee save
1619 * | X28 | callee save
1620 * | X27 | callee save
1621 * | X26 | callee save
1622 * | X25 | callee save
1623 * | X24 | callee save
1624 * | X23 | callee save
1625 * | X22 | callee save
1626 * | X21 | callee save
1627 * | X20 | callee save
Zheng Xu69a50302015-04-14 20:04:41 +08001628 * | X19 | callee save
Stuart Monteithb95a5342014-03-12 13:32:32 +00001629 * | X7 | arg7
1630 * | X6 | arg6
1631 * | X5 | arg5
1632 * | X4 | arg4
1633 * | X3 | arg3
1634 * | X2 | arg2
1635 * | X1 | arg1
Stuart Monteithb95a5342014-03-12 13:32:32 +00001636 * | D7 | float arg 8
1637 * | D6 | float arg 7
1638 * | D5 | float arg 6
1639 * | D4 | float arg 5
1640 * | D3 | float arg 4
1641 * | D2 | float arg 3
1642 * | D1 | float arg 2
1643 * | D0 | float arg 1
Andreas Gampecf4035a2014-05-28 22:43:01 -07001644 * | Method* | <- X0
Stuart Monteithb95a5342014-03-12 13:32:32 +00001645 * #-------------------#
1646 * | local ref cookie | // 4B
Mathieu Chartier421c5372014-05-14 14:11:40 -07001647 * | handle scope size | // 4B
Stuart Monteithb95a5342014-03-12 13:32:32 +00001648 * #-------------------#
1649 * | JNI Call Stack |
1650 * #-------------------# <--- SP on native call
1651 * | |
1652 * | Stack for Regs | The trampoline assembly will pop these values
1653 * | | into registers for native call
1654 * #-------------------#
1655 * | Native code ptr |
1656 * #-------------------#
1657 * | Free scratch |
1658 * #-------------------#
1659 * | Ptr to (1) | <--- SP
1660 * #-------------------#
1661 */
1662 /*
1663 * Called to do a generic JNI down-call
1664 */
Ian Rogers6f3dbba2014-10-14 17:41:57 -07001665ENTRY art_quick_generic_jni_trampoline
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001666 SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_X0
Stuart Monteithb95a5342014-03-12 13:32:32 +00001667
1668 // Save SP, so we can have static CFI info.
1669 mov x28, sp
1670 .cfi_def_cfa_register x28
1671
1672 // This looks the same, but is different: this will be updated to point to the bottom
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001673 // of the frame when the handle scope is inserted.
Stuart Monteithb95a5342014-03-12 13:32:32 +00001674 mov xFP, sp
1675
Zheng Xub551fdc2014-07-25 11:49:42 +08001676 mov xIP0, #5120
1677 sub sp, sp, xIP0
Stuart Monteithb95a5342014-03-12 13:32:32 +00001678
1679 // prepare for artQuickGenericJniTrampoline call
1680 // (Thread*, SP)
1681 // x0 x1 <= C calling convention
1682 // xSELF xFP <= where they are
1683
1684 mov x0, xSELF // Thread*
1685 mov x1, xFP
1686 bl artQuickGenericJniTrampoline // (Thread*, sp)
1687
Andreas Gampec200a4a2014-06-16 18:39:09 -07001688 // The C call will have registered the complete save-frame on success.
1689 // The result of the call is:
1690 // x0: pointer to native code, 0 on error.
1691 // x1: pointer to the bottom of the used area of the alloca, can restore stack till there.
Stuart Monteithb95a5342014-03-12 13:32:32 +00001692
Andreas Gampec200a4a2014-06-16 18:39:09 -07001693 // Check for error = 0.
Nicolas Geoffray126d6592015-03-03 14:28:35 +00001694 cbz x0, .Lexception_in_native
Stuart Monteithb95a5342014-03-12 13:32:32 +00001695
Andreas Gampec200a4a2014-06-16 18:39:09 -07001696 // Release part of the alloca.
1697 mov sp, x1
Stuart Monteithb95a5342014-03-12 13:32:32 +00001698
Andreas Gampec200a4a2014-06-16 18:39:09 -07001699 // Save the code pointer
1700 mov xIP0, x0
Stuart Monteithb95a5342014-03-12 13:32:32 +00001701
1702 // Load parameters from frame into registers.
1703 // TODO: Check with artQuickGenericJniTrampoline.
1704 // Also, check again AAPCS64 - the stack arguments are interleaved.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001705 ldp x0, x1, [sp]
1706 ldp x2, x3, [sp, #16]
1707 ldp x4, x5, [sp, #32]
1708 ldp x6, x7, [sp, #48]
Stuart Monteithb95a5342014-03-12 13:32:32 +00001709
Andreas Gampec200a4a2014-06-16 18:39:09 -07001710 ldp d0, d1, [sp, #64]
1711 ldp d2, d3, [sp, #80]
1712 ldp d4, d5, [sp, #96]
1713 ldp d6, d7, [sp, #112]
Stuart Monteithb95a5342014-03-12 13:32:32 +00001714
Andreas Gampec200a4a2014-06-16 18:39:09 -07001715 add sp, sp, #128
Stuart Monteithb95a5342014-03-12 13:32:32 +00001716
Zheng Xub551fdc2014-07-25 11:49:42 +08001717 blr xIP0 // native call.
Stuart Monteithb95a5342014-03-12 13:32:32 +00001718
1719 // result sign extension is handled in C code
1720 // prepare for artQuickGenericJniEndTrampoline call
Andreas Gampec200a4a2014-06-16 18:39:09 -07001721 // (Thread*, result, result_f)
1722 // x0 x1 x2 <= C calling convention
Serban Constantinescu9bd88b02015-04-22 16:24:46 +01001723 mov x1, x0 // Result (from saved).
1724 mov x0, xSELF // Thread register.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001725 fmov x2, d0 // d0 will contain floating point result, but needs to go into x2
Stuart Monteithb95a5342014-03-12 13:32:32 +00001726
1727 bl artQuickGenericJniEndTrampoline
1728
Nicolas Geoffray126d6592015-03-03 14:28:35 +00001729 // Pending exceptions possible.
Serban Constantinescu9bd88b02015-04-22 16:24:46 +01001730 ldr x2, [xSELF, THREAD_EXCEPTION_OFFSET]
Nicolas Geoffray126d6592015-03-03 14:28:35 +00001731 cbnz x2, .Lexception_in_native
1732
Stuart Monteithb95a5342014-03-12 13:32:32 +00001733 // Tear down the alloca.
1734 mov sp, x28
1735 .cfi_def_cfa_register sp
1736
Stuart Monteithb95a5342014-03-12 13:32:32 +00001737 // Tear down the callee-save frame.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001738 RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
Stuart Monteithb95a5342014-03-12 13:32:32 +00001739
1740 // store into fpr, for when it's a fpr return...
1741 fmov d0, x0
1742 ret
1743
Stuart Monteithb95a5342014-03-12 13:32:32 +00001744.Lexception_in_native:
Nicolas Geoffray126d6592015-03-03 14:28:35 +00001745 // Move to x1 then sp to please assembler.
1746 ldr x1, [xSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]
1747 mov sp, x1
1748 .cfi_def_cfa_register sp
1749 // This will create a new save-all frame, required by the runtime.
Stuart Monteithb95a5342014-03-12 13:32:32 +00001750 DELIVER_PENDING_EXCEPTION
Stuart Monteithb95a5342014-03-12 13:32:32 +00001751END art_quick_generic_jni_trampoline
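/*
 * Illustrative only: the control flow of art_quick_generic_jni_trampoline above, written as
 * C++-like pseudocode (the helper "signatures" are simplified; the real ones return two words
 * in x0/x1):
 *
 *   // Fill the handle scope and the native call stack inside the 5120-byte alloca below xFP.
 *   auto [code, native_sp] = artQuickGenericJniTrampoline(self, managed_frame);   // x0, x1.
 *   if (code == nullptr) goto exception_in_native;
 *   sp = native_sp;                                  // Release the unused part of the alloca.
 *   load x0..x7 and d0..d7 from *sp; sp += 128;      // Pop the prepared register arguments.
 *   uint64_t gpr_result = (*code)(...);              // blr xIP0: the actual native call.
 *   uint64_t result = artQuickGenericJniEndTrampoline(self, gpr_result, fpr_result);
 *   if (self->exception != nullptr) goto exception_in_native;
 *   return result;                                   // Also copied into d0 for FP returns.
 */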
1752
1753/*
1754 * Called to bridge from the quick to interpreter ABI. On entry the arguments match those
1755 * of a quick call:
1756 * x0 = method being called/to bridge to.
1757 * x1..x7, d0..d7 = arguments to that method.
1758 */
Ian Rogers6f3dbba2014-10-14 17:41:57 -07001759ENTRY art_quick_to_interpreter_bridge
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001760 SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME // Set up frame and save arguments.
Stuart Monteithb95a5342014-03-12 13:32:32 +00001761
1762 // x0 will contain mirror::ArtMethod* method.
1763 mov x1, xSELF // pass Thread::Current
1764 mov x2, sp
1765
1766 // uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
1767 // mirror::ArtMethod** sp)
1768 bl artQuickToInterpreterBridge
1769
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001770 RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME // TODO: no need to restore arguments in this case.
Stuart Monteithb95a5342014-03-12 13:32:32 +00001771
1772 fmov d0, x0
1773
1774 RETURN_OR_DELIVER_PENDING_EXCEPTION
1775END art_quick_to_interpreter_bridge
1776
Andreas Gamped58342c2014-06-05 14:18:08 -07001777
1778//
1779// Instrumentation-related stubs
1780//
1781 .extern artInstrumentationMethodEntryFromCode
1782ENTRY art_quick_instrumentation_entry
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001783 SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME
Andreas Gamped58342c2014-06-05 14:18:08 -07001784
Zheng Xub551fdc2014-07-25 11:49:42 +08001785 mov x20, x0 // Preserve method reference in a callee-save.
Andreas Gamped58342c2014-06-05 14:18:08 -07001786
1787 mov x2, xSELF
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001788 mov x3, xLR
1789 bl artInstrumentationMethodEntryFromCode // (Method*, Object*, Thread*, LR)
Andreas Gamped58342c2014-06-05 14:18:08 -07001790
Zheng Xub551fdc2014-07-25 11:49:42 +08001791 mov xIP0, x0 // x0 = result of call.
1792 mov x0, x20 // Reload method reference.
Andreas Gamped58342c2014-06-05 14:18:08 -07001793
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001794 RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME // Note: will restore xSELF
Andreas Gamped58342c2014-06-05 14:18:08 -07001795 adr xLR, art_quick_instrumentation_exit
Zheng Xub551fdc2014-07-25 11:49:42 +08001796 br xIP0 // Tail-call method with lr set to art_quick_instrumentation_exit.
Andreas Gamped58342c2014-06-05 14:18:08 -07001797END art_quick_instrumentation_entry
1798
1799 .extern artInstrumentationMethodExitFromCode
1800ENTRY art_quick_instrumentation_exit
1801 mov xLR, #0 // Clobber LR for later checks.
1802
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001803 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
Andreas Gamped58342c2014-06-05 14:18:08 -07001804
1805 // We need to save x0 and d0. We could use a callee-save from SETUP_REFS_ONLY_CALLEE_SAVE_FRAME, but then
1806 // we would need to fully restore it. As there are a lot of callee-save registers, it seems
1807 // easier to have an extra small stack area.
1808
Sebastien Hertz70f8d4b2014-06-19 11:51:41 +02001809 str x0, [sp, #-16]! // Save integer result.
Andreas Gamped58342c2014-06-05 14:18:08 -07001810 .cfi_adjust_cfa_offset 16
1811 str d0, [sp, #8] // Save floating-point result.
1812
Andreas Gamped58342c2014-06-05 14:18:08 -07001813 add x1, sp, #16 // Pass SP.
1814 mov x2, x0 // Pass integer result.
1815 fmov x3, d0 // Pass floating-point result.
Sebastien Hertz70f8d4b2014-06-19 11:51:41 +02001816 mov x0, xSELF // Pass Thread.
Andreas Gamped58342c2014-06-05 14:18:08 -07001817 bl artInstrumentationMethodExitFromCode // (Thread*, SP, gpr_res, fpr_res)
1818
Zheng Xub551fdc2014-07-25 11:49:42 +08001819 mov xIP0, x0 // Return address from instrumentation call.
Andreas Gamped58342c2014-06-05 14:18:08 -07001820 mov xLR, x1 // x1 is holding the link register if we're to bounce to deoptimize
1821
1822 ldr d0, [sp, #8] // Restore floating-point result.
1823 ldr x0, [sp], 16 // Restore integer result, and drop stack area.
1824 .cfi_adjust_cfa_offset -16
1825
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001826 POP_REFS_ONLY_CALLEE_SAVE_FRAME
Andreas Gamped58342c2014-06-05 14:18:08 -07001827
Zheng Xub551fdc2014-07-25 11:49:42 +08001828 br xIP0 // Tail-call out.
Andreas Gamped58342c2014-06-05 14:18:08 -07001829END art_quick_instrumentation_exit
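    /*
     * Sketch of the protocol between the two instrumentation stubs above (pseudocode, not the
     * exact runtime signatures):
     *
     *   // Entry stub: ask the runtime for the code to run, then tail-call it with LR pointing
     *   // at the exit stub, so the instrumented method "returns" into
     *   // art_quick_instrumentation_exit.
     *   const void* code = artInstrumentationMethodEntryFromCode(method, receiver, self, lr);
     *   lr = &art_quick_instrumentation_exit;
     *   goto *code;
     *
     *   // Exit stub: report the result; the runtime hands back the real return address in x0
     *   // and, in x1, the LR to use (pointing at the deopt entry point if we must deoptimize).
     *   auto [return_pc, new_lr] = artInstrumentationMethodExitFromCode(self, sp, gpr_result,
     *                                                                   fpr_result);
     *   lr = new_lr;
     *   goto *return_pc;
     */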
1830
1831 /*
1832 * Instrumentation has requested that we deoptimize into the interpreter. The deoptimization
1833 * will long jump to the upcall with a special exception of -1.
1834 */
1835 .extern artDeoptimize
1836ENTRY art_quick_deoptimize
1837 SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
1838 mov x0, xSELF // Pass thread.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001839 bl artDeoptimize // artDeoptimize(Thread*)
Serban Constantinescu86797a72014-06-19 16:17:56 +01001840 brk 0
Andreas Gamped58342c2014-06-05 14:18:08 -07001841END art_quick_deoptimize
1842
Sebastien Hertz07474662015-08-25 15:12:33 +00001843 /*
1844 * Compiled code has requested that we deoptimize into the interpreter. The deoptimization
1845 * will long jump to the upcall with a special exception of -1.
1846 */
1847 .extern artDeoptimizeFromCompiledCode
1848ENTRY art_quick_deoptimize_from_compiled_code
1849 SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
1850 mov x0, xSELF // Pass thread.
1851 bl artDeoptimizeFromCompiledCode // artDeoptimizeFromCompiledCode(Thread*)
1852 brk 0
1853END art_quick_deoptimize_from_compiled_code
1854
Andreas Gamped58342c2014-06-05 14:18:08 -07001855
Serban Constantinescu169489b2014-06-11 16:43:35 +01001856 /*
1857 * String's indexOf.
1858 *
1859 * TODO: Not very optimized.
1860 * On entry:
1861 * x0: string object (known non-null)
1862 * w1: char to match (known <= 0xFFFF)
1863 * w2: Starting offset in string data
1864 */
1865ENTRY art_quick_indexof
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001866 ldr w3, [x0, #MIRROR_STRING_COUNT_OFFSET]
Jeff Hao848f70a2014-01-15 13:49:50 -08001867 add x0, x0, #MIRROR_STRING_VALUE_OFFSET
Serban Constantinescu169489b2014-06-11 16:43:35 +01001868
1869 /* Clamp start to [0..count] */
1870 cmp w2, #0
1871 csel w2, wzr, w2, lt
1872 cmp w2, w3
1873 csel w2, w3, w2, gt
1874
Serban Constantinescu169489b2014-06-11 16:43:35 +01001875 /* Save a copy to compute result */
1876 mov x5, x0
1877
1878 /* Build pointer to start of data to compare and pre-bias */
1879 add x0, x0, x2, lsl #1
1880 sub x0, x0, #2
1881
1882 /* Compute iteration count */
1883 sub w2, w3, w2
1884
1885 /*
1886 * At this point we have:
1887 * x0: start of the data to test
1888 * w1: char to compare
1889 * w2: iteration count
1890 * x5: original start of string data
1891 */
1892
1893 subs w2, w2, #4
1894 b.lt .Lindexof_remainder
1895
1896.Lindexof_loop4:
1897 ldrh w6, [x0, #2]!
1898 ldrh w7, [x0, #2]!
Zheng Xub551fdc2014-07-25 11:49:42 +08001899 ldrh wIP0, [x0, #2]!
1900 ldrh wIP1, [x0, #2]!
Serban Constantinescu169489b2014-06-11 16:43:35 +01001901 cmp w6, w1
1902 b.eq .Lmatch_0
1903 cmp w7, w1
1904 b.eq .Lmatch_1
Zheng Xub551fdc2014-07-25 11:49:42 +08001905 cmp wIP0, w1
Serban Constantinescu169489b2014-06-11 16:43:35 +01001906 b.eq .Lmatch_2
Zheng Xub551fdc2014-07-25 11:49:42 +08001907 cmp wIP1, w1
Serban Constantinescu169489b2014-06-11 16:43:35 +01001908 b.eq .Lmatch_3
1909 subs w2, w2, #4
1910 b.ge .Lindexof_loop4
1911
1912.Lindexof_remainder:
1913 adds w2, w2, #4
1914 b.eq .Lindexof_nomatch
1915
1916.Lindexof_loop1:
1917 ldrh w6, [x0, #2]!
1918 cmp w6, w1
1919 b.eq .Lmatch_3
1920 subs w2, w2, #1
1921 b.ne .Lindexof_loop1
1922
1923.Lindexof_nomatch:
1924 mov x0, #-1
1925 ret
1926
1927.Lmatch_0:
1928 sub x0, x0, #6
1929 sub x0, x0, x5
1930 asr x0, x0, #1
1931 ret
1932.Lmatch_1:
1933 sub x0, x0, #4
1934 sub x0, x0, x5
1935 asr x0, x0, #1
1936 ret
1937.Lmatch_2:
1938 sub x0, x0, #2
1939 sub x0, x0, x5
1940 asr x0, x0, #1
1941 ret
1942.Lmatch_3:
1943 sub x0, x0, x5
1944 asr x0, x0, #1
1945 ret
1946END art_quick_indexof
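    /*
     * Equivalent C logic, for reference (sketch; 'value' and 'count' mirror the String fields
     * read via MIRROR_STRING_VALUE_OFFSET / MIRROR_STRING_COUNT_OFFSET):
     *
     *   int32_t IndexOf(const uint16_t* value, int32_t count, uint16_t ch, int32_t start) {
     *     if (start < 0) start = 0;                 // Clamp start to [0..count].
     *     if (start > count) start = count;
     *     for (int32_t i = start; i < count; ++i) { // The assembly unrolls this loop 4x.
     *       if (value[i] == ch) return i;
     *     }
     *     return -1;
     *   }
     */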
Andreas Gampe266340d2014-05-02 07:55:24 -07001947
1948 /*
1949 * String's compareTo.
1950 *
1951 * TODO: Not very optimized.
1952 *
1953 * On entry:
1954 * x0: this object pointer
1955 * x1: comp object pointer
1956 *
1957 */
Serban Constantinescu86797a72014-06-19 16:17:56 +01001958 .extern __memcmp16
Andreas Gampe266340d2014-05-02 07:55:24 -07001959ENTRY art_quick_string_compareto
1960 mov x2, x0 // x0 is return, use x2 for first input.
1961 sub x0, x2, x1 // Same string object?
1962 cbnz x0, 1f
1963 ret
19641: // Different string objects.
1965
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001966 ldr w4, [x2, #MIRROR_STRING_COUNT_OFFSET]
1967 ldr w3, [x1, #MIRROR_STRING_COUNT_OFFSET]
Jeff Hao848f70a2014-01-15 13:49:50 -08001968 add x2, x2, #MIRROR_STRING_VALUE_OFFSET
1969 add x1, x1, #MIRROR_STRING_VALUE_OFFSET
Andreas Gampe266340d2014-05-02 07:55:24 -07001970
1971 /*
Jeff Hao848f70a2014-01-15 13:49:50 -08001972 * Now: Data* Count
1973 * first arg x2 w4
1974 * second arg x1 w3
Andreas Gampe266340d2014-05-02 07:55:24 -07001975 */
1976
1977 // x0 := str1.length(w4) - str2.length(w3). ldr zero-extended w3/w4 into x3/x4.
1978 subs x0, x4, x3
1979 // Min(count1, count2) into w3.
1980 csel x3, x3, x4, ge
1981
Serban Constantinescu169489b2014-06-11 16:43:35 +01001982 // TODO: Tune this value.
Andreas Gampe266340d2014-05-02 07:55:24 -07001983 // Check for long string, do memcmp16 for them.
1984 cmp w3, #28 // Constant from arm32.
1985 bgt .Ldo_memcmp16
1986
1987 /*
1988 * Now:
1989 * x2: *first string data
1990 * x1: *second string data
1991 * w3: iteration count
1992 * x0: return value if comparison equal
1993 * x4, x5, x6, x7: free
1994 */
1995
1996 // Do a simple unrolled loop.
1997.Lloop:
1998 // At least two more elements?
1999 subs w3, w3, #2
2000 b.lt .Lremainder_or_done
2001
2002 ldrh w4, [x2], #2
2003 ldrh w5, [x1], #2
2004
2005 ldrh w6, [x2], #2
2006 ldrh w7, [x1], #2
2007
2008 subs w4, w4, w5
2009 b.ne .Lw4_result
2010
2011 subs w6, w6, w7
2012 b.ne .Lw6_result
2013
2014 b .Lloop
2015
2016.Lremainder_or_done:
2017 adds w3, w3, #1
2018 b.eq .Lremainder
2019 ret
2020
2021.Lremainder:
2022 ldrh w4, [x2], #2
2023 ldrh w5, [x1], #2
2024 subs w4, w4, w5
2025 b.ne .Lw4_result
2026 ret
2027
2028// Result is in w4
2029.Lw4_result:
2030 sxtw x0, w4
2031 ret
2032
2033// Result is in w6
2034.Lw6_result:
2035 sxtw x0, w6
2036 ret
2037
2038.Ldo_memcmp16:
Zheng Xu62ddb322014-08-12 17:19:12 +08002039 mov x14, x0 // Save x0 and LR. __memcmp16 does not use these temps.
2040 mov x15, xLR // TODO: Codify and check that?
Andreas Gampe266340d2014-05-02 07:55:24 -07002041
2042 mov x0, x2
2043 uxtw x2, w3
Serban Constantinescu86797a72014-06-19 16:17:56 +01002044 bl __memcmp16
Andreas Gampe266340d2014-05-02 07:55:24 -07002045
Zheng Xu62ddb322014-08-12 17:19:12 +08002046 mov xLR, x15 // Restore LR.
Andreas Gampe266340d2014-05-02 07:55:24 -07002047
Serban Constantinescu86797a72014-06-19 16:17:56 +01002048 cmp x0, #0 // Check the memcmp difference.
Zheng Xu62ddb322014-08-12 17:19:12 +08002049 csel x0, x0, x14, ne // x0 := x0 != 0 ? x0(__memcmp16 result) : x14(saved length diff).
Andreas Gampe266340d2014-05-02 07:55:24 -07002050 ret
2051END art_quick_string_compareto
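    /*
     * Equivalent C logic, for reference (sketch only; __memcmp16 is assumed to compare uint16_t
     * elements and return a signed difference, which is how the assembly above uses it):
     *
     *   int32_t CompareTo(const uint16_t* lhs, int32_t lhs_count,
     *                     const uint16_t* rhs, int32_t rhs_count) {
     *     if (lhs == rhs) return 0;                     // Same string object.
     *     int32_t count_diff = lhs_count - rhs_count;   // Result if all compared chars match.
     *     int32_t min_count = lhs_count < rhs_count ? lhs_count : rhs_count;
     *     for (int32_t i = 0; i < min_count; ++i) {     // <= 28 chars: unrolled loop;
     *       int32_t diff = lhs[i] - rhs[i];             // longer strings go through __memcmp16.
     *       if (diff != 0) return diff;
     *     }
     *     return count_diff;
     *   }
     */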