/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_arm64.S"

#include "arch/quick_alloc_entrypoints.S"


.macro INCREASE_FRAME frame_adjustment
    sub sp, sp, #(\frame_adjustment)
    .cfi_adjust_cfa_offset (\frame_adjustment)
.endm

.macro DECREASE_FRAME frame_adjustment
    add sp, sp, #(\frame_adjustment)
    .cfi_adjust_cfa_offset -(\frame_adjustment)
.endm
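
// These macros expand textually; for example, `INCREASE_FRAME 176` becomes
//     sub sp, sp, #(176)
//     .cfi_adjust_cfa_offset (176)
// so every SP adjustment is paired with a CFI directive that keeps the
// unwinder's notion of the canonical frame address in sync.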

.macro SAVE_REG reg, offset
    str \reg, [sp, #(\offset)]
    .cfi_rel_offset \reg, (\offset)
.endm

.macro RESTORE_REG reg, offset
    ldr \reg, [sp, #(\offset)]
    .cfi_restore \reg
.endm

.macro SAVE_TWO_REGS reg1, reg2, offset
    stp \reg1, \reg2, [sp, #(\offset)]
    .cfi_rel_offset \reg1, (\offset)
    .cfi_rel_offset \reg2, (\offset) + 8
.endm

.macro RESTORE_TWO_REGS reg1, reg2, offset
    ldp \reg1, \reg2, [sp, #(\offset)]
    .cfi_restore \reg1
    .cfi_restore \reg2
.endm

.macro SAVE_TWO_REGS_INCREASE_FRAME reg1, reg2, frame_adjustment
    stp \reg1, \reg2, [sp, #-(\frame_adjustment)]!
    .cfi_adjust_cfa_offset (\frame_adjustment)
    .cfi_rel_offset \reg1, 0
    .cfi_rel_offset \reg2, 8
.endm

.macro RESTORE_TWO_REGS_DECREASE_FRAME reg1, reg2, frame_adjustment
    ldp \reg1, \reg2, [sp], #(\frame_adjustment)
    .cfi_restore \reg1
    .cfi_restore \reg2
    .cfi_adjust_cfa_offset -(\frame_adjustment)
.endm
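
// The store in SAVE_TWO_REGS_INCREASE_FRAME uses pre-indexed addressing
// (`[sp, #-(\frame_adjustment)]!`): SP is decremented first, then the pair
// is stored at the new SP, landing at offsets 0 and 8 of the enlarged
// frame. The matching ldp in RESTORE_TWO_REGS_DECREASE_FRAME uses
// post-indexed addressing to reload the pair and pop the whole frame in a
// single instruction.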

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveAllCalleeSaves).
     */
.macro SETUP_SAVE_ALL_CALLEE_SAVES_FRAME
    // art::Runtime** xIP0 = &art::Runtime::instance_
    adrp xIP0, :got:_ZN3art7Runtime9instance_E
    ldr xIP0, [xIP0, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr xIP0, [xIP0]                   // art::Runtime* xIP0 = art::Runtime::instance_;

    // ArtMethod* xIP0 = Runtime::instance_->callee_save_methods_[kSaveAllCalleeSaves];
    ldr xIP0, [xIP0, RUNTIME_SAVE_ALL_CALLEE_SAVES_METHOD_OFFSET]

    INCREASE_FRAME 176

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVES != 176)
#error "FRAME_SIZE_SAVE_ALL_CALLEE_SAVES(ARM64) size not as expected."
#endif

    // Stack alignment filler [sp, #8].
    // FP callee-saves.
    stp d8, d9,   [sp, #16]
    stp d10, d11, [sp, #32]
    stp d12, d13, [sp, #48]
    stp d14, d15, [sp, #64]

    // GP callee-saves.
    SAVE_TWO_REGS x19, x20, 80
    SAVE_TWO_REGS x21, x22, 96
    SAVE_TWO_REGS x23, x24, 112
    SAVE_TWO_REGS x25, x26, 128
    SAVE_TWO_REGS x27, x28, 144
    SAVE_TWO_REGS x29, xLR, 160

    // Store ArtMethod* Runtime::callee_save_methods_[kSaveAllCalleeSaves].
    str xIP0, [sp]
    // Place sp in Thread::Current()->top_quick_frame.
    mov xIP0, sp
    str xIP0, [xSELF, # THREAD_TOP_QUICK_FRAME_OFFSET]
.endm
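
// Resulting kSaveAllCalleeSaves frame layout (176 bytes, as laid out by the
// stores above):
//     [sp, #160]  x29, xLR
//     [sp, #80]   x19..x28
//     [sp, #16]   d8..d15
//     [sp, #8]    stack alignment filler
//     [sp, #0]    ArtMethod* (the runtime's kSaveAllCalleeSaves method)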

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveRefsOnly).
     */
.macro SETUP_SAVE_REFS_ONLY_FRAME
    // art::Runtime** xIP0 = &art::Runtime::instance_
    adrp xIP0, :got:_ZN3art7Runtime9instance_E
    ldr xIP0, [xIP0, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr xIP0, [xIP0]                   // art::Runtime* xIP0 = art::Runtime::instance_;

    // ArtMethod* xIP0 = Runtime::instance_->callee_save_methods_[kSaveRefsOnly];
    ldr xIP0, [xIP0, RUNTIME_SAVE_REFS_ONLY_METHOD_OFFSET]

    INCREASE_FRAME 96

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_REFS_ONLY != 96)
#error "FRAME_SIZE_SAVE_REFS_ONLY(ARM64) size not as expected."
#endif

    // GP callee-saves.
    // x20 paired with ArtMethod* - see below.
    SAVE_TWO_REGS x21, x22, 16
    SAVE_TWO_REGS x23, x24, 32
    SAVE_TWO_REGS x25, x26, 48
    SAVE_TWO_REGS x27, x28, 64
    SAVE_TWO_REGS x29, xLR, 80

    // Store ArtMethod* Runtime::callee_save_methods_[kSaveRefsOnly].
    stp xIP0, x20, [sp]
    .cfi_rel_offset x20, 8

    // Place sp in Thread::Current()->top_quick_frame.
    mov xIP0, sp
    str xIP0, [xSELF, # THREAD_TOP_QUICK_FRAME_OFFSET]
.endm

// TODO: Probably no need to restore registers preserved by aapcs64.
.macro RESTORE_SAVE_REFS_ONLY_FRAME
    // Callee-saves.
    RESTORE_REG x20, 8
    RESTORE_TWO_REGS x21, x22, 16
    RESTORE_TWO_REGS x23, x24, 32
    RESTORE_TWO_REGS x25, x26, 48
    RESTORE_TWO_REGS x27, x28, 64
    RESTORE_TWO_REGS x29, xLR, 80

    DECREASE_FRAME 96
.endm

.macro POP_SAVE_REFS_ONLY_FRAME
    DECREASE_FRAME 96
.endm

.macro RESTORE_SAVE_REFS_ONLY_FRAME_AND_RETURN
    RESTORE_SAVE_REFS_ONLY_FRAME
    ret
.endm
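
// Note: POP_SAVE_REFS_ONLY_FRAME above only unwinds the 96-byte frame;
// unlike RESTORE_SAVE_REFS_ONLY_FRAME it does not reload the callee-saved
// registers, so it is only appropriate where those registers were not
// clobbered or their saved values are no longer needed.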


.macro SETUP_SAVE_REFS_AND_ARGS_FRAME_INTERNAL
    INCREASE_FRAME 224

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_REFS_AND_ARGS != 224)
#error "FRAME_SIZE_SAVE_REFS_AND_ARGS(ARM64) size not as expected."
#endif

    // Stack alignment filler [sp, #8].
    // FP args.
    stp d0, d1, [sp, #16]
    stp d2, d3, [sp, #32]
    stp d4, d5, [sp, #48]
    stp d6, d7, [sp, #64]

    // Core args.
    SAVE_TWO_REGS x1, x2, 80
    SAVE_TWO_REGS x3, x4, 96
    SAVE_TWO_REGS x5, x6, 112

    // x7, callee-saves.
    SAVE_TWO_REGS x7, x20, 128
    SAVE_TWO_REGS x21, x22, 144
    SAVE_TWO_REGS x23, x24, 160
    SAVE_TWO_REGS x25, x26, 176
    SAVE_TWO_REGS x27, x28, 192

    // x29 (callee-save) and LR.
    SAVE_TWO_REGS x29, xLR, 208

.endm
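
// Resulting kSaveRefsAndArgs frame layout (224 bytes, as laid out by the
// stores above):
//     [sp, #208]  x29, xLR
//     [sp, #128]  x7 (last core arg), then callee-saves x20..x28
//     [sp, #80]   core args x1..x6
//     [sp, #16]   FP args d0..d7
//     [sp, #8]    stack alignment filler
//     [sp, #0]    ArtMethod* (stored by the callers of this macro)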

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveRefsAndArgs).
     *
     * TODO This is probably too conservative - saving FP & LR.
     */
.macro SETUP_SAVE_REFS_AND_ARGS_FRAME
    // art::Runtime** xIP0 = &art::Runtime::instance_
    adrp xIP0, :got:_ZN3art7Runtime9instance_E
    ldr xIP0, [xIP0, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr xIP0, [xIP0]                   // art::Runtime* xIP0 = art::Runtime::instance_;

    // ArtMethod* xIP0 = Runtime::instance_->callee_save_methods_[kSaveRefsAndArgs];
    ldr xIP0, [xIP0, RUNTIME_SAVE_REFS_AND_ARGS_METHOD_OFFSET]

    SETUP_SAVE_REFS_AND_ARGS_FRAME_INTERNAL

    str xIP0, [sp]                     // Store ArtMethod* Runtime::callee_save_methods_[kSaveRefsAndArgs].
    // Place sp in Thread::Current()->top_quick_frame.
    mov xIP0, sp
    str xIP0, [xSELF, # THREAD_TOP_QUICK_FRAME_OFFSET]
.endm

.macro SETUP_SAVE_REFS_AND_ARGS_FRAME_WITH_METHOD_IN_X0
    SETUP_SAVE_REFS_AND_ARGS_FRAME_INTERNAL
    str x0, [sp, #0]                   // Store ArtMethod* to bottom of stack.
    // Place sp in Thread::Current()->top_quick_frame.
    mov xIP0, sp
    str xIP0, [xSELF, # THREAD_TOP_QUICK_FRAME_OFFSET]
.endm

// TODO: Probably no need to restore registers preserved by aapcs64.
.macro RESTORE_SAVE_REFS_AND_ARGS_FRAME
    // FP args.
    ldp d0, d1, [sp, #16]
    ldp d2, d3, [sp, #32]
    ldp d4, d5, [sp, #48]
    ldp d6, d7, [sp, #64]

    // Core args.
    RESTORE_TWO_REGS x1, x2, 80
    RESTORE_TWO_REGS x3, x4, 96
    RESTORE_TWO_REGS x5, x6, 112

    // x7, callee-saves.
    RESTORE_TWO_REGS x7, x20, 128
    RESTORE_TWO_REGS x21, x22, 144
    RESTORE_TWO_REGS x23, x24, 160
    RESTORE_TWO_REGS x25, x26, 176
    RESTORE_TWO_REGS x27, x28, 192

    // x29 (callee-save) and LR.
    RESTORE_TWO_REGS x29, xLR, 208

    DECREASE_FRAME 224
.endm

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveEverything)
     * when the SP has already been decremented by FRAME_SIZE_SAVE_EVERYTHING
     * and saving registers x29 and LR is handled elsewhere.
     */
.macro SETUP_SAVE_EVERYTHING_FRAME_DECREMENTED_SP_SKIP_X29_LR
    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_EVERYTHING != 512)
#error "FRAME_SIZE_SAVE_EVERYTHING(ARM64) size not as expected."
#endif

    // Save FP registers.
    // For better performance, store d0 and d31 separately, so that all STPs are 16-byte aligned.
    str d0,       [sp, #8]
    stp d1, d2,   [sp, #16]
    stp d3, d4,   [sp, #32]
    stp d5, d6,   [sp, #48]
    stp d7, d8,   [sp, #64]
    stp d9, d10,  [sp, #80]
    stp d11, d12, [sp, #96]
    stp d13, d14, [sp, #112]
    stp d15, d16, [sp, #128]
    stp d17, d18, [sp, #144]
    stp d19, d20, [sp, #160]
    stp d21, d22, [sp, #176]
    stp d23, d24, [sp, #192]
    stp d25, d26, [sp, #208]
    stp d27, d28, [sp, #224]
    stp d29, d30, [sp, #240]
    str d31,      [sp, #256]

    // Save core registers.
    SAVE_REG x0, 264
    SAVE_TWO_REGS x1, x2, 272
    SAVE_TWO_REGS x3, x4, 288
    SAVE_TWO_REGS x5, x6, 304
    SAVE_TWO_REGS x7, x8, 320
    SAVE_TWO_REGS x9, x10, 336
    SAVE_TWO_REGS x11, x12, 352
    SAVE_TWO_REGS x13, x14, 368
    SAVE_TWO_REGS x15, x16, 384
    SAVE_TWO_REGS x17, x18, 400
    SAVE_TWO_REGS x19, x20, 416
    SAVE_TWO_REGS x21, x22, 432
    SAVE_TWO_REGS x23, x24, 448
    SAVE_TWO_REGS x25, x26, 464
    SAVE_TWO_REGS x27, x28, 480

    // art::Runtime** xIP0 = &art::Runtime::instance_
    adrp xIP0, :got:_ZN3art7Runtime9instance_E
    ldr xIP0, [xIP0, #:got_lo12:_ZN3art7Runtime9instance_E]

    ldr xIP0, [xIP0]                   // art::Runtime* xIP0 = art::Runtime::instance_;

    // ArtMethod* xIP0 = Runtime::instance_->callee_save_methods_[kSaveEverything];
    ldr xIP0, [xIP0, RUNTIME_SAVE_EVERYTHING_METHOD_OFFSET]

    // Store ArtMethod* Runtime::callee_save_methods_[kSaveEverything].
    str xIP0, [sp]
    // Place sp in Thread::Current()->top_quick_frame.
    mov xIP0, sp
    str xIP0, [xSELF, # THREAD_TOP_QUICK_FRAME_OFFSET]
.endm

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveEverything).
     */
.macro SETUP_SAVE_EVERYTHING_FRAME
    INCREASE_FRAME 512
    SAVE_TWO_REGS x29, xLR, 496
    SETUP_SAVE_EVERYTHING_FRAME_DECREMENTED_SP_SKIP_X29_LR
.endm
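
// Resulting kSaveEverything frame layout (512 bytes, as laid out by the
// stores above):
//     [sp, #496]  x29, xLR
//     [sp, #264]  x0..x28
//     [sp, #8]    d0..d31
//     [sp, #0]    ArtMethod* (the runtime's kSaveEverything method)
// This frame saves argument and scratch registers as well, so slow paths
// using it can return to compiled code without the compiler having to treat
// the call as clobbering those registers.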

.macro RESTORE_SAVE_EVERYTHING_FRAME_KEEP_X0
    // Restore FP registers.
    // For better performance, load d0 and d31 separately, so that all LDPs are 16-byte aligned.
    ldr d0,       [sp, #8]
    ldp d1, d2,   [sp, #16]
    ldp d3, d4,   [sp, #32]
    ldp d5, d6,   [sp, #48]
    ldp d7, d8,   [sp, #64]
    ldp d9, d10,  [sp, #80]
    ldp d11, d12, [sp, #96]
    ldp d13, d14, [sp, #112]
    ldp d15, d16, [sp, #128]
    ldp d17, d18, [sp, #144]
    ldp d19, d20, [sp, #160]
    ldp d21, d22, [sp, #176]
    ldp d23, d24, [sp, #192]
    ldp d25, d26, [sp, #208]
    ldp d27, d28, [sp, #224]
    ldp d29, d30, [sp, #240]
    ldr d31,      [sp, #256]

    // Restore core registers.
    RESTORE_TWO_REGS x1, x2, 272
    RESTORE_TWO_REGS x3, x4, 288
    RESTORE_TWO_REGS x5, x6, 304
    RESTORE_TWO_REGS x7, x8, 320
    RESTORE_TWO_REGS x9, x10, 336
    RESTORE_TWO_REGS x11, x12, 352
    RESTORE_TWO_REGS x13, x14, 368
    RESTORE_TWO_REGS x15, x16, 384
    RESTORE_TWO_REGS x17, x18, 400
    RESTORE_TWO_REGS x19, x20, 416
    RESTORE_TWO_REGS x21, x22, 432
    RESTORE_TWO_REGS x23, x24, 448
    RESTORE_TWO_REGS x25, x26, 464
    RESTORE_TWO_REGS x27, x28, 480
    RESTORE_TWO_REGS x29, xLR, 496

    DECREASE_FRAME 512
.endm

.macro RESTORE_SAVE_EVERYTHING_FRAME
    RESTORE_REG x0, 264
    RESTORE_SAVE_EVERYTHING_FRAME_KEEP_X0
.endm

.macro RETURN_IF_RESULT_IS_ZERO
    cbnz x0, 1f                        // result non-zero branch over
    ret                                // return
1:
.endm

.macro RETURN_IF_RESULT_IS_NON_ZERO
    cbz x0, 1f                         // result zero branch over
    ret                                // return
1:
.endm

    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_ when the runtime method frame is ready.
     */
.macro DELIVER_PENDING_EXCEPTION_FRAME_READY
    mov x0, xSELF

    // Point of no return.
    bl artDeliverPendingExceptionFromCode  // artDeliverPendingExceptionFromCode(Thread*)
    brk 0                              // Unreached
.endm

    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_.
     */
.macro DELIVER_PENDING_EXCEPTION
    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME
    DELIVER_PENDING_EXCEPTION_FRAME_READY
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_REG reg
    ldr \reg, [xSELF, # THREAD_EXCEPTION_OFFSET]  // Get exception field.
    cbnz \reg, 1f
    ret
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION
    RETURN_OR_DELIVER_PENDING_EXCEPTION_REG xIP0
.endm

// Same as above with x1. This is helpful in stubs that want to avoid clobbering another register.
.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
    RETURN_OR_DELIVER_PENDING_EXCEPTION_REG x1
.endm

.macro RETURN_IF_W0_IS_ZERO_OR_DELIVER
    cbnz w0, 1f                        // result non-zero branch over
    ret                                // return
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro NO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME  // save all registers as basis for long jump context
    mov x0, xSELF                      // pass Thread::Current
    bl \cxx_name                       // \cxx_name(Thread*)
    brk 0
END \c_name
.endm

.macro NO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_EVERYTHING_FRAME        // save all registers as basis for long jump context
    mov x0, xSELF                      // pass Thread::Current
    bl \cxx_name                       // \cxx_name(Thread*)
    brk 0
END \c_name
.endm

.macro ONE_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME  // save all registers as basis for long jump context.
    mov x1, xSELF                      // pass Thread::Current.
    bl \cxx_name                       // \cxx_name(arg, Thread*).
    brk 0
END \c_name
.endm

.macro TWO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_EVERYTHING_FRAME        // save all registers as basis for long jump context
    mov x2, xSELF                      // pass Thread::Current
    bl \cxx_name                       // \cxx_name(arg1, arg2, Thread*)
    brk 0
END \c_name
.endm

    /*
     * Called by managed code, saves callee saves and then calls artThrowException
     * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode

    /*
     * Called by managed code to create and deliver a NullPointerException.
     */
NO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode

    /*
     * Call installed by a signal handler to create and deliver a NullPointerException.
     */
    .extern art_quick_throw_null_pointer_exception_from_signal
ENTRY art_quick_throw_null_pointer_exception_from_signal
    // The fault handler pushes the gc map address, i.e. "return address", to stack
    // and passes the fault address in LR. So we need to set up the CFI info accordingly.
    .cfi_def_cfa_offset __SIZEOF_POINTER__
    .cfi_rel_offset lr, 0
    // Save all registers as basis for long jump context.
    INCREASE_FRAME (FRAME_SIZE_SAVE_EVERYTHING - __SIZEOF_POINTER__)
    SAVE_REG x29, (FRAME_SIZE_SAVE_EVERYTHING - 2 * __SIZEOF_POINTER__)  // LR already saved.
    SETUP_SAVE_EVERYTHING_FRAME_DECREMENTED_SP_SKIP_X29_LR
    mov x0, lr                         // pass the fault address stored in LR by the fault handler.
    mov x1, xSELF                      // pass Thread::Current.
    bl artThrowNullPointerExceptionFromSignal  // (arg, Thread*).
    brk 0
END art_quick_throw_null_pointer_exception_from_signal

    /*
     * Called by managed code to create and deliver an ArithmeticException.
     */
NO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING art_quick_throw_div_zero, artThrowDivZeroFromCode

    /*
     * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
     * index, arg2 holds limit.
     */
TWO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING art_quick_throw_array_bounds, artThrowArrayBoundsFromCode

    /*
     * Called by managed code to create and deliver a StringIndexOutOfBoundsException
     * as if thrown from a call to String.charAt(). Arg1 holds index, arg2 holds limit.
     */
TWO_ARG_RUNTIME_EXCEPTION_SAVE_EVERYTHING art_quick_throw_string_bounds, artThrowStringBoundsFromCode

    /*
     * Called by managed code to create and deliver a StackOverflowError.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode

    /*
     * All generated callsites for interface invokes and invocation slow paths will load arguments
     * as usual - except instead of loading arg0/x0 with the target Method*, arg0/x0 will contain
     * the method_idx. This wrapper will save arg1-arg3, and call the appropriate C helper.
     * NOTE: "this" is first visible argument of the target, and so can be found in arg1/x1.
     *
     * The helper will attempt to locate the target and return a 128-bit result in x0/x1 consisting
     * of the target Method* in x0 and method->code_ in x1.
     *
     * If unsuccessful, the helper will return null/????. There will be a pending exception in the
     * thread and we branch to another stub to deliver it.
     *
     * On success this wrapper will restore arguments and *jump* to the target, leaving the lr
     * pointing back to the original caller.
     *
     * Adapted from ARM32 code.
     *
     * Clobbers xIP0.
     */
.macro INVOKE_TRAMPOLINE_BODY cxx_name
    .extern \cxx_name
    SETUP_SAVE_REFS_AND_ARGS_FRAME     // save callee saves in case allocation triggers GC
    // Helper signature is always
    // (method_idx, *this_object, *caller_method, *self, sp)

    mov x2, xSELF                      // pass Thread::Current
    mov x3, sp
    bl \cxx_name                       // (method_idx, this, Thread*, SP)
    mov xIP0, x1                       // save Method*->code_
    RESTORE_SAVE_REFS_AND_ARGS_FRAME
    cbz x0, 1f                         // did we find the target? if not go to exception delivery
    br xIP0                            // tail call to target
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro INVOKE_TRAMPOLINE c_name, cxx_name
ENTRY \c_name
    INVOKE_TRAMPOLINE_BODY \cxx_name
END \c_name
.endm

INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck

INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck

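// Each INVOKE_TRAMPOLINE instantiation above expands to a full entrypoint;
// for example, the virtual case is equivalent to writing:
//     ENTRY art_quick_invoke_virtual_trampoline_with_access_check
//         INVOKE_TRAMPOLINE_BODY artInvokeVirtualTrampolineWithAccessCheck
//     END art_quick_invoke_virtual_trampoline_with_access_check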

.macro INVOKE_STUB_CREATE_FRAME

SAVE_SIZE=15*8                         // x4, x5, x19, x20, x21, x22, x23, x24, x25, x26, x27, x28, SP, LR, FP saved.
SAVE_SIZE_AND_METHOD=SAVE_SIZE+8


    mov x9, sp                         // Save stack pointer.
    .cfi_register sp,x9

    add x10, x2, # SAVE_SIZE_AND_METHOD  // calculate size of frame.
    sub x10, sp, x10                   // Calculate SP position - saves + ArtMethod* + args
    and x10, x10, # ~0xf               // Enforce 16 byte stack alignment.
    mov sp, x10                        // Set new SP.

    sub x10, x9, #SAVE_SIZE            // Calculate new FP (later). Done here as we must move SP
    .cfi_def_cfa_register x10          // before this.
    .cfi_adjust_cfa_offset SAVE_SIZE

    str x28, [x10, #112]
    .cfi_rel_offset x28, 112

    stp x26, x27, [x10, #96]
    .cfi_rel_offset x26, 96
    .cfi_rel_offset x27, 104

    stp x24, x25, [x10, #80]
    .cfi_rel_offset x24, 80
    .cfi_rel_offset x25, 88

    stp x22, x23, [x10, #64]
    .cfi_rel_offset x22, 64
    .cfi_rel_offset x23, 72

    stp x20, x21, [x10, #48]
    .cfi_rel_offset x20, 48
    .cfi_rel_offset x21, 56

    stp x9, x19, [x10, #32]            // Save old stack pointer and x19.
    .cfi_rel_offset sp, 32
    .cfi_rel_offset x19, 40

    stp x4, x5, [x10, #16]             // Save result and shorty addresses.
    .cfi_rel_offset x4, 16
    .cfi_rel_offset x5, 24

    stp xFP, xLR, [x10]                // Store LR & FP.
    .cfi_rel_offset x29, 0
    .cfi_rel_offset x30, 8

    mov xFP, x10                       // Use xFP now, as it's callee-saved.
    .cfi_def_cfa_register x29
    mov xSELF, x3                      // Move thread pointer into SELF register.

    // Copy arguments into stack frame.
    // Use simple copy routine for now.
    // 4 bytes per slot.
    // X1 - source address
    // W2 - args length
    // X9 - destination address.
    // W10 - temporary
    add x9, sp, #8                     // Destination address is bottom of stack + null.

    // Copy parameters into the stack. Use numeric label as this is a macro and Clang's assembler
    // does not have unique-id variables.
1:
    cmp w2, #0
    beq 2f
    sub w2, w2, #4                     // Need 65536 bytes of range.
    ldr w10, [x1, x2]
    str w10, [x9, x2]

    b 1b

2:
    // Store null into ArtMethod* at bottom of frame.
    str xzr, [sp]
.endm
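
// Worked example of the frame-size arithmetic above (illustrative): with 20
// bytes of args in w2, x10 = 20 + SAVE_SIZE_AND_METHOD (128) = 148, and the
// new SP is (old SP - 148) rounded down to a 16-byte boundary. The copied
// args then sit just above the null ArtMethod* slot at [sp, #0].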

.macro INVOKE_STUB_CALL_AND_RETURN

    // load method-> METHOD_QUICK_CODE_OFFSET
    ldr x9, [x0, #ART_METHOD_QUICK_CODE_OFFSET_64]
    // Branch to method.
    blr x9

    // Restore return value address and shorty address.
    ldp x4, x5, [xFP, #16]
    .cfi_restore x4
    .cfi_restore x5

    ldr x28, [xFP, #112]
    .cfi_restore x28

    ldp x26, x27, [xFP, #96]
    .cfi_restore x26
    .cfi_restore x27

    ldp x24, x25, [xFP, #80]
    .cfi_restore x24
    .cfi_restore x25

    ldp x22, x23, [xFP, #64]
    .cfi_restore x22
    .cfi_restore x23

    ldp x20, x21, [xFP, #48]
    .cfi_restore x20
    .cfi_restore x21

    // Store result (w0/x0/s0/d0) appropriately, depending on resultType.
    ldrb w10, [x5]

    // Check the return type and store the correct register into the jvalue in memory.
    // Use numeric label as this is a macro and Clang's assembler does not have unique-id variables.

    // Don't set anything for a void type.
    cmp w10, #'V'
    beq 3f

    // Is it a double?
    cmp w10, #'D'
    bne 1f
    str d0, [x4]
    b 3f

1:  // Is it a float?
    cmp w10, #'F'
    bne 2f
    str s0, [x4]
    b 3f

2:  // Just store x0. Doesn't matter if it is 64 or 32 bits.
    str x0, [x4]

3:  // Finish up.
    ldp x2, x19, [xFP, #32]            // Restore stack pointer and x19.
    .cfi_restore x19
    mov sp, x2
    .cfi_restore sp

    ldp xFP, xLR, [xFP]                // Restore old frame pointer and link register.
    .cfi_restore x29
    .cfi_restore x30

    ret

.endm


/*
 *  extern"C" void art_quick_invoke_stub(ArtMethod *method,   x0
 *                                       uint32_t  *args,     x1
 *                                       uint32_t argsize,    w2
 *                                       Thread *self,        x3
 *                                       JValue *result,      x4
 *                                       char   *shorty);     x5
 *  +----------------------+
 *  |                      |
 *  |  C/C++ frame         |
 *  |       LR''           |
 *  |       FP''           | <- SP'
 *  +----------------------+
 *  +----------------------+
 *  |        x28           | <- TODO: Remove callee-saves.
 *  |         :            |
 *  |        x19           |
 *  |        SP'           |
 *  |        X5            |
 *  |        X4            |        Saved registers
 *  |        LR'           |
 *  |        FP'           | <- FP
 *  +----------------------+
 *  | uint32_t out[n-1]    |
 *  |    :      :          |        Outs
 *  | uint32_t out[0]      |
 *  | ArtMethod*           | <- SP  value=null
 *  +----------------------+
 *
 * Outgoing registers:
 *  x0    - Method*
 *  x1-x7 - integer parameters.
 *  d0-d7 - Floating point parameters.
 *  xSELF = self
 *  SP = & of ArtMethod*
 *  x1 = "this" pointer.
 *
 */
ENTRY art_quick_invoke_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr x11, .LstoreW2
    adr x12, .LstoreX2
    adr x13, .LstoreS0
    adr x14, .LstoreD0

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1                    // Load shorty address, plus one to skip return value.
    ldr w1, [x9],#4                    // Load "this" parameter, and increment arg pointer.

    // Loop to fill registers.
.LfillRegisters:
    ldrb w17, [x10], #1                // Load next character in signature, and increment.
    cbz w17, .LcallFunction            // Exit at end of signature. Shorty 0 terminated.

    cmp w17, #'F'                      // is this a float?
    bne .LisDouble

    cmp x15, # 8*12                    // Skip this load if all registers full.
    beq .Ladvance4

    add x17, x13, x15                  // Calculate subroutine to jump to.
    br x17

.LisDouble:
    cmp w17, #'D'                      // is this a double?
    bne .LisLong

    cmp x15, # 8*12                    // Skip this load if all registers full.
    beq .Ladvance8

    add x17, x14, x15                  // Calculate subroutine to jump to.
    br x17

.LisLong:
    cmp w17, #'J'                      // is this a long?
    bne .LisOther

    cmp x8, # 6*12                     // Skip this load if all registers full.
    beq .Ladvance8

    add x17, x12, x8                   // Calculate subroutine to jump to.
    br x17

.LisOther:                             // Everything else takes one vReg.
    cmp x8, # 6*12                     // Skip this load if all registers full.
    beq .Ladvance4

    add x17, x11, x8                   // Calculate subroutine to jump to.
    br x17

.Ladvance4:
    add x9, x9, #4
    b .LfillRegisters

.Ladvance8:
    add x9, x9, #8
    b .LfillRegisters

// Macro for loading a parameter into a register.
//  counter - the register with offset into these tables
//  size - the size of the register - 4 or 8 bytes.
//  register - the name of the register to be loaded.
.macro LOADREG counter size register return
    ldr \register , [x9], #\size
    add \counter, \counter, 12
    b \return
.endm

// Store ints.
.LstoreW2:
    LOADREG x8 4 w2 .LfillRegisters
    LOADREG x8 4 w3 .LfillRegisters
    LOADREG x8 4 w4 .LfillRegisters
    LOADREG x8 4 w5 .LfillRegisters
    LOADREG x8 4 w6 .LfillRegisters
    LOADREG x8 4 w7 .LfillRegisters

// Store longs.
.LstoreX2:
    LOADREG x8 8 x2 .LfillRegisters
    LOADREG x8 8 x3 .LfillRegisters
    LOADREG x8 8 x4 .LfillRegisters
    LOADREG x8 8 x5 .LfillRegisters
    LOADREG x8 8 x6 .LfillRegisters
    LOADREG x8 8 x7 .LfillRegisters

// Store singles.
.LstoreS0:
    LOADREG x15 4 s0 .LfillRegisters
    LOADREG x15 4 s1 .LfillRegisters
    LOADREG x15 4 s2 .LfillRegisters
    LOADREG x15 4 s3 .LfillRegisters
    LOADREG x15 4 s4 .LfillRegisters
    LOADREG x15 4 s5 .LfillRegisters
    LOADREG x15 4 s6 .LfillRegisters
    LOADREG x15 4 s7 .LfillRegisters

// Store doubles.
.LstoreD0:
    LOADREG x15 8 d0 .LfillRegisters
    LOADREG x15 8 d1 .LfillRegisters
    LOADREG x15 8 d2 .LfillRegisters
    LOADREG x15 8 d3 .LfillRegisters
    LOADREG x15 8 d4 .LfillRegisters
    LOADREG x15 8 d5 .LfillRegisters
    LOADREG x15 8 d6 .LfillRegisters
    LOADREG x15 8 d7 .LfillRegisters


.LcallFunction:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_stub
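
// Worked example of the shorty walk above (illustrative): for a non-static
// method with shorty "DIJF" (double return; int, long, float args), w1 is
// loaded with "this" first, then 'I' -> w2 (x8 becomes 12), 'J' -> x3
// (x8 becomes 24), 'F' -> s0 (x15 becomes 12). Each LOADREG entry is three
// 4-byte instructions, which is why the table counters advance by 12.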

/*  extern"C"
 *     void art_quick_invoke_static_stub(ArtMethod *method,   x0
 *                                       uint32_t  *args,     x1
 *                                       uint32_t argsize,    w2
 *                                       Thread *self,        x3
 *                                       JValue *result,      x4
 *                                       char   *shorty);     x5
 */
ENTRY art_quick_invoke_static_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr x11, .LstoreW1_2
    adr x12, .LstoreX1_2
    adr x13, .LstoreS0_2
    adr x14, .LstoreD0_2

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1                    // Load shorty address, plus one to skip return value.

    // Loop to fill registers.
.LfillRegisters2:
    ldrb w17, [x10], #1                // Load next character in signature, and increment.
    cbz w17, .LcallFunction2           // Exit at end of signature. Shorty 0 terminated.

    cmp w17, #'F'                      // is this a float?
    bne .LisDouble2

    cmp x15, # 8*12                    // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x13, x15                  // Calculate subroutine to jump to.
    br x17

.LisDouble2:
    cmp w17, #'D'                      // is this a double?
    bne .LisLong2

    cmp x15, # 8*12                    // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x14, x15                  // Calculate subroutine to jump to.
    br x17

.LisLong2:
    cmp w17, #'J'                      // is this a long?
    bne .LisOther2

    cmp x8, # 7*12                     // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x12, x8                   // Calculate subroutine to jump to.
    br x17

.LisOther2:                            // Everything else takes one vReg.
    cmp x8, # 7*12                     // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x11, x8                   // Calculate subroutine to jump to.
    br x17

.Ladvance4_2:
    add x9, x9, #4
    b .LfillRegisters2

.Ladvance8_2:
    add x9, x9, #8
    b .LfillRegisters2

// Store ints.
.LstoreW1_2:
    LOADREG x8 4 w1 .LfillRegisters2
    LOADREG x8 4 w2 .LfillRegisters2
    LOADREG x8 4 w3 .LfillRegisters2
    LOADREG x8 4 w4 .LfillRegisters2
    LOADREG x8 4 w5 .LfillRegisters2
    LOADREG x8 4 w6 .LfillRegisters2
    LOADREG x8 4 w7 .LfillRegisters2

// Store longs.
.LstoreX1_2:
    LOADREG x8 8 x1 .LfillRegisters2
    LOADREG x8 8 x2 .LfillRegisters2
    LOADREG x8 8 x3 .LfillRegisters2
    LOADREG x8 8 x4 .LfillRegisters2
    LOADREG x8 8 x5 .LfillRegisters2
    LOADREG x8 8 x6 .LfillRegisters2
    LOADREG x8 8 x7 .LfillRegisters2

// Store singles.
.LstoreS0_2:
    LOADREG x15 4 s0 .LfillRegisters2
    LOADREG x15 4 s1 .LfillRegisters2
    LOADREG x15 4 s2 .LfillRegisters2
    LOADREG x15 4 s3 .LfillRegisters2
    LOADREG x15 4 s4 .LfillRegisters2
    LOADREG x15 4 s5 .LfillRegisters2
    LOADREG x15 4 s6 .LfillRegisters2
    LOADREG x15 4 s7 .LfillRegisters2

// Store doubles.
.LstoreD0_2:
    LOADREG x15 8 d0 .LfillRegisters2
    LOADREG x15 8 d1 .LfillRegisters2
    LOADREG x15 8 d2 .LfillRegisters2
    LOADREG x15 8 d3 .LfillRegisters2
    LOADREG x15 8 d4 .LfillRegisters2
    LOADREG x15 8 d5 .LfillRegisters2
    LOADREG x15 8 d6 .LfillRegisters2
    LOADREG x15 8 d7 .LfillRegisters2


.LcallFunction2:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_static_stub



/*  extern"C" void art_quick_osr_stub(void** stack,                x0
 *                                    size_t stack_size_in_bytes,  x1
 *                                    const uint8_t* native_pc,    x2
 *                                    JValue *result,              x3
 *                                    char *shorty,                x4
 *                                    Thread *self)                x5
 */
ENTRY art_quick_osr_stub
SAVE_SIZE=15*8                         // x3, x4, x19, x20, x21, x22, x23, x24, x25, x26, x27, x28, SP, LR, FP saved.
    mov x9, sp                         // Save stack pointer.
    .cfi_register sp,x9

    sub x10, sp, # SAVE_SIZE
    and x10, x10, # ~0xf               // Enforce 16 byte stack alignment.
    mov sp, x10                        // Set new SP.

    str x28, [sp, #112]
    stp x26, x27, [sp, #96]
    stp x24, x25, [sp, #80]
    stp x22, x23, [sp, #64]
    stp x20, x21, [sp, #48]
    stp x9, x19, [sp, #32]             // Save old stack pointer and x19.
    stp x3, x4, [sp, #16]              // Save result and shorty addresses.
    stp xFP, xLR, [sp]                 // Store LR & FP.
    mov xSELF, x5                      // Move thread pointer into SELF register.

    sub sp, sp, #16
    str xzr, [sp]                      // Store null for ArtMethod* slot
    // Branch to stub.
    bl .Losr_entry
    add sp, sp, #16

    // Restore return value address and shorty address.
    ldp x3, x4, [sp, #16]
    ldr x28, [sp, #112]
    ldp x26, x27, [sp, #96]
    ldp x24, x25, [sp, #80]
    ldp x22, x23, [sp, #64]
    ldp x20, x21, [sp, #48]

    // Store result (w0/x0/s0/d0) appropriately, depending on resultType.
    ldrb w10, [x4]

    // Check the return type and store the correct register into the jvalue in memory.

    // Don't set anything for a void type.
    cmp w10, #'V'
    beq .Losr_exit

    // Is it a double?
    cmp w10, #'D'
    bne .Lno_double
    str d0, [x3]
    b .Losr_exit

.Lno_double:  // Is it a float?
    cmp w10, #'F'
    bne .Lno_float
    str s0, [x3]
    b .Losr_exit

.Lno_float:  // Just store x0. Doesn't matter if it is 64 or 32 bits.
    str x0, [x3]

.Losr_exit:  // Finish up.
    ldp x2, x19, [sp, #32]             // Restore stack pointer and x19.
    ldp xFP, xLR, [sp]                 // Restore old frame pointer and link register.
    mov sp, x2
    ret

.Losr_entry:
    // Update stack pointer for the callee
    sub sp, sp, x1

    // Update link register slot expected by the callee.
    sub w1, w1, #8
    str lr, [sp, x1]

    // Copy arguments into stack frame.
    // Use simple copy routine for now.
    // 4 bytes per slot.
    // X0 - source address
    // W1 - args length
    // SP - destination address.
    // W10 - temporary
.Losr_loop_entry:
    cmp w1, #0
    beq .Losr_loop_exit
    sub w1, w1, #4
    ldr w10, [x0, x1]
    str w10, [sp, x1]
    b .Losr_loop_entry

.Losr_loop_exit:
    // Branch to the OSR entry point.
    br x2

END art_quick_osr_stub

    /*
     * On entry x0 is uintptr_t* gprs_ and x1 is uint64_t* fprs_.
     */

ENTRY art_quick_do_long_jump
    // Load FPRs
    ldp d0, d1, [x1], #16
    ldp d2, d3, [x1], #16
    ldp d4, d5, [x1], #16
    ldp d6, d7, [x1], #16
    ldp d8, d9, [x1], #16
    ldp d10, d11, [x1], #16
    ldp d12, d13, [x1], #16
    ldp d14, d15, [x1], #16
    ldp d16, d17, [x1], #16
    ldp d18, d19, [x1], #16
    ldp d20, d21, [x1], #16
    ldp d22, d23, [x1], #16
    ldp d24, d25, [x1], #16
    ldp d26, d27, [x1], #16
    ldp d28, d29, [x1], #16
    ldp d30, d31, [x1]

    // Load GPRs
    // TODO: lots of those are smashed, could optimize.
    add x0, x0, #30*8
    ldp x30, x1, [x0], #-16            // LR & SP
    ldp x28, x29, [x0], #-16
    ldp x26, x27, [x0], #-16
    ldp x24, x25, [x0], #-16
    ldp x22, x23, [x0], #-16
    ldp x20, x21, [x0], #-16
    ldp x18, x19, [x0], #-16
    ldp x16, x17, [x0], #-16
    ldp x14, x15, [x0], #-16
    ldp x12, x13, [x0], #-16
    ldp x10, x11, [x0], #-16
    ldp x8, x9, [x0], #-16
    ldp x6, x7, [x0], #-16
    ldp x4, x5, [x0], #-16
    ldp x2, x3, [x0], #-16
    mov sp, x1

    // Need to load PC, it's at the end (after the space for the unused XZR). Use x1.
    ldr x1, [x0, #33*8]
    // And the value of x0.
    ldr x0, [x0]

    br x1
END art_quick_do_long_jump
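
// Layout of the gprs_ array implied by the loads above: slots 0-30 hold
// x0-x30 (slot 30 being LR), slot 31 holds SP, slot 32 is the pad for the
// unused XZR, and slot 33 holds the PC to branch to. After the fifteen
// post-decrementing ldp pairs, x0 points back at the start of the array,
// which is why the final loads use offsets #33*8 and #0.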

    /*
     * Entry from managed code that calls artLockObjectFromCode, may block for GC. x0 holds the
     * possibly null object to lock.
     *
     * Derived from arm32 code.
     */
    .extern artLockObjectFromCode
ENTRY art_quick_lock_object
    cbz w0, .Lslow_lock
    add x4, x0, #MIRROR_OBJECT_LOCK_WORD_OFFSET  // exclusive load/store has no immediate anymore
.Lretry_lock:
    ldr w2, [xSELF, #THREAD_ID_OFFSET]  // TODO: Can the thread ID really change during the loop?
    ldxr w1, [x4]
    mov x3, x1
    and w3, w3, #LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED  // zero the gc bits
    cbnz w3, .Lnot_unlocked            // already thin locked
    // unlocked case - x1: original lock word that's zero except for the read barrier bits.
    orr x2, x1, x2                     // x2 holds thread id with count of 0 with preserved read barrier bits
    stxr w3, w2, [x4]
    cbnz w3, .Llock_stxr_fail          // store failed, retry
    dmb ishld                          // full (LoadLoad|LoadStore) memory barrier
    ret
.Lnot_unlocked:  // x1: original lock word
    lsr w3, w1, LOCK_WORD_STATE_SHIFT
    cbnz w3, .Lslow_lock               // if either of the top two bits are set, go slow path
    eor w2, w1, w2                     // lock_word.ThreadId() ^ self->ThreadId()
    uxth w2, w2                        // zero top 16 bits
    cbnz w2, .Lslow_lock               // thread ids don't match -> contention, go slow path
                                       // else thread ids match -> recursive thin lock, fall through
    mov x3, x1                         // copy the lock word to check count overflow.
    and w3, w3, #LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED  // zero the gc bits.
    add w2, w3, #LOCK_WORD_THIN_LOCK_COUNT_ONE  // increment count in lock word placing in w2 to check overflow
    lsr w3, w2, #LOCK_WORD_GC_STATE_SHIFT  // if the first gc state bit is set, we overflowed.
    cbnz w3, .Lslow_lock               // if we overflow the count go slow path
    add w2, w1, #LOCK_WORD_THIN_LOCK_COUNT_ONE  // increment count for real
    stxr w3, w2, [x4]
    cbnz w3, .Llock_stxr_fail          // store failed, retry
    ret
.Llock_stxr_fail:
    b .Lretry_lock                     // retry
.Lslow_lock:
    SETUP_SAVE_REFS_ONLY_FRAME         // save callee saves in case we block
    mov x1, xSELF                      // pass Thread::Current
    bl artLockObjectFromCode           // (Object* obj, Thread*)
    RESTORE_SAVE_REFS_ONLY_FRAME
    RETURN_IF_W0_IS_ZERO_OR_DELIVER
END art_quick_lock_object
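
// The fast path above implements ART's thin locks: the lock word packs the
// owner's thread id and a recursion count, updated via a ldxr/stxr retry
// loop. Anything it cannot handle inline - a null object, an inflated or
// contended lock, or recursion-count overflow - is handed to
// artLockObjectFromCode.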

ENTRY art_quick_lock_object_no_inline
    SETUP_SAVE_REFS_ONLY_FRAME         // save callee saves in case we block
    mov x1, xSELF                      // pass Thread::Current
    bl artLockObjectFromCode           // (Object* obj, Thread*)
    RESTORE_SAVE_REFS_ONLY_FRAME
    RETURN_IF_W0_IS_ZERO_OR_DELIVER
END art_quick_lock_object_no_inline

    /*
     * Entry from managed code that calls artUnlockObjectFromCode and delivers exception on failure.
     * x0 holds the possibly null object to unlock.
     *
     * Derived from arm32 code.
     */
    .extern artUnlockObjectFromCode
ENTRY art_quick_unlock_object
    cbz x0, .Lslow_unlock
    add x4, x0, #MIRROR_OBJECT_LOCK_WORD_OFFSET  // exclusive load/store has no immediate anymore
.Lretry_unlock:
#ifndef USE_READ_BARRIER
    ldr w1, [x4]
#else
    ldxr w1, [x4]                      // Need to use atomic instructions for read barrier
#endif
    lsr w2, w1, LOCK_WORD_STATE_SHIFT
    cbnz w2, .Lslow_unlock             // if either of the top two bits are set, go slow path
    ldr w2, [xSELF, #THREAD_ID_OFFSET]
    mov x3, x1                         // copy lock word to check thread id equality
    and w3, w3, #LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED  // zero the gc bits
    eor w3, w3, w2                     // lock_word.ThreadId() ^ self->ThreadId()
    uxth w3, w3                        // zero top 16 bits
    cbnz w3, .Lslow_unlock             // do lock word and self thread id's match?
    mov x3, x1                         // copy lock word to detect transition to unlocked
    and w3, w3, #LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED  // zero the gc bits
    cmp w3, #LOCK_WORD_THIN_LOCK_COUNT_ONE
    bpl .Lrecursive_thin_unlock
    // transition to unlocked
    mov x3, x1
    and w3, w3, #LOCK_WORD_GC_STATE_MASK_SHIFTED  // w3: zero except for the preserved read barrier bits
    dmb ish                            // full (LoadStore|StoreStore) memory barrier
#ifndef USE_READ_BARRIER
    str w3, [x4]
#else
    stxr w2, w3, [x4]                  // Need to use atomic instructions for read barrier
    cbnz w2, .Lunlock_stxr_fail        // store failed, retry
#endif
    ret
.Lrecursive_thin_unlock:  // w1: original lock word
    sub w1, w1, #LOCK_WORD_THIN_LOCK_COUNT_ONE  // decrement count
#ifndef USE_READ_BARRIER
    str w1, [x4]
#else
    stxr w2, w1, [x4]                  // Need to use atomic instructions for read barrier
    cbnz w2, .Lunlock_stxr_fail        // store failed, retry
#endif
    ret
.Lunlock_stxr_fail:
    b .Lretry_unlock                   // retry
.Lslow_unlock:
    SETUP_SAVE_REFS_ONLY_FRAME         // save callee saves in case exception allocation triggers GC
    mov x1, xSELF                      // pass Thread::Current
    bl artUnlockObjectFromCode         // (Object* obj, Thread*)
    RESTORE_SAVE_REFS_ONLY_FRAME
    RETURN_IF_W0_IS_ZERO_OR_DELIVER
END art_quick_unlock_object

ENTRY art_quick_unlock_object_no_inline
    SETUP_SAVE_REFS_ONLY_FRAME         // save callee saves in case exception allocation triggers GC
    mov x1, xSELF                      // pass Thread::Current
    bl artUnlockObjectFromCode         // (Object* obj, Thread*)
    RESTORE_SAVE_REFS_ONLY_FRAME
    RETURN_IF_W0_IS_ZERO_OR_DELIVER
END art_quick_unlock_object_no_inline
1295
Andreas Gampe525cde22014-04-22 15:44:50 -07001296 /*
1297 * Entry from managed code that calls artIsAssignableFromCode and on failure calls
1298 * artThrowClassCastException.
1299 */
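    /*
     * Roughly, in C (a sketch; x0/x1 carry the two Class* arguments on entry):
     *
     *   if (artIsAssignableFromCode(klass, ref_class) == 0)
     *     artThrowClassCastException(klass, ref_class, Thread::Current());  // does not return
     */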
    .extern artThrowClassCastException
ENTRY art_quick_check_cast
    // Store arguments and link register
    // Stack needs to be 16B aligned on calls.
    SAVE_TWO_REGS_INCREASE_FRAME x0, x1, 32
    SAVE_REG xLR, 24

    // Call runtime code
    bl artIsAssignableFromCode

    // Check for exception
    cbz x0, .Lthrow_class_cast_exception

    // Restore and return
    .cfi_remember_state
    RESTORE_REG xLR, 24
    RESTORE_TWO_REGS_DECREASE_FRAME x0, x1, 32
    ret
    .cfi_restore_state                        // Reset unwind info so following code unwinds.

.Lthrow_class_cast_exception:
    // Restore
    RESTORE_REG xLR, 24
    RESTORE_TWO_REGS_DECREASE_FRAME x0, x1, 32

    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME         // save all registers as basis for long jump context
    mov x2, xSELF                             // pass Thread::Current
    bl artThrowClassCastException             // (Class*, Class*, Thread*)
    brk 0                                     // We should not return here...
END art_quick_check_cast

// Restore xReg's value from [sp, #offset] if xReg is not the same as xExclude.
.macro POP_REG_NE xReg, offset, xExclude
    .ifnc \xReg, \xExclude
        ldr \xReg, [sp, #\offset]             // restore xReg
        .cfi_restore \xReg
    .endif
.endm

// Restore xReg1's value from [sp, #offset] if xReg1 is not the same as xExclude.
// Restore xReg2's value from [sp, #(offset + 8)] if xReg2 is not the same as xExclude.
.macro POP_REGS_NE xReg1, xReg2, offset, xExclude
    .ifc \xReg1, \xExclude
        ldr \xReg2, [sp, #(\offset + 8)]      // restore xReg2
    .else
        .ifc \xReg2, \xExclude
            ldr \xReg1, [sp, #\offset]        // restore xReg1
        .else
            ldp \xReg1, \xReg2, [sp, #\offset]  // restore xReg1 and xReg2
        .endif
    .endif
    .cfi_restore \xReg1
    .cfi_restore \xReg2
.endm

    /*
     * Macro to insert a read barrier, only used in art_quick_aput_obj.
     * xDest, wDest and xObj are registers, offset is a defined literal such as
     * MIRROR_OBJECT_CLASS_OFFSET. Dest needs both x and w versions of the same register to handle
     * name mismatch between instructions. This macro uses the lower 32b of register when possible.
     * TODO: When read barrier has a fast path, add heap unpoisoning support for the fast path.
     */
.macro READ_BARRIER xDest, wDest, xObj, xTemp, wTemp, offset, number
#ifdef USE_READ_BARRIER
#ifdef USE_BAKER_READ_BARRIER
    ldr \wTemp, [\xObj, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
    tbnz \wTemp, #LOCK_WORD_READ_BARRIER_STATE_SHIFT, .Lrb_slowpath\number
    // False dependency to avoid needing load/load fence.
    add \xObj, \xObj, \xTemp, lsr #32
    ldr \wDest, [\xObj, #\offset]             // Heap reference = 32b. This also zero-extends to \xDest.
    UNPOISON_HEAP_REF \wDest
    b .Lrb_exit\number
#endif
.Lrb_slowpath\number:
    // Store registers used in art_quick_aput_obj (x0-x4, LR), stack is 16B aligned.
    SAVE_TWO_REGS_INCREASE_FRAME x0, x1, 48
    SAVE_TWO_REGS x2, x3, 16
    SAVE_TWO_REGS x4, xLR, 32

    // mov x0, \xRef                          // pass ref in x0 (no-op for now since parameter ref is unused)
    .ifnc \xObj, x1
        mov x1, \xObj                         // pass xObj
    .endif
    mov w2, #\offset                          // pass offset
    bl artReadBarrierSlow                     // artReadBarrierSlow(ref, xObj, offset)
    // No need to unpoison return value in w0, artReadBarrierSlow() would do the unpoisoning.
    .ifnc \wDest, w0
        mov \wDest, w0                        // save return value in wDest
    .endif

    // Conditionally restore saved registers
    POP_REG_NE x0, 0, \xDest
    POP_REG_NE x1, 8, \xDest
    POP_REG_NE x2, 16, \xDest
    POP_REG_NE x3, 24, \xDest
    POP_REG_NE x4, 32, \xDest
    RESTORE_REG xLR, 40
    DECREASE_FRAME 48
.Lrb_exit\number:
#else
    ldr \wDest, [\xObj, #\offset]             // Heap reference = 32b. This also zero-extends to \xDest.
    UNPOISON_HEAP_REF \wDest
#endif  // USE_READ_BARRIER
.endm
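    /*
     * Note on the Baker fast path above, as a hedged C-ish sketch: the gray check
     * reads the lock word, and "add \xObj, \xObj, \xTemp, lsr #32" adds zero (the
     * lock word is 32 bits, so the shifted value is 0) but creates an address
     * dependency that orders the reference load after the lock word load with no dmb:
     *
     *   uint32_t lw = obj->monitor_;
     *   if (lw & (1u << LOCK_WORD_READ_BARRIER_STATE_SHIFT))
     *     ref = artReadBarrierSlow(ref, obj, offset);                     // slow path
     *   else
     *     ref = *(uint32_t*)((char*)obj + ((uint64_t)lw >> 32) + offset); // dependent load
     */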

    /*
     * Entry from managed code for array put operations of objects where the value being stored
     * needs to be checked for compatibility.
     * x0 = array, x1 = index, x2 = value
     *
     * Currently all values should fit into w0/w1/w2, and w1 always will as indices are 32b. We
     * assume, though, that the upper 32b are zeroed out. At least for x1/w1 we can do better by
     * using index-zero-extension in load/stores.
     *
     * Temporaries: x3, x4
     * TODO: x4 OK? ip seems wrong here.
     */
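    /*
     * The stores below, sketched in C (helper names assumed; the card mark writes
     * the low byte of the card table base, ART's idiom for the dirty-card value):
     *
     *   if (value == null) { array->data[index] = null; return; }
     *   if (value->klass_ == array->klass_->component_type_ ||
     *       artIsAssignableFromCode(array->klass_->component_type_, value->klass_)) {
     *     array->data[index] = value;                       // 32-bit compressed reference
     *     card_table[(uintptr_t)array >> 7] = (uint8_t)(uintptr_t)card_table;
     *   } else {
     *     artThrowArrayStoreException(array, value, self);  // does not return
     *   }
     */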
ENTRY art_quick_aput_obj_with_null_and_bound_check
    tst x0, x0
    bne art_quick_aput_obj_with_bound_check
    b art_quick_throw_null_pointer_exception
END art_quick_aput_obj_with_null_and_bound_check

ENTRY art_quick_aput_obj_with_bound_check
    ldr w3, [x0, #MIRROR_ARRAY_LENGTH_OFFSET]
    cmp w3, w1
    bhi art_quick_aput_obj
    mov x0, x1
    mov x1, x3
    b art_quick_throw_array_bounds
END art_quick_aput_obj_with_bound_check

#ifdef USE_READ_BARRIER
    .extern artReadBarrierSlow
#endif
ENTRY art_quick_aput_obj
    cbz x2, .Ldo_aput_null
    READ_BARRIER x3, w3, x0, x3, w3, MIRROR_OBJECT_CLASS_OFFSET, 0       // Heap reference = 32b
                                              // This also zero-extends to x3
    READ_BARRIER x3, w3, x3, x4, w4, MIRROR_CLASS_COMPONENT_TYPE_OFFSET, 1  // Heap reference = 32b
                                              // This also zero-extends to x3
    READ_BARRIER x4, w4, x2, x4, w4, MIRROR_OBJECT_CLASS_OFFSET, 2       // Heap reference = 32b
                                              // This also zero-extends to x4
    cmp w3, w4                                // value's type == array's component type - trivial assignability
    bne .Lcheck_assignability
.Ldo_aput:
    add x3, x0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET
                                              // "Compress" = do nothing
    POISON_HEAP_REF w2
    str w2, [x3, x1, lsl #2]                  // Heap reference = 32b
    ldr x3, [xSELF, #THREAD_CARD_TABLE_OFFSET]
    lsr x0, x0, #7
    strb w3, [x3, x0]
    ret
.Ldo_aput_null:
    add x3, x0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET
                                              // "Compress" = do nothing
    str w2, [x3, x1, lsl #2]                  // Heap reference = 32b
    ret
.Lcheck_assignability:
    // Store arguments and link register
    SAVE_TWO_REGS_INCREASE_FRAME x0, x1, 32
    SAVE_TWO_REGS x2, xLR, 16

    // Call runtime code
    mov x0, x3                                // Heap reference, 32b, "uncompress" = do nothing, already zero-extended
    mov x1, x4                                // Heap reference, 32b, "uncompress" = do nothing, already zero-extended
    bl artIsAssignableFromCode

    // Check for exception
    cbz x0, .Lthrow_array_store_exception

    // Restore
    .cfi_remember_state
    RESTORE_TWO_REGS x2, xLR, 16
    RESTORE_TWO_REGS_DECREASE_FRAME x0, x1, 32

    add x3, x0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET
                                              // "Compress" = do nothing
    POISON_HEAP_REF w2
    str w2, [x3, x1, lsl #2]                  // Heap reference = 32b
    ldr x3, [xSELF, #THREAD_CARD_TABLE_OFFSET]
    lsr x0, x0, #7
    strb w3, [x3, x0]
    ret
    .cfi_restore_state                        // Reset unwind info so following code unwinds.
.Lthrow_array_store_exception:
    RESTORE_TWO_REGS x2, xLR, 16
    RESTORE_TWO_REGS_DECREASE_FRAME x0, x1, 32

    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME
    mov x1, x2                                // Pass value.
    mov x2, xSELF                             // Pass Thread::Current.
    bl artThrowArrayStoreException            // (Object*, Object*, Thread*).
    brk 0                                     // Unreached.
END art_quick_aput_obj

// Macro to facilitate adding new allocation entrypoints.
.macro ONE_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_SAVE_REFS_ONLY_FRAME                // save callee saves in case of GC
    mov x1, xSELF                             // pass Thread::Current
    bl \entrypoint                            // (uint32_t type_idx, Thread*)
    RESTORE_SAVE_REFS_ONLY_FRAME
    \return
END \name
.endm

// Macro to facilitate adding new allocation entrypoints.
.macro TWO_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_SAVE_REFS_ONLY_FRAME                // save callee saves in case of GC
    mov x2, xSELF                             // pass Thread::Current
    bl \entrypoint                            // (uint32_t type_idx, Method* method, Thread*)
    RESTORE_SAVE_REFS_ONLY_FRAME
    \return
END \name
.endm

// Macro to facilitate adding new allocation entrypoints.
.macro THREE_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_SAVE_REFS_ONLY_FRAME                // save callee saves in case of GC
    mov x3, xSELF                             // pass Thread::Current
    bl \entrypoint
    RESTORE_SAVE_REFS_ONLY_FRAME
    \return
END \name
.endm

// Macro to facilitate adding new allocation entrypoints.
.macro FOUR_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_SAVE_REFS_ONLY_FRAME                // save callee saves in case of GC
    mov x4, xSELF                             // pass Thread::Current
    bl \entrypoint
    RESTORE_SAVE_REFS_ONLY_FRAME
    \return
    DELIVER_PENDING_EXCEPTION
END \name
.endm

// Macros that take advantage of code similarities for downcalls with referrer.
.macro ONE_ARG_REF_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_SAVE_REFS_ONLY_FRAME                // save callee saves in case of GC
    ldr x1, [sp, #FRAME_SIZE_SAVE_REFS_ONLY]  // Load referrer
    mov x2, xSELF                             // pass Thread::Current
    bl \entrypoint                            // (uint32_t type_idx, ArtMethod* referrer, Thread*)
    RESTORE_SAVE_REFS_ONLY_FRAME
    \return
END \name
.endm

.macro TWO_ARG_REF_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_SAVE_REFS_ONLY_FRAME                // save callee saves in case of GC
    ldr x2, [sp, #FRAME_SIZE_SAVE_REFS_ONLY]  // Load referrer
    mov x3, xSELF                             // pass Thread::Current
    bl \entrypoint
    RESTORE_SAVE_REFS_ONLY_FRAME
    \return
END \name
.endm

.macro THREE_ARG_REF_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_SAVE_REFS_ONLY_FRAME                // save callee saves in case of GC
    ldr x3, [sp, #FRAME_SIZE_SAVE_REFS_ONLY]  // Load referrer
    mov x4, xSELF                             // pass Thread::Current
    bl \entrypoint
    RESTORE_SAVE_REFS_ONLY_FRAME
    \return
END \name
.endm

.macro RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
    cbz w0, 1f                                // result zero branch over
    ret                                       // return
1:
    DELIVER_PENDING_EXCEPTION
.endm

    /*
     * Entry from managed code that calls artHandleFillArrayDataFromCode and delivers exception on
     * failure.
     */
TWO_ARG_REF_DOWNCALL art_quick_handle_fill_data, artHandleFillArrayDataFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER

    /*
     * Entry from managed code when static storage is uninitialized; this stub will run the class
     * initializer and deliver the exception on error. On success the static storage base is
     * returned.
     */
ONE_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER

ONE_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
ONE_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER

ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1

TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1

TWO_ARG_REF_DOWNCALL art_quick_set8_static, artSet8StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
TWO_ARG_REF_DOWNCALL art_quick_set16_static, artSet16StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER

THREE_ARG_REF_DOWNCALL art_quick_set8_instance, artSet8InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
THREE_ARG_REF_DOWNCALL art_quick_set16_instance, artSet16InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
THREE_ARG_REF_DOWNCALL art_quick_set64_instance, artSet64InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER

// This is separated out as the argument order is different.
    .extern artSet64StaticFromCode
ENTRY art_quick_set64_static
    SETUP_SAVE_REFS_ONLY_FRAME                // save callee saves in case of GC
    ldr x1, [sp, #FRAME_SIZE_SAVE_REFS_ONLY]  // Load referrer
                                              // x2 contains the parameter
    mov x3, xSELF                             // pass Thread::Current
    bl artSet64StaticFromCode
    RESTORE_SAVE_REFS_ONLY_FRAME
    RETURN_IF_W0_IS_ZERO_OR_DELIVER
END art_quick_set64_static

    /*
     * Entry from managed code to resolve a string; this stub checks the dex cache for a
     * matching string (the fast path) and, if not found, calls into the runtime to allocate
     * the String, delivering an exception on error.
     * On success the String is returned. w0 holds the string index.
     */
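    /*
     * The fast path below in C (a sketch; the lsr/ubfx pair implies the 64-bit dex
     * cache entry layout: String reference in the low 32 bits, string index in the
     * high 32 bits):
     *
     *   uint64_t pair = referrer->declaring_class_->dex_cache_strings_
     *       [string_idx & ((1u << STRING_DEX_CACHE_HASH_BITS) - 1)];
     *   if ((pair >> 32) == string_idx) return (String*)(uint32_t)pair;  // hit
     *   return artResolveStringFromCode(string_idx, self);               // slow path
     */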

ENTRY art_quick_resolve_string
    SAVE_TWO_REGS_INCREASE_FRAME x29, xLR, 2 * __SIZEOF_POINTER__
    ldr x29, [sp, #(2 * __SIZEOF_POINTER__)]  // load referrer
    ldr w29, [x29, #ART_METHOD_DECLARING_CLASS_OFFSET]  // load declaring class
    ldr x29, [x29, #DECLARING_CLASS_DEX_CACHE_STRINGS_OFFSET]  // load string dex cache
    ubfx lr, x0, #0, #STRING_DEX_CACHE_HASH_BITS  // get masked string index into LR
    ldr x29, [x29, lr, lsl #STRING_DEX_CACHE_ELEMENT_SIZE_SHIFT]  // load dex cache pair into x29
    cmp x0, x29, lsr #32                      // compare against upper 32 bits
    bne .Lart_quick_resolve_string_slow_path
    ubfx x0, x29, #0, #32                     // extract lower 32 bits into x0
#ifdef USE_READ_BARRIER
    // Most common case: GC is not marking.
    ldr w29, [xSELF, #THREAD_IS_GC_MARKING_OFFSET]
    cbnz x29, .Lart_quick_resolve_string_marking
.Lart_quick_resolve_string_no_rb:
#endif
    .cfi_remember_state
    RESTORE_TWO_REGS_DECREASE_FRAME x29, xLR, 2 * __SIZEOF_POINTER__
    ret
    .cfi_restore_state
    .cfi_def_cfa_offset 16                    // workaround for clang bug: 31975598

#ifdef USE_READ_BARRIER
// GC is marking case, need to check the mark bit.
.Lart_quick_resolve_string_marking:
    ldr x29, [x0, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
    tbnz x29, #LOCK_WORD_MARK_BIT_SHIFT, .Lart_quick_resolve_string_no_rb
    .cfi_remember_state
    RESTORE_TWO_REGS_DECREASE_FRAME x29, xLR, 2 * __SIZEOF_POINTER__
    // Note: art_quick_read_barrier_mark_reg00 clobbers IP0 but the .Lslow_rb_* does not.
    b .Lslow_rb_art_quick_read_barrier_mark_reg00  // Get the marked string back.
    .cfi_restore_state
    .cfi_def_cfa_offset 16                    // workaround for clang bug: 31975598
#endif

// Slow path case, the index did not match.
.Lart_quick_resolve_string_slow_path:
    INCREASE_FRAME (FRAME_SIZE_SAVE_EVERYTHING - 2 * __SIZEOF_POINTER__)
    SETUP_SAVE_EVERYTHING_FRAME_DECREMENTED_SP_SKIP_X29_LR  // save callee saves in case of GC
    mov x1, xSELF                             // pass Thread::Current
    bl artResolveStringFromCode               // (int32_t string_idx, Thread* self)
    cbz w0, 1f                                // If result is null, deliver the OOME.
    .cfi_remember_state
    RESTORE_SAVE_EVERYTHING_FRAME_KEEP_X0
    ret                                       // return
    .cfi_restore_state
    .cfi_def_cfa_offset FRAME_SIZE_SAVE_EVERYTHING  // workaround for clang bug: 31975598
1:
    DELIVER_PENDING_EXCEPTION_FRAME_READY
END art_quick_resolve_string

// Generate the allocation entrypoints for each allocator.
GENERATE_ALLOC_ENTRYPOINTS_FOR_NON_REGION_TLAB_ALLOCATORS
// Comment out allocators that have arm64 specific asm.
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_region_tlab, RegionTLAB) implemented in asm
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_region_tlab, RegionTLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_region_tlab, RegionTLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_region_tlab, RegionTLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_region_tlab, RegionTLAB) implemented in asm
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_region_tlab, RegionTLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region_tlab, RegionTLAB)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_region_tlab, RegionTLAB)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region_tlab, RegionTLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_BYTES(_region_tlab, RegionTLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_CHARS(_region_tlab, RegionTLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_STRING(_region_tlab, RegionTLAB)

// A hand-written override for GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_rosalloc, RosAlloc).
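//
// Sketch of the fast path below in C (field names assumed; the add/ldr pair indexes
// rosalloc_runs_[size_quantum - 1], since there is no run for 0-byte allocations):
//
//   if (klass == null || !room_on_alloc_stack(self) || size > MAX_BRACKET) goto slow;
//   Run* run = self->rosalloc_runs_[(size >> ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT) - 1];
//   Slot* slot = run->free_list_.head_;
//   if (slot == null) goto slow;
//   run->free_list_.head_ = slot->next_;     // pop the free list
//   slot->klass_ = klass;                    // overwrites next_; offsets asserted equal
//   dmb ish;  push_on_alloc_stack(self, slot);  run->free_list_.size_--;
//   return slot;
//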
ENTRY art_quick_alloc_object_rosalloc
    // Fast path rosalloc allocation.
    // x0: type_idx/return value, x1: ArtMethod*, xSELF(x19): Thread::Current
    // x2-x7: free.
    ldr x2, [x1, #ART_METHOD_DEX_CACHE_TYPES_OFFSET_64]  // Load dex cache resolved types array
                                              // Load the class (x2)
    ldr w2, [x2, x0, lsl #COMPRESSED_REFERENCE_SIZE_SHIFT]
    cbz x2, .Lart_quick_alloc_object_rosalloc_slow_path  // Check null class
    ldr x3, [xSELF, #THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET]  // Check if the thread local
                                              // allocation stack has room.
                                              // ldp won't work due to large offset.
    ldr x4, [xSELF, #THREAD_LOCAL_ALLOC_STACK_END_OFFSET]
    cmp x3, x4
    bhs .Lart_quick_alloc_object_rosalloc_slow_path
    ldr w3, [x2, #MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET]  // Load the object size (x3)
    cmp x3, #ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE  // Check if the size is for a thread
                                              // local allocation. Also does the
                                              // finalizable and initialization
                                              // checks.
    bhs .Lart_quick_alloc_object_rosalloc_slow_path
                                              // Compute the rosalloc bracket index
                                              // from the size. Since the size is
                                              // already aligned we can combine the
                                              // two shifts together.
    add x4, xSELF, x3, lsr #(ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT - POINTER_SIZE_SHIFT)
                                              // Subtract pointer size since there
                                              // are no runs for 0 byte allocations
                                              // and the size is already aligned.
    ldr x4, [x4, #(THREAD_ROSALLOC_RUNS_OFFSET - __SIZEOF_POINTER__)]
                                              // Load the free list head (x3). This
                                              // will be the return val.
    ldr x3, [x4, #(ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)]
    cbz x3, .Lart_quick_alloc_object_rosalloc_slow_path
    // "Point of no slow path". Won't go to the slow path from here on. OK to clobber x0 and x1.
    ldr x1, [x3, #ROSALLOC_SLOT_NEXT_OFFSET]  // Load the next pointer of the head
                                              // and update the list head with the
                                              // next pointer.
    str x1, [x4, #(ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)]
                                              // Store the class pointer in the
                                              // header. This also overwrites the
                                              // next pointer. The offsets are
                                              // asserted to match.
#if ROSALLOC_SLOT_NEXT_OFFSET != MIRROR_OBJECT_CLASS_OFFSET
#error "Class pointer needs to overwrite next pointer."
#endif
    POISON_HEAP_REF w2
    str w2, [x3, #MIRROR_OBJECT_CLASS_OFFSET]
                                              // Fence. This is "ish" not "ishst" so
                                              // that it also ensures ordering of
                                              // the object size load with respect
                                              // to later accesses to the class
                                              // object. Alternatively we could use
                                              // "ishst" if we use load-acquire for
                                              // the class status load.
                                              // Needs to be done before pushing on
                                              // allocation since Heap::VisitObjects
                                              // relies on seeing the class pointer.
                                              // b/28790624
    dmb ish
                                              // Push the new object onto the thread
                                              // local allocation stack and
                                              // increment the thread local
                                              // allocation stack top.
    ldr x1, [xSELF, #THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET]
    str w3, [x1], #COMPRESSED_REFERENCE_SIZE  // (Increment x1 as a side effect.)
    str x1, [xSELF, #THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET]
                                              // Decrement the size of the free list
    ldr w1, [x4, #(ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)]
    sub x1, x1, #1
                                              // TODO: consider combining this store
                                              // and the list head store above using
                                              // strd.
    str w1, [x4, #(ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)]

    mov x0, x3                                // Set the return value and return.
    ret
.Lart_quick_alloc_object_rosalloc_slow_path:
    SETUP_SAVE_REFS_ONLY_FRAME                // save callee saves in case of GC
    mov x2, xSELF                             // pass Thread::Current
    bl artAllocObjectFromCodeRosAlloc         // (uint32_t type_idx, Method* method, Thread*)
    RESTORE_SAVE_REFS_ONLY_FRAME
    RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
END art_quick_alloc_object_rosalloc


// The common fast path code for art_quick_alloc_array_region_tlab.
.macro ALLOC_ARRAY_TLAB_FAST_PATH slowPathLabel, xClass, wClass, xCount, wCount, xTemp0, wTemp0, xTemp1, wTemp1, xTemp2, wTemp2
    // Check null class
    cbz \wClass, \slowPathLabel
    ALLOC_ARRAY_TLAB_FAST_PATH_RESOLVED \slowPathLabel, \xClass, \wClass, \xCount, \wCount, \xTemp0, \wTemp0, \xTemp1, \wTemp1, \xTemp2, \wTemp2
.endm

// The same fast path, for an already resolved (non-null) class.
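// In C, the size computation in the macro below is roughly (a sketch; 64-bit
// arithmetic throughout, so a negative count turns into a huge unsigned size that
// the large-object check rejects):
//
//   shift = component_class->primitive_type_ >> PRIMITIVE_TYPE_SIZE_SHIFT_SHIFT;
//   size  = ((uint64_t)count << shift) + MIRROR_INT_ARRAY_DATA_OFFSET
//           + OBJECT_ALIGNMENT_MASK + ((shift == 3) ? 4 : 0);  // 8B-align long/double data
//   size &= ~(uint64_t)OBJECT_ALIGNMENT_MASK;
//   if (size >= MIN_LARGE_OBJECT_THRESHOLD) goto slow;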
.macro ALLOC_ARRAY_TLAB_FAST_PATH_RESOLVED slowPathLabel, xClass, wClass, xCount, wCount, xTemp0, wTemp0, xTemp1, wTemp1, xTemp2, wTemp2
    // Array classes are never finalizable or uninitialized, no need to check.
    ldr \wTemp0, [\xClass, #MIRROR_CLASS_COMPONENT_TYPE_OFFSET]  // Load component type
    UNPOISON_HEAP_REF \wTemp0
    ldr \wTemp0, [\xTemp0, #MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET]
    lsr \xTemp0, \xTemp0, #PRIMITIVE_TYPE_SIZE_SHIFT_SHIFT  // Component size shift is in high 16
                                              // bits.
                                              // xCount is holding a 32 bit value,
                                              // it cannot overflow.
    lsl \xTemp1, \xCount, \xTemp0             // Calculate data size
    // Add array data offset and alignment.
    add \xTemp1, \xTemp1, #(MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK)
#if MIRROR_LONG_ARRAY_DATA_OFFSET != MIRROR_INT_ARRAY_DATA_OFFSET + 4
#error Long array data offset must be 4 greater than int array data offset.
#endif

    add \xTemp0, \xTemp0, #1                  // Add 4 to the length only if the
                                              // component size shift is 3
                                              // (for 64 bit alignment).
    and \xTemp0, \xTemp0, #4
    add \xTemp1, \xTemp1, \xTemp0
    and \xTemp1, \xTemp1, #OBJECT_ALIGNMENT_MASK_TOGGLED64  // Apply alignment mask
                                              // (addr + 7) & ~7. The mask must
                                              // be 64 bits to keep high bits in
                                              // case of overflow.
    // Negative sized arrays are handled here since xCount holds a zero extended 32 bit value.
    // Negative ints become large 64 bit unsigned ints which will always be larger than max signed
    // 32 bit int. Since the max shift for arrays is 3, it cannot become a negative 64 bit int.
    cmp \xTemp1, #MIN_LARGE_OBJECT_THRESHOLD  // Possibly a large object, go slow
    bhs \slowPathLabel                        // path.

    ldr \xTemp0, [xSELF, #THREAD_LOCAL_POS_OFFSET]  // Check tlab for space, note that
                                              // we use (end - begin) to handle
                                              // negative size arrays. It is
                                              // assumed that a negative size will
                                              // always be greater unsigned than
                                              // region size.
    ldr \xTemp2, [xSELF, #THREAD_LOCAL_END_OFFSET]
    sub \xTemp2, \xTemp2, \xTemp0
    cmp \xTemp1, \xTemp2
    bhi \slowPathLabel
    // "Point of no slow path". Won't go to the slow path from here on. OK to clobber x0 and x1.
                                              // Move old thread_local_pos to x0
                                              // for the return value.
    mov x0, \xTemp0
    add \xTemp0, \xTemp0, \xTemp1
    str \xTemp0, [xSELF, #THREAD_LOCAL_POS_OFFSET]  // Store new thread_local_pos.
    ldr \xTemp0, [xSELF, #THREAD_LOCAL_OBJECTS_OFFSET]  // Increment thread_local_objects.
    add \xTemp0, \xTemp0, #1
    str \xTemp0, [xSELF, #THREAD_LOCAL_OBJECTS_OFFSET]
    POISON_HEAP_REF \wClass
    str \wClass, [x0, #MIRROR_OBJECT_CLASS_OFFSET]  // Store the class pointer.
    str \wCount, [x0, #MIRROR_ARRAY_LENGTH_OFFSET]  // Store the array length.
                                              // Fence.
    dmb ishst
    ret
.endm

// The common fast path code for art_quick_alloc_object_tlab and art_quick_alloc_object_region_tlab.
//
// x0: type_idx/return value, x1: ArtMethod*, x2: Class*, xSELF(x19): Thread::Current
// x3-x7: free.
// Need to preserve x0 and x1 to the slow path.
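// The TLAB bump-pointer fast path in C, for orientation (a sketch; field names
// assumed from the THREAD_LOCAL_* offsets used below):
//
//   uint8_t* pos = self->tlab_pos_;
//   size_t size = klass->object_size_alloc_fast_path_;
//   if (pos + size > self->tlab_end_) goto slow;
//   self->tlab_pos_ = pos + size;  self->tlab_objects_++;
//   ((Object*)pos)->klass_ = klass;  dmb ish;  return (Object*)pos;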
.macro ALLOC_OBJECT_TLAB_FAST_PATH slowPathLabel
    cbz x2, \slowPathLabel                    // Check null class
    ALLOC_OBJECT_TLAB_FAST_PATH_RESOLVED \slowPathLabel
.endm

// TODO: delete ALLOC_OBJECT_TLAB_FAST_PATH_RESOLVED since it is the same as
// ALLOC_OBJECT_TLAB_FAST_PATH_INITIALIZED.
.macro ALLOC_OBJECT_TLAB_FAST_PATH_RESOLVED slowPathLabel
    ALLOC_OBJECT_TLAB_FAST_PATH_INITIALIZED \slowPathLabel
.endm

.macro ALLOC_OBJECT_TLAB_FAST_PATH_INITIALIZED slowPathLabel
    ldr x4, [xSELF, #THREAD_LOCAL_POS_OFFSET]
    ldr x5, [xSELF, #THREAD_LOCAL_END_OFFSET]
    ldr w7, [x2, #MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET]  // Load the object size (x7).
    add x6, x4, x7                            // Add object size to tlab pos.
    cmp x6, x5                                // Check if it fits, overflow works
                                              // since the tlab pos and end are 32
                                              // bit values.
    bhi \slowPathLabel
    // "Point of no slow path". Won't go to the slow path from here on. OK to clobber x0 and x1.
    mov x0, x4
    str x6, [xSELF, #THREAD_LOCAL_POS_OFFSET]  // Store new thread_local_pos.
    ldr x5, [xSELF, #THREAD_LOCAL_OBJECTS_OFFSET]  // Increment thread_local_objects.
    add x5, x5, #1
    str x5, [xSELF, #THREAD_LOCAL_OBJECTS_OFFSET]
    POISON_HEAP_REF w2
    str w2, [x0, #MIRROR_OBJECT_CLASS_OFFSET]  // Store the class pointer.
                                              // Fence. This is "ish" not "ishst" so
                                              // that the code after this allocation
                                              // site will see the right values in
                                              // the fields of the class.
                                              // Alternatively we could use "ishst"
                                              // if we use load-acquire for the
                                              // object size load.
    dmb ish
    ret
.endm

// A hand-written override for GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_tlab, TLAB).
ENTRY art_quick_alloc_object_tlab
    // Fast path tlab allocation.
    // x0: type_idx/return value, x1: ArtMethod*, xSELF(x19): Thread::Current
    // x2-x7: free.
#if defined(USE_READ_BARRIER)
    mvn x0, xzr                               // Read barrier not supported here.
    ret                                       // Return -1.
#endif
    ldr x2, [x1, #ART_METHOD_DEX_CACHE_TYPES_OFFSET_64]  // Load dex cache resolved types array
                                              // Load the class (x2)
    ldr w2, [x2, x0, lsl #COMPRESSED_REFERENCE_SIZE_SHIFT]
    ALLOC_OBJECT_TLAB_FAST_PATH .Lart_quick_alloc_object_tlab_slow_path
.Lart_quick_alloc_object_tlab_slow_path:
    SETUP_SAVE_REFS_ONLY_FRAME                // Save callee saves in case of GC.
    mov x2, xSELF                             // Pass Thread::Current.
    bl artAllocObjectFromCodeTLAB             // (uint32_t type_idx, Method* method, Thread*)
    RESTORE_SAVE_REFS_ONLY_FRAME
    RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
END art_quick_alloc_object_tlab

// The common code for art_quick_alloc_object_*region_tlab
.macro GENERATE_ALLOC_OBJECT_REGION_TLAB name, entrypoint, fast_path, is_resolved, read_barrier
ENTRY \name
    // Fast path region tlab allocation.
    // x0: type_idx/resolved class/return value, x1: ArtMethod*, xSELF(x19): Thread::Current
    // If is_resolved is 1 then x0 is the resolved type, otherwise it is the index.
    // x2-x7: free.
#if !defined(USE_READ_BARRIER)
    mvn x0, xzr                               // Read barrier must be enabled here.
    ret                                       // Return -1.
#endif
.if \is_resolved
    mov x2, x0                                // class is actually stored in x0 already
.else
    ldr x2, [x1, #ART_METHOD_DEX_CACHE_TYPES_OFFSET_64]  // Load dex cache resolved types array
                                              // Load the class (x2)
    ldr w2, [x2, x0, lsl #COMPRESSED_REFERENCE_SIZE_SHIFT]
    // If the class is null, go slow path. The check is required to read the lock word.
    cbz w2, .Lslow_path\name
.endif
.if \read_barrier
    // Most common case: GC is not marking.
    ldr w3, [xSELF, #THREAD_IS_GC_MARKING_OFFSET]
    cbnz x3, .Lmarking\name
.endif
.Ldo_allocation\name:
    \fast_path .Lslow_path\name
.Lmarking\name:
.if \read_barrier
    // GC is marking, check the lock word of the class for the mark bit.
    // Class is not null, check mark bit in lock word.
    ldr w3, [x2, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
    // If the bit is not zero, do the allocation.
    tbnz w3, #LOCK_WORD_MARK_BIT_SHIFT, .Ldo_allocation\name
                                              // The read barrier slow path. Mark
                                              // the class.
    SAVE_TWO_REGS_INCREASE_FRAME x0, x1, 32   // Save registers (x0, x1, lr).
    SAVE_REG xLR, 24                          // Align sp by 16 bytes.
    mov x0, x2                                // Pass the class as the first param.
    bl artReadBarrierMark
    mov x2, x0                                // Get the (marked) class back.
    RESTORE_REG xLR, 24
    RESTORE_TWO_REGS_DECREASE_FRAME x0, x1, 32  // Restore registers.
    b .Ldo_allocation\name
.endif
.Lslow_path\name:
    SETUP_SAVE_REFS_ONLY_FRAME                // Save callee saves in case of GC.
    mov x2, xSELF                             // Pass Thread::Current.
    bl \entrypoint                            // (uint32_t type_idx, Method* method, Thread*)
    RESTORE_SAVE_REFS_ONLY_FRAME
    RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
END \name
.endm

// Use ALLOC_OBJECT_TLAB_FAST_PATH_RESOLVED since the null check is already done in
// GENERATE_ALLOC_OBJECT_REGION_TLAB.
GENERATE_ALLOC_OBJECT_REGION_TLAB art_quick_alloc_object_region_tlab, artAllocObjectFromCodeRegionTLAB, ALLOC_OBJECT_TLAB_FAST_PATH_RESOLVED, 0, 1
// No read barrier for the resolved or initialized cases since the caller is responsible for the
// read barrier due to the to-space invariant.
GENERATE_ALLOC_OBJECT_REGION_TLAB art_quick_alloc_object_resolved_region_tlab, artAllocObjectFromCodeResolvedRegionTLAB, ALLOC_OBJECT_TLAB_FAST_PATH_RESOLVED, 1, 0
GENERATE_ALLOC_OBJECT_REGION_TLAB art_quick_alloc_object_initialized_region_tlab, artAllocObjectFromCodeInitializedRegionTLAB, ALLOC_OBJECT_TLAB_FAST_PATH_INITIALIZED, 1, 0

// TODO: We could use this macro for the normal tlab allocator too.

// The common code for art_quick_alloc_array_*region_tlab
.macro GENERATE_ALLOC_ARRAY_REGION_TLAB name, entrypoint, fast_path, is_resolved
ENTRY \name
    // Fast path array allocation for region tlab allocation.
    // x0: uint32_t type_idx
    // x1: int32_t component_count
    // x2: ArtMethod* method
    // x3-x7: free.
#if !defined(USE_READ_BARRIER)
    mvn x0, xzr                               // Read barrier must be enabled here.
    ret                                       // Return -1.
#endif
.if \is_resolved
    mov x3, x0
    // If already resolved, class is stored in x0
.else
    ldr x3, [x2, #ART_METHOD_DEX_CACHE_TYPES_OFFSET_64]  // Load dex cache resolved types array
                                              // Load the class (x3)
    ldr w3, [x3, x0, lsl #COMPRESSED_REFERENCE_SIZE_SHIFT]
.endif
    // Most common case: GC is not marking.
    ldr w4, [xSELF, #THREAD_IS_GC_MARKING_OFFSET]
    cbnz x4, .Lmarking\name
.Ldo_allocation\name:
    \fast_path .Lslow_path\name, x3, w3, x1, w1, x4, w4, x5, w5, x6, w6
.Lmarking\name:
    // GC is marking, check the lock word of the class for the mark bit.
    // If the class is null, go slow path. The check is required to read the lock word.
    cbz w3, .Lslow_path\name
    // Class is not null, check mark bit in lock word.
    ldr w4, [x3, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
    // If the bit is not zero, do the allocation.
    tbnz w4, #LOCK_WORD_MARK_BIT_SHIFT, .Ldo_allocation\name
                                              // The read barrier slow path. Mark
                                              // the class.
    SAVE_TWO_REGS_INCREASE_FRAME x0, x1, 32   // Save registers (x0, x1, x2, lr).
    SAVE_TWO_REGS x2, xLR, 16
    mov x0, x3                                // Pass the class as the first param.
    bl artReadBarrierMark
    mov x3, x0                                // Get the (marked) class back.
    RESTORE_TWO_REGS x2, xLR, 16
    RESTORE_TWO_REGS_DECREASE_FRAME x0, x1, 32  // Restore registers.
    b .Ldo_allocation\name
.Lslow_path\name:
    // x0: uint32_t type_idx / mirror::Class* klass (if resolved)
    // x1: int32_t component_count
    // x2: ArtMethod* method
    // x3: Thread* self
    SETUP_SAVE_REFS_ONLY_FRAME                // save callee saves in case of GC
    mov x3, xSELF                             // pass Thread::Current
    bl \entrypoint
    RESTORE_SAVE_REFS_ONLY_FRAME
    RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
END \name
.endm

GENERATE_ALLOC_ARRAY_REGION_TLAB art_quick_alloc_array_region_tlab, artAllocArrayFromCodeRegionTLAB, ALLOC_ARRAY_TLAB_FAST_PATH, 0
// TODO: art_quick_alloc_array_resolved_region_tlab seems to not get called. Investigate compiler.
GENERATE_ALLOC_ARRAY_REGION_TLAB art_quick_alloc_array_resolved_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, ALLOC_ARRAY_TLAB_FAST_PATH_RESOLVED, 1

    /*
     * Called by managed code when the thread has been asked to suspend.
     */
    .extern artTestSuspendFromCode
ENTRY art_quick_test_suspend
    SETUP_SAVE_EVERYTHING_FRAME               // save callee saves for stack crawl
    mov x0, xSELF
    bl artTestSuspendFromCode                 // (Thread*)
    RESTORE_SAVE_EVERYTHING_FRAME
    ret
END art_quick_test_suspend

ENTRY art_quick_implicit_suspend
    mov x0, xSELF
    SETUP_SAVE_REFS_ONLY_FRAME                // save callee saves for stack crawl
    bl artTestSuspendFromCode                 // (Thread*)
    RESTORE_SAVE_REFS_ONLY_FRAME_AND_RETURN
END art_quick_implicit_suspend

    /*
     * Called by managed code that is attempting to call a method on a proxy class. On entry
     * x0 holds the proxy method and x1 holds the receiver; the frame size of the invoked proxy
     * method agrees with a ref and args callee save frame.
     */
    .extern artQuickProxyInvokeHandler
ENTRY art_quick_proxy_invoke_handler
    SETUP_SAVE_REFS_AND_ARGS_FRAME_WITH_METHOD_IN_X0
    mov x2, xSELF                             // pass Thread::Current
    mov x3, sp                                // pass SP
    bl artQuickProxyInvokeHandler             // (Method* proxy method, receiver, Thread*, SP)
    ldr x2, [xSELF, THREAD_EXCEPTION_OFFSET]
    cbnz x2, .Lexception_in_proxy             // success if no exception is pending
    RESTORE_SAVE_REFS_AND_ARGS_FRAME          // Restore frame
    fmov d0, x0                               // Store result in d0 in case it was float or double
    ret                                       // return on success
.Lexception_in_proxy:
    RESTORE_SAVE_REFS_AND_ARGS_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_proxy_invoke_handler

    /*
     * Called to resolve an imt conflict.
     * x0 is the conflict ArtMethod.
     * xIP1 is a hidden argument that holds the target interface method's dex method index.
     *
     * Note that this stub writes to xIP0, xIP1, and x0.
     */
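    /*
     * The table walk below, sketched in C (layout inferred from the pointer strides:
     * a flat, null-terminated array of {interface method, implementation} pairs):
     *
     *   ArtMethod** t = (ArtMethod**)conflict_method->jni_entrypoint_;  // ImtConflictTable
     *   for (; t[0] != null; t += 2)
     *     if (t[0] == interface_method) goto t[1]->quick_code_;
     *   artInvokeInterfaceTrampoline(...);  // populate the table and invoke
     */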
    .extern artInvokeInterfaceTrampoline
ENTRY art_quick_imt_conflict_trampoline
    ldr xIP0, [sp, #0]                        // Load referrer
    ldr xIP0, [xIP0, #ART_METHOD_DEX_CACHE_METHODS_OFFSET_64]  // Load dex cache methods array
    ldr xIP0, [xIP0, xIP1, lsl #POINTER_SIZE_SHIFT]  // Load interface method
    ldr xIP1, [x0, #ART_METHOD_JNI_OFFSET_64]  // Load ImtConflictTable
    ldr x0, [xIP1]                            // Load first entry in ImtConflictTable.
.Limt_table_iterate:
    cmp x0, xIP0
    // Branch if found. Benchmarks have shown doing a branch here is better.
    beq .Limt_table_found
    // If the entry is null, the interface method is not in the ImtConflictTable.
    cbz x0, .Lconflict_trampoline
    // Iterate over the entries of the ImtConflictTable.
    ldr x0, [xIP1, #(2 * __SIZEOF_POINTER__)]!
    b .Limt_table_iterate
.Limt_table_found:
    // We successfully hit an entry in the table. Load the target method
    // and jump to it.
    ldr x0, [xIP1, #__SIZEOF_POINTER__]
    ldr xIP0, [x0, #ART_METHOD_QUICK_CODE_OFFSET_64]
    br xIP0
.Lconflict_trampoline:
    // Call the runtime stub to populate the ImtConflictTable and jump to the
    // resolved method.
    INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline
END art_quick_imt_conflict_trampoline

ENTRY art_quick_resolution_trampoline
    SETUP_SAVE_REFS_AND_ARGS_FRAME
    mov x2, xSELF
    mov x3, sp
    bl artQuickResolutionTrampoline           // (called, receiver, Thread*, SP)
    cbz x0, 1f
    mov xIP0, x0                              // Remember returned code pointer in xIP0.
    ldr x0, [sp, #0]                          // artQuickResolutionTrampoline puts called method in *SP.
    RESTORE_SAVE_REFS_AND_ARGS_FRAME
    br xIP0
1:
    RESTORE_SAVE_REFS_AND_ARGS_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_resolution_trampoline

/*
 * Generic JNI frame layout:
 *
 * #-------------------#
 * |                   |
 * | caller method...  |
 * #-------------------#    <--- SP on entry
 * | Return X30/LR     |
 * | X29/FP            |    callee save
 * | X28               |    callee save
 * | X27               |    callee save
 * | X26               |    callee save
 * | X25               |    callee save
 * | X24               |    callee save
 * | X23               |    callee save
 * | X22               |    callee save
 * | X21               |    callee save
 * | X20               |    callee save
 * | X19               |    callee save
 * | X7                |    arg7
 * | X6                |    arg6
 * | X5                |    arg5
 * | X4                |    arg4
 * | X3                |    arg3
 * | X2                |    arg2
 * | X1                |    arg1
 * | D7                |    float arg 8
 * | D6                |    float arg 7
 * | D5                |    float arg 6
 * | D4                |    float arg 5
 * | D3                |    float arg 4
 * | D2                |    float arg 3
 * | D1                |    float arg 2
 * | D0                |    float arg 1
 * | Method*           | <- X0
 * #-------------------#
 * | local ref cookie  |    // 4B
 * | handle scope size |    // 4B
 * #-------------------#
 * | JNI Call Stack    |
 * #-------------------#    <--- SP on native call
 * |                   |
 * | Stack for Regs    |    The trampoline assembly will pop these values
 * |                   |    into registers for native call
 * #-------------------#
 * | Native code ptr   |
 * #-------------------#
 * | Free scratch      |
 * #-------------------#
 * | Ptr to (1)        |    <--- SP
 * #-------------------#
 */
    /*
     * Called to do a generic JNI down-call
     */
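    /*
     * Control flow of the trampoline below, as a hedged C-like sketch (the helper
     * names are real; the tuple return is shorthand for the x0/x1 result pair):
     *
     *   (code, args_bottom) = artQuickGenericJniTrampoline(self, fp);
     *   if (code == null) goto exception;
     *   sp = args_bottom;  pop x0..x7 and d0..d7 from sp;  sp += 128;
     *   result = code(...);                                      // the native call
     *   artQuickGenericJniEndTrampoline(self, result, fpr_result);
     *   if (self->exception_) goto exception;
     *   return result;                                           // also copied to d0
     */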
Ian Rogers6f3dbba2014-10-14 17:41:57 -07002204ENTRY art_quick_generic_jni_trampoline
Vladimir Markofd36f1f2016-08-03 18:49:58 +01002205 SETUP_SAVE_REFS_AND_ARGS_FRAME_WITH_METHOD_IN_X0
Stuart Monteithb95a5342014-03-12 13:32:32 +00002206
2207 // Save SP , so we can have static CFI info.
2208 mov x28, sp
2209 .cfi_def_cfa_register x28
2210
2211 // This looks the same, but is different: this will be updated to point to the bottom
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07002212 // of the frame when the handle scope is inserted.
Stuart Monteithb95a5342014-03-12 13:32:32 +00002213 mov xFP, sp
2214
Zheng Xub551fdc2014-07-25 11:49:42 +08002215 mov xIP0, #5120
2216 sub sp, sp, xIP0
Stuart Monteithb95a5342014-03-12 13:32:32 +00002217
2218 // prepare for artQuickGenericJniTrampoline call
2219 // (Thread*, SP)
2220 // x0 x1 <= C calling convention
2221 // xSELF xFP <= where they are
2222
2223 mov x0, xSELF // Thread*
2224 mov x1, xFP
2225 bl artQuickGenericJniTrampoline // (Thread*, sp)
2226
Andreas Gampec200a4a2014-06-16 18:39:09 -07002227 // The C call will have registered the complete save-frame on success.
2228 // The result of the call is:
2229 // x0: pointer to native code, 0 on error.
2230 // x1: pointer to the bottom of the used area of the alloca, can restore stack till there.
Stuart Monteithb95a5342014-03-12 13:32:32 +00002231
Andreas Gampec200a4a2014-06-16 18:39:09 -07002232 // Check for error = 0.
Nicolas Geoffray126d6592015-03-03 14:28:35 +00002233 cbz x0, .Lexception_in_native
Stuart Monteithb95a5342014-03-12 13:32:32 +00002234
Andreas Gampec200a4a2014-06-16 18:39:09 -07002235 // Release part of the alloca.
2236 mov sp, x1
Stuart Monteithb95a5342014-03-12 13:32:32 +00002237
Andreas Gampec200a4a2014-06-16 18:39:09 -07002238 // Save the code pointer
2239 mov xIP0, x0
Stuart Monteithb95a5342014-03-12 13:32:32 +00002240
2241 // Load parameters from frame into registers.
2242 // TODO Check with artQuickGenericJniTrampoline.
2243 // Also, check again APPCS64 - the stack arguments are interleaved.
Andreas Gampec200a4a2014-06-16 18:39:09 -07002244 ldp x0, x1, [sp]
2245 ldp x2, x3, [sp, #16]
2246 ldp x4, x5, [sp, #32]
2247 ldp x6, x7, [sp, #48]
Stuart Monteithb95a5342014-03-12 13:32:32 +00002248
Andreas Gampec200a4a2014-06-16 18:39:09 -07002249 ldp d0, d1, [sp, #64]
2250 ldp d2, d3, [sp, #80]
2251 ldp d4, d5, [sp, #96]
2252 ldp d6, d7, [sp, #112]
Stuart Monteithb95a5342014-03-12 13:32:32 +00002253
Andreas Gampec200a4a2014-06-16 18:39:09 -07002254 add sp, sp, #128
Stuart Monteithb95a5342014-03-12 13:32:32 +00002255
Zheng Xub551fdc2014-07-25 11:49:42 +08002256 blr xIP0 // native call.
Stuart Monteithb95a5342014-03-12 13:32:32 +00002257
2258 // result sign extension is handled in C code
2259 // prepare for artQuickGenericJniEndTrampoline call
Andreas Gampec200a4a2014-06-16 18:39:09 -07002260 // (Thread*, result, result_f)
2261 // x0 x1 x2 <= C calling convention
Serban Constantinescu9bd88b02015-04-22 16:24:46 +01002262 mov x1, x0 // Result (from saved).
2263 mov x0, xSELF // Thread register.
Andreas Gampec200a4a2014-06-16 18:39:09 -07002264 fmov x2, d0 // d0 will contain floating point result, but needs to go into x2
Stuart Monteithb95a5342014-03-12 13:32:32 +00002265
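    // Hedged sketch of the callee, per the register setup above (the exact
    // declaration lives in the runtime; names and types are assumptions):
    //   extern "C" uint64_t artQuickGenericJniEndTrampoline(Thread* self,
    //                                                       uint64_t result,
    //                                                       uint64_t result_f);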
2266 bl artQuickGenericJniEndTrampoline
2267
Nicolas Geoffray126d6592015-03-03 14:28:35 +00002268 // Pending exceptions possible.
Serban Constantinescu9bd88b02015-04-22 16:24:46 +01002269 ldr x2, [xSELF, THREAD_EXCEPTION_OFFSET]
Nicolas Geoffray126d6592015-03-03 14:28:35 +00002270 cbnz x2, .Lexception_in_native
2271
Stuart Monteithb95a5342014-03-12 13:32:32 +00002272 // Tear down the alloca.
2273 mov sp, x28
2274 .cfi_def_cfa_register sp
2275
Stuart Monteithb95a5342014-03-12 13:32:32 +00002276 // Tear down the callee-save frame.
Vladimir Markofd36f1f2016-08-03 18:49:58 +01002277 RESTORE_SAVE_REFS_AND_ARGS_FRAME
Stuart Monteithb95a5342014-03-12 13:32:32 +00002278
    // Copy the result into d0 as well, in case the method has a floating-point return type.
    fmov d0, x0
2281 ret
2282
Stuart Monteithb95a5342014-03-12 13:32:32 +00002283.Lexception_in_native:
    // Load via x1 and then move to sp, as sp cannot be the destination of an ldr.
    ldr x1, [xSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]
2286 mov sp, x1
2287 .cfi_def_cfa_register sp
    // This will create a new save-all frame, required by the runtime.
Stuart Monteithb95a5342014-03-12 13:32:32 +00002289 DELIVER_PENDING_EXCEPTION
Stuart Monteithb95a5342014-03-12 13:32:32 +00002290END art_quick_generic_jni_trampoline
2291
2292/*
2293 * Called to bridge from the quick to interpreter ABI. On entry the arguments match those
2294 * of a quick call:
2295 * x0 = method being called/to bridge to.
2296 * x1..x7, d0..d7 = arguments to that method.
2297 */
Ian Rogers6f3dbba2014-10-14 17:41:57 -07002298ENTRY art_quick_to_interpreter_bridge
Vladimir Markofd36f1f2016-08-03 18:49:58 +01002299 SETUP_SAVE_REFS_AND_ARGS_FRAME // Set up frame and save arguments.
Stuart Monteithb95a5342014-03-12 13:32:32 +00002300
    // x0 will contain ArtMethod* method.
    mov x1, xSELF               // Pass Thread::Current().
2303 mov x2, sp
2304
    // uint64_t artQuickToInterpreterBridge(ArtMethod* method, Thread* self,
    //                                      ArtMethod** sp)
2307 bl artQuickToInterpreterBridge
2308
Vladimir Markofd36f1f2016-08-03 18:49:58 +01002309 RESTORE_SAVE_REFS_AND_ARGS_FRAME // TODO: no need to restore arguments in this case.
Stuart Monteithb95a5342014-03-12 13:32:32 +00002310
    fmov d0, x0                 // Copy the result to d0 in case of a floating-point return.
2312
2313 RETURN_OR_DELIVER_PENDING_EXCEPTION
2314END art_quick_to_interpreter_bridge
2315
Andreas Gamped58342c2014-06-05 14:18:08 -07002316
2317//
2318// Instrumentation-related stubs
2319//
2320 .extern artInstrumentationMethodEntryFromCode
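    // Hedged sketch of the hook's C signature, inferred from the argument
    // setup below (treat the exact names and types as assumptions):
    //   extern "C" const void* artInstrumentationMethodEntryFromCode(
    //       ArtMethod* method, mirror::Object* this_object, Thread* self, uintptr_t lr);
    //   // Returns the code pointer to invoke; it is moved into xIP0 below.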
2321ENTRY art_quick_instrumentation_entry
Vladimir Markofd36f1f2016-08-03 18:49:58 +01002322 SETUP_SAVE_REFS_AND_ARGS_FRAME
Andreas Gamped58342c2014-06-05 14:18:08 -07002323
    mov x20, x0                 // Preserve the method reference in a callee-save register.
Andreas Gamped58342c2014-06-05 14:18:08 -07002325
2326 mov x2, xSELF
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002327 mov x3, xLR
2328 bl artInstrumentationMethodEntryFromCode // (Method*, Object*, Thread*, LR)
Andreas Gamped58342c2014-06-05 14:18:08 -07002329
Zheng Xub551fdc2014-07-25 11:49:42 +08002330 mov xIP0, x0 // x0 = result of call.
2331 mov x0, x20 // Reload method reference.
Andreas Gamped58342c2014-06-05 14:18:08 -07002332
Vladimir Markofd36f1f2016-08-03 18:49:58 +01002333 RESTORE_SAVE_REFS_AND_ARGS_FRAME // Note: will restore xSELF
Andreas Gamped58342c2014-06-05 14:18:08 -07002334 adr xLR, art_quick_instrumentation_exit
Zheng Xub551fdc2014-07-25 11:49:42 +08002335 br xIP0 // Tail-call method with lr set to art_quick_instrumentation_exit.
Andreas Gamped58342c2014-06-05 14:18:08 -07002336END art_quick_instrumentation_entry
2337
2338 .extern artInstrumentationMethodExitFromCode
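    // Hedged sketch of the hook's C signature (two words come back, per the
    // xIP0/xLR moves below; treat the exact declaration as an assumption):
    //   extern "C" TwoWordReturn artInstrumentationMethodExitFromCode(
    //       Thread* self, ArtMethod** sp, uint64_t gpr_res, uint64_t fpr_res);
    //   // x0 = address to return to, x1 = link register value for deoptimization.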
2339ENTRY art_quick_instrumentation_exit
2340 mov xLR, #0 // Clobber LR for later checks.
2341
Vladimir Markofd36f1f2016-08-03 18:49:58 +01002342 SETUP_SAVE_REFS_ONLY_FRAME
Andreas Gamped58342c2014-06-05 14:18:08 -07002343
    // We need to save x0 and d0. We could use a callee-save from SETUP_SAVE_REFS_ONLY_FRAME, but then
2345 // we would need to fully restore it. As there are a lot of callee-save registers, it seems
2346 // easier to have an extra small stack area.
2347
Sebastien Hertz70f8d4b2014-06-19 11:51:41 +02002348 str x0, [sp, #-16]! // Save integer result.
Andreas Gamped58342c2014-06-05 14:18:08 -07002349 .cfi_adjust_cfa_offset 16
2350 str d0, [sp, #8] // Save floating-point result.
2351
Andreas Gamped58342c2014-06-05 14:18:08 -07002352 add x1, sp, #16 // Pass SP.
2353 mov x2, x0 // Pass integer result.
2354 fmov x3, d0 // Pass floating-point result.
Sebastien Hertz70f8d4b2014-06-19 11:51:41 +02002355 mov x0, xSELF // Pass Thread.
Andreas Gamped58342c2014-06-05 14:18:08 -07002356 bl artInstrumentationMethodExitFromCode // (Thread*, SP, gpr_res, fpr_res)
2357
Zheng Xub551fdc2014-07-25 11:49:42 +08002358 mov xIP0, x0 // Return address from instrumentation call.
    mov xLR, x1                 // x1 holds the link register if we're to bounce to deoptimize.
2360
2361 ldr d0, [sp, #8] // Restore floating-point result.
Vladimir Marko215076b2016-09-07 18:05:55 +01002362 ldr x0, [sp], #16 // Restore integer result, and drop stack area.
    .cfi_adjust_cfa_offset -16  // The 16-byte scratch area has been popped.
2364
Vladimir Markofd36f1f2016-08-03 18:49:58 +01002365 POP_SAVE_REFS_ONLY_FRAME
Andreas Gamped58342c2014-06-05 14:18:08 -07002366
Zheng Xub551fdc2014-07-25 11:49:42 +08002367 br xIP0 // Tail-call out.
Andreas Gamped58342c2014-06-05 14:18:08 -07002368END art_quick_instrumentation_exit
2369
2370 /*
2371 * Instrumentation has requested that we deoptimize into the interpreter. The deoptimization
2372 * will long jump to the upcall with a special exception of -1.
2373 */
2374 .extern artDeoptimize
2375ENTRY art_quick_deoptimize
Vladimir Markofd36f1f2016-08-03 18:49:58 +01002376 SETUP_SAVE_ALL_CALLEE_SAVES_FRAME
Andreas Gamped58342c2014-06-05 14:18:08 -07002377 mov x0, xSELF // Pass thread.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002378 bl artDeoptimize // artDeoptimize(Thread*)
Serban Constantinescu86797a72014-06-19 16:17:56 +01002379 brk 0
Andreas Gamped58342c2014-06-05 14:18:08 -07002380END art_quick_deoptimize
2381
Sebastien Hertz07474662015-08-25 15:12:33 +00002382 /*
2383 * Compiled code has requested that we deoptimize into the interpreter. The deoptimization
2384 * will long jump to the upcall with a special exception of -1.
2385 */
2386 .extern artDeoptimizeFromCompiledCode
2387ENTRY art_quick_deoptimize_from_compiled_code
Vladimir Marko239d6ea2016-09-05 10:44:04 +01002388 SETUP_SAVE_EVERYTHING_FRAME
Sebastien Hertz07474662015-08-25 15:12:33 +00002389 mov x0, xSELF // Pass thread.
2390 bl artDeoptimizeFromCompiledCode // artDeoptimizeFromCompiledCode(Thread*)
2391 brk 0
2392END art_quick_deoptimize_from_compiled_code
2393
Andreas Gamped58342c2014-06-05 14:18:08 -07002394
Serban Constantinescu169489b2014-06-11 16:43:35 +01002395 /*
2396 * String's indexOf.
2397 *
2398 * TODO: Not very optimized.
2399 * On entry:
2400 * x0: string object (known non-null)
2401 * w1: char to match (known <= 0xFFFF)
2402 * w2: Starting offset in string data
2403 */
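    /*
     * A minimal C sketch of what this routine computes (uncompressed path;
     * names are illustrative):
     *
     *   int32_t IndexOf(const uint16_t* data, int32_t count, uint16_t ch, int32_t start) {
     *     if (start < 0) start = 0;
     *     if (start > count) start = count;
     *     for (int32_t i = start; i < count; ++i) {
     *       if (data[i] == ch) return i;
     *     }
     *     return -1;
     *   }
     */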
2404ENTRY art_quick_indexof
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002405 ldr w3, [x0, #MIRROR_STRING_COUNT_OFFSET]
Jeff Hao848f70a2014-01-15 13:49:50 -08002406 add x0, x0, #MIRROR_STRING_VALUE_OFFSET
jessicahandojo05765752016-09-09 19:01:32 -07002407#if (STRING_COMPRESSION_FEATURE)
2408 /* w4 holds count (with flag) and w3 holds actual length */
2409 mov w4, w3
    and w3, w3, #0x7FFFFFFF     // Clear the compression flag (bit 31) to get the length.
2411#endif
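    /* When compression is enabled, the raw count word packs the flag into
     * bit 31 (per the tbnz below); in C terms (illustrative):
     *   int32_t length = raw_count & 0x7FFFFFFF;
     *   bool compressed = raw_count < 0;   // bit 31 set
     */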
Serban Constantinescu169489b2014-06-11 16:43:35 +01002412 /* Clamp start to [0..count] */
    cmp w2, #0
    csel w2, wzr, w2, lt        // w2 = (start < 0) ? 0 : start
    cmp w2, w3
    csel w2, w3, w2, gt         // w2 = (start > count) ? count : start
2417
Serban Constantinescu169489b2014-06-11 16:43:35 +01002418 /* Save a copy to compute result */
2419 mov x5, x0
2420
jessicahandojo05765752016-09-09 19:01:32 -07002421#if (STRING_COMPRESSION_FEATURE)
2422 tbnz w4, #31, .Lstring_indexof_compressed
2423#endif
Serban Constantinescu169489b2014-06-11 16:43:35 +01002424 /* Build pointer to start of data to compare and pre-bias */
2425 add x0, x0, x2, lsl #1
2426 sub x0, x0, #2
Serban Constantinescu169489b2014-06-11 16:43:35 +01002427 /* Compute iteration count */
2428 sub w2, w3, w2
2429
2430 /*
2431 * At this point we have:
2432 * x0: start of the data to test
2433 * w1: char to compare
2434 * w2: iteration count
2435 * x5: original start of string data
2436 */
2437
2438 subs w2, w2, #4
2439 b.lt .Lindexof_remainder
2440
2441.Lindexof_loop4:
2442 ldrh w6, [x0, #2]!
2443 ldrh w7, [x0, #2]!
Zheng Xub551fdc2014-07-25 11:49:42 +08002444 ldrh wIP0, [x0, #2]!
2445 ldrh wIP1, [x0, #2]!
Serban Constantinescu169489b2014-06-11 16:43:35 +01002446 cmp w6, w1
2447 b.eq .Lmatch_0
2448 cmp w7, w1
2449 b.eq .Lmatch_1
Zheng Xub551fdc2014-07-25 11:49:42 +08002450 cmp wIP0, w1
Serban Constantinescu169489b2014-06-11 16:43:35 +01002451 b.eq .Lmatch_2
Zheng Xub551fdc2014-07-25 11:49:42 +08002452 cmp wIP1, w1
Serban Constantinescu169489b2014-06-11 16:43:35 +01002453 b.eq .Lmatch_3
2454 subs w2, w2, #4
2455 b.ge .Lindexof_loop4
2456
2457.Lindexof_remainder:
2458 adds w2, w2, #4
2459 b.eq .Lindexof_nomatch
2460
2461.Lindexof_loop1:
2462 ldrh w6, [x0, #2]!
2463 cmp w6, w1
2464 b.eq .Lmatch_3
2465 subs w2, w2, #1
2466 b.ne .Lindexof_loop1
2467
2468.Lindexof_nomatch:
2469 mov x0, #-1
2470 ret
2471
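    // The loads above use pre-increment addressing, so on a hit x0 points at
    // the last halfword loaded in the group. Each .Lmatch_k therefore backs
    // up (3 - k) * 2 bytes to the matching character; (x0 - x5) >> 1 then
    // converts the byte offset from the start of the data into a char index.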
2472.Lmatch_0:
2473 sub x0, x0, #6
2474 sub x0, x0, x5
2475 asr x0, x0, #1
2476 ret
2477.Lmatch_1:
2478 sub x0, x0, #4
2479 sub x0, x0, x5
2480 asr x0, x0, #1
2481 ret
2482.Lmatch_2:
2483 sub x0, x0, #2
2484 sub x0, x0, x5
2485 asr x0, x0, #1
2486 ret
2487.Lmatch_3:
2488 sub x0, x0, x5
2489 asr x0, x0, #1
2490 ret
jessicahandojo05765752016-09-09 19:01:32 -07002491#if (STRING_COMPRESSION_FEATURE)
2492 /*
2493 * Comparing compressed string character-per-character with
2494 * input character
2495 */
2496.Lstring_indexof_compressed:
    add x0, x0, x2              // Point at the start offset in the byte data, ...
    sub x0, x0, #1              // ... pre-biased for the pre-increment load below.
    sub w2, w3, w2              // Iteration count = length - start.
2500.Lstring_indexof_compressed_loop:
2501 subs w2, w2, #1
2502 b.lt .Lindexof_nomatch
2503 ldrb w6, [x0, #1]!
2504 cmp w6, w1
2505 b.eq .Lstring_indexof_compressed_matched
2506 b .Lstring_indexof_compressed_loop
2507.Lstring_indexof_compressed_matched:
    sub x0, x0, x5              // Byte offset equals character index for 8-bit data.
2509 ret
2510#endif
Serban Constantinescu169489b2014-06-11 16:43:35 +01002511END art_quick_indexof
Roland Levillain02b75802016-07-13 11:54:35 +01002512
2513 /*
2514 * Create a function `name` calling the ReadBarrier::Mark routine,
Roland Levillain4359e612016-07-20 11:32:19 +01002515 * getting its argument and returning its result through W register
2516 * `wreg` (corresponding to X register `xreg`), saving and restoring
2517 * all caller-save registers.
2518 *
2519 * If `wreg` is different from `w0`, the generated function follows a
2520 * non-standard runtime calling convention:
2521 * - register `wreg` is used to pass the (sole) argument of this
2522 * function (instead of W0);
2523 * - register `wreg` is used to return the result of this function
Roland Levillain02b75802016-07-13 11:54:35 +01002524 * (instead of W0);
Roland Levillain02b75802016-07-13 11:54:35 +01002525 * - W0 is treated like a normal (non-argument) caller-save register;
2526 * - everything else is the same as in the standard runtime calling
Roland Levillain4359e612016-07-20 11:32:19 +01002527 * convention (e.g. standard callee-save registers are preserved).
Roland Levillain02b75802016-07-13 11:54:35 +01002528 */
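    /*
     * Illustrative use (hedged): compiled code holding a reference in, say,
     * w5 emits
     *     bl art_quick_read_barrier_mark_reg05
     * and receives the possibly-updated (to-space) reference back in w5,
     * with all other registers preserved.
     */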
Roland Levillain4359e612016-07-20 11:32:19 +01002529.macro READ_BARRIER_MARK_REG name, wreg, xreg
Roland Levillain02b75802016-07-13 11:54:35 +01002530ENTRY \name
    // If the reference is null, there is no work to do at all.
2532 cbz \wreg, .Lret_rb_\name
Roland Levillain4359e612016-07-20 11:32:19 +01002533 /*
2534 * Allocate 46 stack slots * 8 = 368 bytes:
2535 * - 20 slots for core registers X0-X19
2536 * - 24 slots for floating-point registers D0-D7 and D16-D31
2537 * - 1 slot for return address register XLR
2538 * - 1 padding slot for 16-byte stack alignment
2539 */
Mathieu Chartier36a270a2016-07-28 18:08:51 -07002540 // Use wIP0 as temp and check the mark bit of the reference. wIP0 is not used by the compiler.
2541 ldr wIP0, [\xreg, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
Mathieu Chartier6f198e32016-11-03 11:15:04 -07002542 tbz wIP0, #LOCK_WORD_MARK_BIT_SHIFT, .Lnot_marked_rb_\name
Vladimir Marko94ce9c22016-09-30 14:50:51 +01002543.Lret_rb_\name:
Mathieu Chartier36a270a2016-07-28 18:08:51 -07002544 ret
Mathieu Chartier6f198e32016-11-03 11:15:04 -07002545.Lnot_marked_rb_\name:
    // Check if the top two bits are set; if so, the lock word holds a forwarding address.
    mvn wIP0, wIP0              // After inversion, the two state bits are zero iff they were both set.
    cmp wzr, wIP0, lsr #30
    b.eq .Lret_forwarding_address\name
Vladimir Marko94ce9c22016-09-30 14:50:51 +01002550.Lslow_rb_\name:
Mathieu Chartier6f198e32016-11-03 11:15:04 -07002551 // We must not clobber IP0 since art_quick_resolve_string makes a tail call here and relies on
2552 // IP0 being restored.
Roland Levillain4359e612016-07-20 11:32:19 +01002553 // Save all potentially live caller-save core registers.
Vladimir Marko215076b2016-09-07 18:05:55 +01002554 SAVE_TWO_REGS_INCREASE_FRAME x0, x1, 368
2555 SAVE_TWO_REGS x2, x3, 16
2556 SAVE_TWO_REGS x4, x5, 32
2557 SAVE_TWO_REGS x6, x7, 48
2558 SAVE_TWO_REGS x8, x9, 64
2559 SAVE_TWO_REGS x10, x11, 80
2560 SAVE_TWO_REGS x12, x13, 96
2561 SAVE_TWO_REGS x14, x15, 112
Vladimir Markoae6ba1f2016-09-09 11:56:05 +01002562 SAVE_TWO_REGS x16, x17, 128
2563 SAVE_TWO_REGS x18, x19, 144
Roland Levillain4359e612016-07-20 11:32:19 +01002564 // Save all potentially live caller-save floating-point registers.
2565 stp d0, d1, [sp, #160]
2566 stp d2, d3, [sp, #176]
2567 stp d4, d5, [sp, #192]
2568 stp d6, d7, [sp, #208]
2569 stp d16, d17, [sp, #224]
2570 stp d18, d19, [sp, #240]
2571 stp d20, d21, [sp, #256]
2572 stp d22, d23, [sp, #272]
2573 stp d24, d25, [sp, #288]
2574 stp d26, d27, [sp, #304]
2575 stp d28, d29, [sp, #320]
2576 stp d30, d31, [sp, #336]
2577 // Save return address.
Vladimir Marko215076b2016-09-07 18:05:55 +01002578 // (sp + #352 is a padding slot)
2579 SAVE_REG xLR, 360
Roland Levillain4359e612016-07-20 11:32:19 +01002580
2581 .ifnc \wreg, w0
2582 mov w0, \wreg // Pass arg1 - obj from `wreg`
2583 .endif
Roland Levillain02b75802016-07-13 11:54:35 +01002584 bl artReadBarrierMark // artReadBarrierMark(obj)
Roland Levillain4359e612016-07-20 11:32:19 +01002585 .ifnc \wreg, w0
2586 mov \wreg, w0 // Return result into `wreg`
2587 .endif
2588
2589 // Restore core regs, except `xreg`, as `wreg` is used to return the
2590 // result of this function (simply remove it from the stack instead).
2591 POP_REGS_NE x0, x1, 0, \xreg
2592 POP_REGS_NE x2, x3, 16, \xreg
2593 POP_REGS_NE x4, x5, 32, \xreg
2594 POP_REGS_NE x6, x7, 48, \xreg
2595 POP_REGS_NE x8, x9, 64, \xreg
2596 POP_REGS_NE x10, x11, 80, \xreg
2597 POP_REGS_NE x12, x13, 96, \xreg
2598 POP_REGS_NE x14, x15, 112, \xreg
2599 POP_REGS_NE x16, x17, 128, \xreg
2600 POP_REGS_NE x18, x19, 144, \xreg
2601 // Restore floating-point registers.
2602 ldp d0, d1, [sp, #160]
2603 ldp d2, d3, [sp, #176]
2604 ldp d4, d5, [sp, #192]
2605 ldp d6, d7, [sp, #208]
2606 ldp d16, d17, [sp, #224]
2607 ldp d18, d19, [sp, #240]
2608 ldp d20, d21, [sp, #256]
2609 ldp d22, d23, [sp, #272]
2610 ldp d24, d25, [sp, #288]
2611 ldp d26, d27, [sp, #304]
2612 ldp d28, d29, [sp, #320]
2613 ldp d30, d31, [sp, #336]
2614 // Restore return address and remove padding.
Vladimir Marko215076b2016-09-07 18:05:55 +01002615 RESTORE_REG xLR, 360
Vladimir Markoae6ba1f2016-09-09 11:56:05 +01002616 DECREASE_FRAME 368
Roland Levillain02b75802016-07-13 11:54:35 +01002617 ret
Mathieu Chartier6f198e32016-11-03 11:15:04 -07002618.Lret_forwarding_address\name:
2619 mvn wIP0, wIP0
2620 // Shift left by the forwarding address shift. This clears out the state bits since they are
2621 // in the top 2 bits of the lock word.
2622 lsl \wreg, wIP0, #LOCK_WORD_STATE_FORWARDING_ADDRESS_SHIFT
2623 ret
Roland Levillain02b75802016-07-13 11:54:35 +01002624END \name
2625.endm
2626
Roland Levillain4359e612016-07-20 11:32:19 +01002627READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg00, w0, x0
2628READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg01, w1, x1
2629READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg02, w2, x2
2630READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg03, w3, x3
2631READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg04, w4, x4
2632READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg05, w5, x5
2633READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg06, w6, x6
2634READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg07, w7, x7
2635READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg08, w8, x8
2636READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg09, w9, x9
2637READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg10, w10, x10
2638READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg11, w11, x11
2639READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg12, w12, x12
2640READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg13, w13, x13
2641READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg14, w14, x14
2642READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg15, w15, x15
// READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg16, w16, x16 (IP0 is blocked: used as a scratch register above)
Roland Levillain4359e612016-07-20 11:32:19 +01002644READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg17, w17, x17
2645READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg18, w18, x18
2646READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg19, w19, x19
2647READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg20, w20, x20
2648READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg21, w21, x21
2649READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg22, w22, x22
2650READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg23, w23, x23
2651READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg24, w24, x24
2652READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg25, w25, x25
2653READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg26, w26, x26
2654READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg27, w27, x27
2655READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg28, w28, x28
2656READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg29, w29, x29