/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "asm_support_arm64.S"

#include "arch/quick_alloc_entrypoints.S"

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveAll).
     */
.macro SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    adrp xIP0, :got:_ZN3art7Runtime9instance_E
    ldr xIP0, [xIP0, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr xIP0, [xIP0]              // xIP0 = art::Runtime::instance_ (a Runtime*).

    // xIP0 = (ArtMethod*) Runtime::instance_->callee_save_methods_[kSaveAll].
    // Loads the appropriate callee-save method.
    ldr xIP0, [xIP0, RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET]

    sub sp, sp, #176
    .cfi_adjust_cfa_offset 176

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVE != 176)
#error "SAVE_ALL_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif

    // Stack alignment filler [sp, #8].
    // FP callee-saves.
    stp d8, d9,   [sp, #16]
    stp d10, d11, [sp, #32]
    stp d12, d13, [sp, #48]
    stp d14, d15, [sp, #64]

    // GP callee-saves.
    stp x19, x20, [sp, #80]
    .cfi_rel_offset x19, 80
    .cfi_rel_offset x20, 88

    stp x21, x22, [sp, #96]
    .cfi_rel_offset x21, 96
    .cfi_rel_offset x22, 104

    stp x23, x24, [sp, #112]
    .cfi_rel_offset x23, 112
    .cfi_rel_offset x24, 120

    stp x25, x26, [sp, #128]
    .cfi_rel_offset x25, 128
    .cfi_rel_offset x26, 136

    stp x27, x28, [sp, #144]
    .cfi_rel_offset x27, 144
    .cfi_rel_offset x28, 152

    stp x29, xLR, [sp, #160]
    .cfi_rel_offset x29, 160
    .cfi_rel_offset x30, 168

    // Store ArtMethod* Runtime::callee_save_methods_[kSaveAll].
    str xIP0, [sp]
    // Place sp in Thread::Current()->top_quick_frame.
    mov xIP0, sp
    str xIP0, [xSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]
.endm
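    /*
     * For reference, the 176-byte kSaveAll frame built above (offsets derived
     * from the stores in the macro):
     *
     *   [sp, #160]  x29, LR (x30)
     *   [sp, #80]   x19..x28       GP callee-saves
     *   [sp, #16]   d8..d15        FP callee-saves
     *   [sp, #8]    padding for 16-byte alignment
     *   [sp, #0]    ArtMethod*     the kSaveAll callee-save method
     *
     * Conceptually, the first three loads do something like the following C++
     * sketch; the field names are inferred from the mangled symbol and offset
     * macro above, and the exact runtime accessors may differ:
     *
     *   art::Runtime* runtime = art::Runtime::instance_;
     *   art::ArtMethod* method = runtime->callee_save_methods_[kSaveAll];
     */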

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsOnly).
     */
.macro SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
    adrp xIP0, :got:_ZN3art7Runtime9instance_E
    ldr xIP0, [xIP0, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr xIP0, [xIP0]              // xIP0 = art::Runtime::instance_ (a Runtime*).

    // xIP0 = (ArtMethod*) Runtime::instance_->callee_save_methods_[kRefsOnly].
    // Loads the appropriate callee-save method.
    ldr xIP0, [xIP0, RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET]

    sub sp, sp, #96
    .cfi_adjust_cfa_offset 96

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE != 96)
#error "REFS_ONLY_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif

    // GP callee-saves.
    // x20 paired with ArtMethod* - see below.
    stp x21, x22, [sp, #16]
    .cfi_rel_offset x21, 16
    .cfi_rel_offset x22, 24

    stp x23, x24, [sp, #32]
    .cfi_rel_offset x23, 32
    .cfi_rel_offset x24, 40

    stp x25, x26, [sp, #48]
    .cfi_rel_offset x25, 48
    .cfi_rel_offset x26, 56

    stp x27, x28, [sp, #64]
    .cfi_rel_offset x27, 64
    .cfi_rel_offset x28, 72

    stp x29, xLR, [sp, #80]
    .cfi_rel_offset x29, 80
    .cfi_rel_offset x30, 88

    // Store ArtMethod* Runtime::callee_save_methods_[kRefsOnly], paired with x20.
    stp xIP0, x20, [sp]
    .cfi_rel_offset x20, 8

    // Place sp in Thread::Current()->top_quick_frame.
    mov xIP0, sp
    str xIP0, [xSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]
.endm
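    /*
     * The resulting 96-byte kRefsOnly frame (derived from the stores above).
     * Note that x19 and the FP callee-saves are not spilled in this frame:
     *
     *   [sp, #80]  x29, LR (x30)
     *   [sp, #16]  x21..x28   GP callee-saves
     *   [sp, #8]   x20        paired with the ArtMethod*
     *   [sp, #0]   ArtMethod* the kRefsOnly callee-save method
     */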

// TODO: Probably no need to restore registers preserved by aapcs64.
.macro RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
    // Callee-saves.
    ldr x20, [sp, #8]
    .cfi_restore x20

    ldp x21, x22, [sp, #16]
    .cfi_restore x21
    .cfi_restore x22

    ldp x23, x24, [sp, #32]
    .cfi_restore x23
    .cfi_restore x24

    ldp x25, x26, [sp, #48]
    .cfi_restore x25
    .cfi_restore x26

    ldp x27, x28, [sp, #64]
    .cfi_restore x27
    .cfi_restore x28

    ldp x29, xLR, [sp, #80]
    .cfi_restore x29
    .cfi_restore x30

    add sp, sp, #96
    .cfi_adjust_cfa_offset -96
.endm

.macro POP_REFS_ONLY_CALLEE_SAVE_FRAME
    add sp, sp, #96
    .cfi_adjust_cfa_offset -96
.endm

.macro RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME_AND_RETURN
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME
    ret
.endm


.macro SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
    sub sp, sp, #224
    .cfi_adjust_cfa_offset 224

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE != 224)
#error "REFS_AND_ARGS_CALLEE_SAVE_FRAME(ARM64) size not as expected."
#endif

    // Stack alignment filler [sp, #8].
    // FP args.
    stp d0, d1, [sp, #16]
    stp d2, d3, [sp, #32]
    stp d4, d5, [sp, #48]
    stp d6, d7, [sp, #64]

    // Core args.
    stp x1, x2, [sp, #80]
    .cfi_rel_offset x1, 80
    .cfi_rel_offset x2, 88

    stp x3, x4, [sp, #96]
    .cfi_rel_offset x3, 96
    .cfi_rel_offset x4, 104

    stp x5, x6, [sp, #112]
    .cfi_rel_offset x5, 112
    .cfi_rel_offset x6, 120

    // x7, callee-saves.
    stp x7, x20, [sp, #128]
    .cfi_rel_offset x7, 128
    .cfi_rel_offset x20, 136

    stp x21, x22, [sp, #144]
    .cfi_rel_offset x21, 144
    .cfi_rel_offset x22, 152

    stp x23, x24, [sp, #160]
    .cfi_rel_offset x23, 160
    .cfi_rel_offset x24, 168

    stp x25, x26, [sp, #176]
    .cfi_rel_offset x25, 176
    .cfi_rel_offset x26, 184

    stp x27, x28, [sp, #192]
    .cfi_rel_offset x27, 192
    .cfi_rel_offset x28, 200

    // x29 (callee-save) and LR.
    stp x29, xLR, [sp, #208]
    .cfi_rel_offset x29, 208
    .cfi_rel_offset x30, 216
.endm
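    /*
     * The resulting 224-byte kRefsAndArgs frame (derived from the stores above):
     *
     *   [sp, #208]  x29, LR (x30)
     *   [sp, #128]  x7, x20..x28   last core arg plus GP callee-saves
     *   [sp, #80]   x1..x6         core args
     *   [sp, #16]   d0..d7         FP args
     *   [sp, #8]    padding for 16-byte alignment
     *   [sp, #0]    ArtMethod*     stored by the callers of this macro
     */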

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kRefsAndArgs).
     *
     * TODO This is probably too conservative - saving FP & LR.
     */
.macro SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME
    adrp xIP0, :got:_ZN3art7Runtime9instance_E
    ldr xIP0, [xIP0, #:got_lo12:_ZN3art7Runtime9instance_E]

    // Our registers aren't intermixed - just spill in order.
    ldr xIP0, [xIP0]              // xIP0 = art::Runtime::instance_ (a Runtime*).

    // xIP0 = (ArtMethod*) Runtime::instance_->callee_save_methods_[kRefsAndArgs].
    ldr xIP0, [xIP0, RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET]

    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL

    str xIP0, [sp]    // Store ArtMethod* Runtime::callee_save_methods_[kRefsAndArgs].
    // Place sp in Thread::Current()->top_quick_frame.
    mov xIP0, sp
    str xIP0, [xSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]
.endm

.macro SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_X0
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_INTERNAL
    str x0, [sp, #0]  // Store ArtMethod* to bottom of stack.
    // Place sp in Thread::Current()->top_quick_frame.
    mov xIP0, sp
    str xIP0, [xSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]
.endm

// TODO: Probably no need to restore registers preserved by aapcs64.
.macro RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
    // FP args.
    ldp d0, d1, [sp, #16]
    ldp d2, d3, [sp, #32]
    ldp d4, d5, [sp, #48]
    ldp d6, d7, [sp, #64]

    // Core args.
    ldp x1, x2, [sp, #80]
    .cfi_restore x1
    .cfi_restore x2

    ldp x3, x4, [sp, #96]
    .cfi_restore x3
    .cfi_restore x4

    ldp x5, x6, [sp, #112]
    .cfi_restore x5
    .cfi_restore x6

    // x7, callee-saves.
    ldp x7, x20, [sp, #128]
    .cfi_restore x7
    .cfi_restore x20

    ldp x21, x22, [sp, #144]
    .cfi_restore x21
    .cfi_restore x22

    ldp x23, x24, [sp, #160]
    .cfi_restore x23
    .cfi_restore x24

    ldp x25, x26, [sp, #176]
    .cfi_restore x25
    .cfi_restore x26

    ldp x27, x28, [sp, #192]
    .cfi_restore x27
    .cfi_restore x28

    // x29 (callee-save) and LR.
    ldp x29, xLR, [sp, #208]
    .cfi_restore x29
    .cfi_restore x30

    add sp, sp, #224
    .cfi_adjust_cfa_offset -224
.endm

.macro RETURN_IF_RESULT_IS_ZERO
    cbnz x0, 1f       // Result non-zero: branch over the return.
    ret
1:
.endm

.macro RETURN_IF_RESULT_IS_NON_ZERO
    cbz x0, 1f        // Result zero: branch over the return.
    ret
1:
.endm

    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_.
     */
.macro DELIVER_PENDING_EXCEPTION
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov x0, xSELF

    // Point of no return.
    b artDeliverPendingExceptionFromCode  // artDeliverPendingExceptionFromCode(Thread*)
    brk 0                                 // Unreached.
.endm
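    /*
     * The C++ side of the branch above is the runtime entrypoint that reads
     * Thread::Current()->exception_ and long-jumps to the catch handler. A
     * sketch of its declaration (the exact attributes may differ):
     *
     *   extern "C" void artDeliverPendingExceptionFromCode(art::Thread* self);
     *
     * It never returns, which is why the brk 0 after the branch is unreachable.
     */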

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_REG reg
    ldr \reg, [xSELF, #THREAD_EXCEPTION_OFFSET]  // Get exception field.
    cbnz \reg, 1f
    ret
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION
    RETURN_OR_DELIVER_PENDING_EXCEPTION_REG xIP0
.endm

// Same as above with x1. This is helpful in stubs that want to avoid clobbering another register.
.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
    RETURN_OR_DELIVER_PENDING_EXCEPTION_REG x1
.endm

.macro RETURN_IF_W0_IS_ZERO_OR_DELIVER
    cbnz w0, 1f       // Result non-zero: branch over the return.
    ret
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro NO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // Save all registers as basis for long jump context.
    mov x0, xSELF                     // Pass Thread::Current.
    b \cxx_name                       // \cxx_name(Thread*)
END \c_name
.endm

.macro ONE_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // Save all registers as basis for long jump context.
    mov x1, xSELF                     // Pass Thread::Current.
    b \cxx_name                       // \cxx_name(arg, Thread*)
    brk 0
END \c_name
.endm

.macro TWO_ARG_RUNTIME_EXCEPTION c_name, cxx_name
    .extern \cxx_name
ENTRY \c_name
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME  // Save all registers as basis for long jump context.
    mov x2, xSELF                     // Pass Thread::Current.
    b \cxx_name                       // \cxx_name(arg1, arg2, Thread*)
    brk 0
END \c_name
.endm
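    /*
     * The three macros above expect no-return C++ "throw" entrypoints of
     * matching arity, with Thread* always passed last. Sketches of the shapes
     * they branch to (parameter names and types are assumptions based on the
     * comments below; only the calling convention is guaranteed here):
     *
     *   extern "C" void artThrowDivZeroFromCode(art::Thread* self);
     *   extern "C" void artDeliverExceptionFromCode(art::mirror::Throwable* exception,
     *                                               art::Thread* self);
     *   extern "C" void artThrowArrayBoundsFromCode(int index, int limit,
     *                                               art::Thread* self);
     */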

    /*
     * Called by managed code, saves callee saves and then calls artThrowException
     * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode

    /*
     * Called by managed code to create and deliver a NullPointerException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode

    /*
     * Called by managed code to create and deliver an ArithmeticException.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero, artThrowDivZeroFromCode

    /*
     * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
     * index, arg2 holds limit.
     */
TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromCode

    /*
     * Called by managed code to create and deliver a StringIndexOutOfBoundsException
     * as if thrown from a call to String.charAt(). Arg1 holds index, arg2 holds limit.
     */
TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_string_bounds, artThrowStringBoundsFromCode

    /*
     * Called by managed code to create and deliver a StackOverflowError.
     */
NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode

    /*
     * Called by managed code to create and deliver a NoSuchMethodError.
     */
ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFromCode

    /*
     * All generated callsites for interface invokes and invocation slow paths will load arguments
     * as usual - except instead of loading arg0/x0 with the target Method*, arg0/x0 will contain
     * the method_idx. This wrapper will save arg1-arg3, and call the appropriate C helper.
     * NOTE: "this" is the first visible argument of the target, and so can be found in arg1/x1.
     *
     * The helper will attempt to locate the target and return a 128-bit result in x0/x1 consisting
     * of the target Method* in x0 and method->code_ in x1.
     *
     * If unsuccessful, the helper will return null/????. There will be a pending exception in the
     * thread and we branch to another stub to deliver it.
     *
     * On success this wrapper will restore arguments and *jump* to the target, leaving the lr
     * pointing back to the original caller.
     *
     * Adapted from ARM32 code.
     *
     * Clobbers xIP0.
     */
.macro INVOKE_TRAMPOLINE_BODY cxx_name
    .extern \cxx_name
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME  // Save callee saves in case allocation triggers GC.
    // Helper signature is always
    // (method_idx, this_object, Thread*, SP).

    mov x2, xSELF                          // Pass Thread::Current.
    mov x3, sp
    bl \cxx_name                           // (method_idx, this, Thread*, SP)
    mov xIP0, x1                           // Save Method*->code_.
    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
    cbz x0, 1f                             // Did we find the target? If not, go to exception delivery.
    br xIP0                                // Tail call to target.
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro INVOKE_TRAMPOLINE c_name, cxx_name
ENTRY \c_name
    INVOKE_TRAMPOLINE_BODY \cxx_name
END \c_name
.endm
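    /*
     * A sketch of the C++ helper shape these trampolines expect. The struct
     * name is an assumption; the key point, per the comment above, is the
     * two-word result carried back in x0/x1:
     *
     *   struct TwoWordReturn {
     *     art::ArtMethod* method;  // returned in x0; null on failure
     *     const void* code;        // returned in x1; method->code_ on success
     *   };
     *
     *   extern "C" TwoWordReturn artInvokeVirtualTrampolineWithAccessCheck(
     *       uint32_t method_idx, art::mirror::Object* this_object,
     *       art::Thread* self, art::ArtMethod** sp);
     *
     * On failure x0 is null and a pending exception is set on the thread.
     */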

INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck

INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck


.macro INVOKE_STUB_CREATE_FRAME

SAVE_SIZE=15*8   // x4, x5, x19, x20, x21, x22, x23, x24, x25, x26, x27, x28, SP, LR, FP saved.
SAVE_SIZE_AND_METHOD=SAVE_SIZE+8

    mov x9, sp                          // Save stack pointer.
    .cfi_register sp, x9

    add x10, x2, #SAVE_SIZE_AND_METHOD  // Calculate size of frame.
    sub x10, sp, x10                    // Calculate SP position - saves + ArtMethod* + args.
    and x10, x10, #~0xf                 // Enforce 16 byte stack alignment.
    mov sp, x10                         // Set new SP.

    sub x10, x9, #SAVE_SIZE             // Calculate new FP (later). Done here as we must move SP
    .cfi_def_cfa_register x10           // before this.
    .cfi_adjust_cfa_offset SAVE_SIZE

    str x28, [x10, #112]
    .cfi_rel_offset x28, 112

    stp x26, x27, [x10, #96]
    .cfi_rel_offset x26, 96
    .cfi_rel_offset x27, 104

    stp x24, x25, [x10, #80]
    .cfi_rel_offset x24, 80
    .cfi_rel_offset x25, 88

    stp x22, x23, [x10, #64]
    .cfi_rel_offset x22, 64
    .cfi_rel_offset x23, 72

    stp x20, x21, [x10, #48]
    .cfi_rel_offset x20, 48
    .cfi_rel_offset x21, 56

    stp x9, x19, [x10, #32]             // Save old stack pointer and x19.
    .cfi_rel_offset sp, 32
    .cfi_rel_offset x19, 40

    stp x4, x5, [x10, #16]              // Save result and shorty addresses.
    .cfi_rel_offset x4, 16
    .cfi_rel_offset x5, 24

    stp xFP, xLR, [x10]                 // Store LR & FP.
    .cfi_rel_offset x29, 0
    .cfi_rel_offset x30, 8

    mov xFP, x10                        // Use xFP now, as it's callee-saved.
    .cfi_def_cfa_register x29
    mov xSELF, x3                       // Move thread pointer into SELF register.

    // Copy arguments into stack frame.
    // Use simple copy routine for now.
    // 4 bytes per slot.
    // X1 - source address
    // W2 - args length
    // X9 - destination address.
    // W10 - temporary
    add x9, sp, #8                      // Destination is bottom of stack + 8 (above the null ArtMethod*).

    // Copy parameters into the stack. Use numeric label as this is a macro and Clang's assembler
    // does not have unique-id variables.
1:
    cmp w2, #0
    beq 2f
    sub w2, w2, #4                      // Need 65536 bytes of range.
    ldr w10, [x1, x2]
    str w10, [x9, x2]

    b 1b

2:
    // Store null into ArtMethod* at bottom of frame.
    str xzr, [sp]
.endm
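    /*
     * The copy loop above is equivalent to this C sketch (x1 = src, w2 = length
     * in bytes, x9 = dst; arguments are packed 4-byte vreg slots):
     *
     *   while (len != 0) {
     *     len -= 4;
     *     *(uint32_t*)(dst + len) = *(uint32_t*)(src + len);
     *   }
     */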

.macro INVOKE_STUB_CALL_AND_RETURN

    // Load the method's quick code entry point.
    ldr x9, [x0, #ART_METHOD_QUICK_CODE_OFFSET_64]
    // Branch to method.
    blr x9

    // Restore return value address and shorty address.
    ldp x4, x5, [xFP, #16]
    .cfi_restore x4
    .cfi_restore x5

    ldr x28, [xFP, #112]
    .cfi_restore x28

    ldp x26, x27, [xFP, #96]
    .cfi_restore x26
    .cfi_restore x27

    ldp x24, x25, [xFP, #80]
    .cfi_restore x24
    .cfi_restore x25

    ldp x22, x23, [xFP, #64]
    .cfi_restore x22
    .cfi_restore x23

    ldp x20, x21, [xFP, #48]
    .cfi_restore x20
    .cfi_restore x21

    // Store result (w0/x0/s0/d0) appropriately, depending on resultType.
    ldrb w10, [x5]

    // Check the return type and store the correct register into the jvalue in memory.
    // Use numeric label as this is a macro and Clang's assembler does not have unique-id variables.

    // Don't set anything for a void type.
    cmp w10, #'V'
    beq 3f

    // Is it a double?
    cmp w10, #'D'
    bne 1f
    str d0, [x4]
    b 3f

1:  // Is it a float?
    cmp w10, #'F'
    bne 2f
    str s0, [x4]
    b 3f

2:  // Just store x0. Doesn't matter if it is 64 or 32 bits.
    str x0, [x4]

3:  // Finish up.
    ldp x2, x19, [xFP, #32]  // Restore stack pointer and x19.
    .cfi_restore x19
    mov sp, x2
    .cfi_restore sp

    ldp xFP, xLR, [xFP]      // Restore old frame pointer and link register.
    .cfi_restore x29
    .cfi_restore x30

    ret

.endm


    /*
     *  extern "C" void art_quick_invoke_stub(ArtMethod* method,   x0
     *                                        uint32_t* args,      x1
     *                                        uint32_t argsize,    w2
     *                                        Thread* self,        x3
     *                                        JValue* result,      x4
     *                                        char* shorty);       x5
     *  +----------------------+
     *  |                      |
     *  |  C/C++ frame         |
     *  |       LR''           |
     *  |       FP''           | <- SP'
     *  +----------------------+
     *  +----------------------+
     *  |        x28           | <- TODO: Remove callee-saves.
     *  |         :            |
     *  |        x19           |
     *  |        SP'           |
     *  |        X5            |
     *  |        X4            |        Saved registers
     *  |        LR'           |
     *  |        FP'           | <- FP
     *  +----------------------+
     *  | uint32_t out[n-1]    |
     *  |    :      :          |        Outs
     *  | uint32_t out[0]      |
     *  | ArtMethod*           | <- SP  value=null
     *  +----------------------+
     *
     * Outgoing registers:
     *  x0    - Method*
     *  x1-x7 - integer parameters.
     *  d0-d7 - Floating point parameters.
     *  xSELF = self
     *  SP = & of ArtMethod*
     *  x1 = "this" pointer.
     *
     */
ENTRY art_quick_invoke_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr x11, .LstoreW2
    adr x12, .LstoreX2
    adr x13, .LstoreS0
    adr x14, .LstoreD0

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1         // Load shorty address, plus one to skip the return type.
    ldr w1, [x9], #4        // Load "this" parameter, and increment arg pointer.

    // Loop to fill registers.
.LfillRegisters:
    ldrb w17, [x10], #1     // Load next character in signature, and increment.
    cbz w17, .LcallFunction // Exit at end of signature. Shorty 0 terminated.

    cmp w17, #'F'           // Is this a float?
    bne .LisDouble

    cmp x15, #8*12          // Skip this load if all registers full.
    beq .Ladvance4

    add x17, x13, x15       // Calculate subroutine to jump to.
    br x17

.LisDouble:
    cmp w17, #'D'           // Is this a double?
    bne .LisLong

    cmp x15, #8*12          // Skip this load if all registers full.
    beq .Ladvance8

    add x17, x14, x15       // Calculate subroutine to jump to.
    br x17

.LisLong:
    cmp w17, #'J'           // Is this a long?
    bne .LisOther

    cmp x8, #6*12           // Skip this load if all registers full.
    beq .Ladvance8

    add x17, x12, x8        // Calculate subroutine to jump to.
    br x17

.LisOther:                  // Everything else takes one vReg.
    cmp x8, #6*12           // Skip this load if all registers full.
    beq .Ladvance4

    add x17, x11, x8        // Calculate subroutine to jump to.
    br x17

.Ladvance4:
    add x9, x9, #4
    b .LfillRegisters

.Ladvance8:
    add x9, x9, #8
    b .LfillRegisters

    // Macro for loading a parameter into a register.
    //  counter - the register with offset into these tables.
    //  size - the size of the register - 4 or 8 bytes.
    //  register - the name of the register to be loaded.
.macro LOADREG counter size register return
    ldr \register, [x9], #\size
    add \counter, \counter, 12
    b \return
.endm
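    // For example, "LOADREG x8 4 w2 .LfillRegisters" expands to:
    //
    //     ldr w2, [x9], #4      // Load a 4-byte arg into w2, advance the arg pointer.
    //     add x8, x8, 12        // Each expansion is 3 instructions = 12 bytes,
    //     b .LfillRegisters     // so the counter now indexes the next table entry.
    //
    // This is why the "registers full" checks above compare the counters
    // against N*12: N filled registers means the computed branch would run
    // off the end of the N-entry table below.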

    // Store ints.
.LstoreW2:
    LOADREG x8 4 w2 .LfillRegisters
    LOADREG x8 4 w3 .LfillRegisters
    LOADREG x8 4 w4 .LfillRegisters
    LOADREG x8 4 w5 .LfillRegisters
    LOADREG x8 4 w6 .LfillRegisters
    LOADREG x8 4 w7 .LfillRegisters

    // Store longs.
.LstoreX2:
    LOADREG x8 8 x2 .LfillRegisters
    LOADREG x8 8 x3 .LfillRegisters
    LOADREG x8 8 x4 .LfillRegisters
    LOADREG x8 8 x5 .LfillRegisters
    LOADREG x8 8 x6 .LfillRegisters
    LOADREG x8 8 x7 .LfillRegisters

    // Store singles.
.LstoreS0:
    LOADREG x15 4 s0 .LfillRegisters
    LOADREG x15 4 s1 .LfillRegisters
    LOADREG x15 4 s2 .LfillRegisters
    LOADREG x15 4 s3 .LfillRegisters
    LOADREG x15 4 s4 .LfillRegisters
    LOADREG x15 4 s5 .LfillRegisters
    LOADREG x15 4 s6 .LfillRegisters
    LOADREG x15 4 s7 .LfillRegisters

    // Store doubles.
.LstoreD0:
    LOADREG x15 8 d0 .LfillRegisters
    LOADREG x15 8 d1 .LfillRegisters
    LOADREG x15 8 d2 .LfillRegisters
    LOADREG x15 8 d3 .LfillRegisters
    LOADREG x15 8 d4 .LfillRegisters
    LOADREG x15 8 d5 .LfillRegisters
    LOADREG x15 8 d6 .LfillRegisters
    LOADREG x15 8 d7 .LfillRegisters


.LcallFunction:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_stub
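    /*
     * A sketch of how the runtime side might call this stub; the caller and
     * argument values are illustrative assumptions, the signature matches the
     * comment above:
     *
     *   extern "C" void art_quick_invoke_stub(art::ArtMethod* method, uint32_t* args,
     *                                         uint32_t args_size, art::Thread* self,
     *                                         art::JValue* result, const char* shorty);
     *
     *   // args holds packed 4-byte vreg slots starting with "this";
     *   // shorty[0] is the return type, shorty[1..] the parameter types,
     *   // e.g. "ILF" = returns int, takes a long and a float.
     *   art_quick_invoke_stub(method, args, args_size, self, &result, "ILF");
     */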

    /*  extern "C"
     *  void art_quick_invoke_static_stub(ArtMethod* method,   x0
     *                                    uint32_t* args,      x1
     *                                    uint32_t argsize,    w2
     *                                    Thread* self,        x3
     *                                    JValue* result,      x4
     *                                    char* shorty);       x5
     */
ENTRY art_quick_invoke_static_stub
    // Spill registers as per AAPCS64 calling convention.
    INVOKE_STUB_CREATE_FRAME

    // Fill registers x/w1 to x/w7 and s/d0 to s/d7 with parameters.
    // Parse the passed shorty to determine which register to load.
    // Load addresses for routines that load WXSD registers.
    adr x11, .LstoreW1_2
    adr x12, .LstoreX1_2
    adr x13, .LstoreS0_2
    adr x14, .LstoreD0_2

    // Initialize routine offsets to 0 for integers and floats.
    // x8 for integers, x15 for floating point.
    mov x8, #0
    mov x15, #0

    add x10, x5, #1          // Load shorty address, plus one to skip the return type.

    // Loop to fill registers.
.LfillRegisters2:
    ldrb w17, [x10], #1      // Load next character in signature, and increment.
    cbz w17, .LcallFunction2 // Exit at end of signature. Shorty 0 terminated.

    cmp w17, #'F'            // Is this a float?
    bne .LisDouble2

    cmp x15, #8*12           // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x13, x15        // Calculate subroutine to jump to.
    br x17

.LisDouble2:
    cmp w17, #'D'            // Is this a double?
    bne .LisLong2

    cmp x15, #8*12           // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x14, x15        // Calculate subroutine to jump to.
    br x17

.LisLong2:
    cmp w17, #'J'            // Is this a long?
    bne .LisOther2

    cmp x8, #7*12            // Skip this load if all registers full.
    beq .Ladvance8_2

    add x17, x12, x8         // Calculate subroutine to jump to.
    br x17

.LisOther2:                  // Everything else takes one vReg.
    cmp x8, #7*12            // Skip this load if all registers full.
    beq .Ladvance4_2

    add x17, x11, x8         // Calculate subroutine to jump to.
    br x17

.Ladvance4_2:
    add x9, x9, #4
    b .LfillRegisters2

.Ladvance8_2:
    add x9, x9, #8
    b .LfillRegisters2

    // Store ints. There is no "this", so integer args start at x1 and there
    // are seven entries per table (hence the 7*12 checks above).
.LstoreW1_2:
    LOADREG x8 4 w1 .LfillRegisters2
    LOADREG x8 4 w2 .LfillRegisters2
    LOADREG x8 4 w3 .LfillRegisters2
    LOADREG x8 4 w4 .LfillRegisters2
    LOADREG x8 4 w5 .LfillRegisters2
    LOADREG x8 4 w6 .LfillRegisters2
    LOADREG x8 4 w7 .LfillRegisters2

    // Store longs.
.LstoreX1_2:
    LOADREG x8 8 x1 .LfillRegisters2
    LOADREG x8 8 x2 .LfillRegisters2
    LOADREG x8 8 x3 .LfillRegisters2
    LOADREG x8 8 x4 .LfillRegisters2
    LOADREG x8 8 x5 .LfillRegisters2
    LOADREG x8 8 x6 .LfillRegisters2
    LOADREG x8 8 x7 .LfillRegisters2

    // Store singles.
.LstoreS0_2:
    LOADREG x15 4 s0 .LfillRegisters2
    LOADREG x15 4 s1 .LfillRegisters2
    LOADREG x15 4 s2 .LfillRegisters2
    LOADREG x15 4 s3 .LfillRegisters2
    LOADREG x15 4 s4 .LfillRegisters2
    LOADREG x15 4 s5 .LfillRegisters2
    LOADREG x15 4 s6 .LfillRegisters2
    LOADREG x15 4 s7 .LfillRegisters2

    // Store doubles.
.LstoreD0_2:
    LOADREG x15 8 d0 .LfillRegisters2
    LOADREG x15 8 d1 .LfillRegisters2
    LOADREG x15 8 d2 .LfillRegisters2
    LOADREG x15 8 d3 .LfillRegisters2
    LOADREG x15 8 d4 .LfillRegisters2
    LOADREG x15 8 d5 .LfillRegisters2
    LOADREG x15 8 d6 .LfillRegisters2
    LOADREG x15 8 d7 .LfillRegisters2


.LcallFunction2:

    INVOKE_STUB_CALL_AND_RETURN

END art_quick_invoke_static_stub
| 921 | |
Andreas Gampe | 03906cf | 2014-04-07 12:08:28 -0700 | [diff] [blame] | 922 | |
Stuart Monteith | b95a534 | 2014-03-12 13:32:32 +0000 | [diff] [blame] | 923 | |
Nicolas Geoffray | b331feb | 2016-02-05 16:51:53 +0000 | [diff] [blame] | 924 | /* extern"C" void art_quick_osr_stub(void** stack, x0 |
| 925 | * size_t stack_size_in_bytes, x1 |
| 926 | * const uin8_t* native_pc, x2 |
| 927 | * JValue *result, x3 |
| 928 | * char *shorty, x4 |
| 929 | * Thread *self) x5 |
| 930 | */ |
| 931 | ENTRY art_quick_osr_stub |
| 932 | SAVE_SIZE=15*8 // x3, x4, x19, x20, x21, x22, x23, x24, x25, x26, x27, x28, SP, LR, FP saved. |
| 933 | mov x9, sp // Save stack pointer. |
| 934 | .cfi_register sp,x9 |
| 935 | |
| 936 | sub x10, sp, # SAVE_SIZE |
| 937 | and x10, x10, # ~0xf // Enforce 16 byte stack alignment. |
| 938 | mov sp, x10 // Set new SP. |
| 939 | |
| 940 | str x28, [sp, #112] |
| 941 | stp x26, x27, [sp, #96] |
| 942 | stp x24, x25, [sp, #80] |
| 943 | stp x22, x23, [sp, #64] |
| 944 | stp x20, x21, [sp, #48] |
| 945 | stp x9, x19, [sp, #32] // Save old stack pointer and x19. |
| 946 | stp x3, x4, [sp, #16] // Save result and shorty addresses. |
| 947 | stp xFP, xLR, [sp] // Store LR & FP. |
| 948 | mov xSELF, x5 // Move thread pointer into SELF register. |
| 949 | |
| 950 | sub sp, sp, #16 |
| 951 | str xzr, [sp] // Store null for ArtMethod* slot |
| 952 | // Branch to stub. |
| 953 | bl .Losr_entry |
| 954 | add sp, sp, #16 |
| 955 | |
| 956 | // Restore return value address and shorty address. |
| 957 | ldp x3,x4, [sp, #16] |
| 958 | ldr x28, [sp, #112] |
| 959 | ldp x26, x27, [sp, #96] |
| 960 | ldp x24, x25, [sp, #80] |
| 961 | ldp x22, x23, [sp, #64] |
| 962 | ldp x20, x21, [sp, #48] |
| 963 | |
| 964 | // Store result (w0/x0/s0/d0) appropriately, depending on resultType. |
| 965 | ldrb w10, [x4] |
| 966 | |
| 967 | // Check the return type and store the correct register into the jvalue in memory. |
| 968 | |
| 969 | // Don't set anything for a void type. |
| 970 | cmp w10, #'V' |
| 971 | beq .Losr_exit |
| 972 | |
| 973 | // Is it a double? |
| 974 | cmp w10, #'D' |
| 975 | bne .Lno_double |
| 976 | str d0, [x3] |
| 977 | b .Losr_exit |
| 978 | |
| 979 | .Lno_double: // Is it a float? |
| 980 | cmp w10, #'F' |
| 981 | bne .Lno_float |
| 982 | str s0, [x3] |
| 983 | b .Losr_exit |
| 984 | |
| 985 | .Lno_float: // Just store x0. Doesn't matter if it is 64 or 32 bits. |
| 986 | str x0, [x3] |
| 987 | |
| 988 | .Losr_exit: // Finish up. |
| 989 | ldp x2, x19, [sp, #32] // Restore stack pointer and x19. |
| 990 | ldp xFP, xLR, [sp] // Restore old frame pointer and link register. |
| 991 | mov sp, x2 |
| 992 | ret |
| 993 | |
| 994 | .Losr_entry: |
| 995 | // Update stack pointer for the callee |
| 996 | sub sp, sp, x1 |
| 997 | |
| 998 | // Update link register slot expected by the callee. |
| 999 | sub w1, w1, #8 |
| 1000 | str lr, [sp, x1] |
| 1001 | |
| 1002 | // Copy arguments into stack frame. |
| 1003 | // Use simple copy routine for now. |
| 1004 | // 4 bytes per slot. |
| 1005 | // X0 - source address |
| 1006 | // W1 - args length |
| 1007 | // SP - destination address. |
| 1008 | // W10 - temporary |
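    // In C-like terms, the copy loop below is roughly (a sketch; names are
    // illustrative, not from the source):
    //   while (args_length != 0) {
    //     args_length -= 4;
    //     *(uint32_t*)((char*)sp + args_length) = *(uint32_t*)((char*)stack + args_length);
    //   }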
| 1009 | .Losr_loop_entry: |
| 1010 | cmp w1, #0 |
| 1011 | beq .Losr_loop_exit |
| 1012 | sub w1, w1, #4 |
| 1013 | ldr w10, [x0, x1] |
| 1014 | str w10, [sp, x1] |
| 1015 | b .Losr_loop_entry |
| 1016 | |
| 1017 | .Losr_loop_exit: |
| 1018 | // Branch to the OSR entry point. |
| 1019 | br x2 |
| 1020 | |
| 1021 | END art_quick_osr_stub |
| 1022 | |
Stuart Monteith | b95a534 | 2014-03-12 13:32:32 +0000 | [diff] [blame] | 1023 | /* |
| 1024 | * On entry x0 is uintptr_t* gprs_ and x1 is uint64_t* fprs_ |
| 1025 | */ |
| 1026 | |
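/*
 * Context layout assumed by the loads below (inferred from the offsets used;
 * a sketch, not authoritative):
 *   gprs_[0..28] = x0..x28, gprs_[29] = FP, gprs_[30] = LR, gprs_[31] = SP,
 *   gprs_[32]    = unused slot for XZR, gprs_[33] = PC;
 *   fprs_[0..31] = d0..d31.
 */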
| 1027 | ENTRY art_quick_do_long_jump |
| 1028 | // Load FPRs |
| 1029 | ldp d0, d1, [x1], #16 |
| 1030 | ldp d2, d3, [x1], #16 |
| 1031 | ldp d4, d5, [x1], #16 |
| 1032 | ldp d6, d7, [x1], #16 |
| 1033 | ldp d8, d9, [x1], #16 |
| 1034 | ldp d10, d11, [x1], #16 |
| 1035 | ldp d12, d13, [x1], #16 |
| 1036 | ldp d14, d15, [x1], #16 |
| 1037 | ldp d16, d17, [x1], #16 |
| 1038 | ldp d18, d19, [x1], #16 |
| 1039 | ldp d20, d21, [x1], #16 |
| 1040 | ldp d22, d23, [x1], #16 |
| 1041 | ldp d24, d25, [x1], #16 |
| 1042 | ldp d26, d27, [x1], #16 |
| 1043 | ldp d28, d29, [x1], #16 |
| 1044 | ldp d30, d31, [x1] |
| 1045 | |
| 1046 | // Load GPRs |
| 1047 | // TODO: lots of those are smashed, could optimize. |
| 1048 | add x0, x0, #30*8 |
Andreas Gampe | 639bdd1 | 2015-06-03 11:22:45 -0700 | [diff] [blame] | 1049 | ldp x30, x1, [x0], #-16 // LR & SP |
Stuart Monteith | b95a534 | 2014-03-12 13:32:32 +0000 | [diff] [blame] | 1050 | ldp x28, x29, [x0], #-16 |
| 1051 | ldp x26, x27, [x0], #-16 |
| 1052 | ldp x24, x25, [x0], #-16 |
| 1053 | ldp x22, x23, [x0], #-16 |
| 1054 | ldp x20, x21, [x0], #-16 |
| 1055 | ldp x18, x19, [x0], #-16 |
| 1056 | ldp x16, x17, [x0], #-16 |
| 1057 | ldp x14, x15, [x0], #-16 |
| 1058 | ldp x12, x13, [x0], #-16 |
| 1059 | ldp x10, x11, [x0], #-16 |
| 1060 | ldp x8, x9, [x0], #-16 |
| 1061 | ldp x6, x7, [x0], #-16 |
| 1062 | ldp x4, x5, [x0], #-16 |
| 1063 | ldp x2, x3, [x0], #-16 |
| 1064 | mov sp, x1 |
| 1065 | |
    // Need to load PC; it's at the end (after the space for the unused XZR). Use x1.
| 1067 | ldr x1, [x0, #33*8] |
| 1068 | // And the value of x0. |
| 1069 | ldr x0, [x0] |
| 1070 | |
| 1071 | br x1 |
Stuart Monteith | b95a534 | 2014-03-12 13:32:32 +0000 | [diff] [blame] | 1072 | END art_quick_do_long_jump |
| 1073 | |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1074 | /* |
Andreas Gampe | 4fc046e | 2014-05-06 16:56:39 -0700 | [diff] [blame] | 1075 | * Entry from managed code that calls artLockObjectFromCode, may block for GC. x0 holds the |
| 1076 | * possibly null object to lock. |
| 1077 | * |
| 1078 | * Derived from arm32 code. |
| 1079 | */ |
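    /*
     * Thin lock word layout assumed by the checks below (inferred from the
     * shifts and masks used; a sketch, not authoritative):
     *   bits 30-31: state (zero for thin-locked/unlocked)
     *   bits 28-29: read barrier state
     *   bits 16-27: thin lock count
     *   bits  0-15: owner thread id (zero when unlocked)
     */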
| 1080 | .extern artLockObjectFromCode |
| 1081 | ENTRY art_quick_lock_object |
| 1082 | cbz w0, .Lslow_lock |
    add    x4, x0, #MIRROR_OBJECT_LOCK_WORD_OFFSET  // exclusive load/store takes no immediate offset, so form the address first
Andreas Gampe | 4fc046e | 2014-05-06 16:56:39 -0700 | [diff] [blame] | 1084 | .Lretry_lock: |
| 1085 | ldr w2, [xSELF, #THREAD_ID_OFFSET] // TODO: Can the thread ID really change during the loop? |
| 1086 | ldxr w1, [x4] |
Hiroshi Yamauchi | e15ea08 | 2015-02-09 17:11:42 -0800 | [diff] [blame] | 1087 | mov x3, x1 |
| 1088 | and w3, w3, #LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED // zero the read barrier bits |
| 1089 | cbnz w3, .Lnot_unlocked // already thin locked |
| 1090 | // unlocked case - x1: original lock word that's zero except for the read barrier bits. |
| 1091 | orr x2, x1, x2 // x2 holds thread id with count of 0 with preserved read barrier bits |
Andreas Gampe | 4fc046e | 2014-05-06 16:56:39 -0700 | [diff] [blame] | 1092 | stxr w3, w2, [x4] |
Hiroshi Yamauchi | e15ea08 | 2015-02-09 17:11:42 -0800 | [diff] [blame] | 1093 | cbnz w3, .Llock_stxr_fail // store failed, retry |
    dmb    ishld                      // acquire-style (LoadLoad|LoadStore) memory barrier
Andreas Gampe | 4fc046e | 2014-05-06 16:56:39 -0700 | [diff] [blame] | 1095 | ret |
Hiroshi Yamauchi | e15ea08 | 2015-02-09 17:11:42 -0800 | [diff] [blame] | 1096 | .Lnot_unlocked: // x1: original lock word |
| 1097 | lsr w3, w1, LOCK_WORD_STATE_SHIFT |
Andreas Gampe | 4fc046e | 2014-05-06 16:56:39 -0700 | [diff] [blame] | 1098 | cbnz w3, .Lslow_lock // if either of the top two bits are set, go slow path |
| 1099 | eor w2, w1, w2 // lock_word.ThreadId() ^ self->ThreadId() |
| 1100 | uxth w2, w2 // zero top 16 bits |
    cbnz   w2, .Lslow_lock            // thread ids differ -> contention, take the slow path;
                                      // else same thread -> recursive thin lock, fall through
Hiroshi Yamauchi | e15ea08 | 2015-02-09 17:11:42 -0800 | [diff] [blame] | 1103 | mov x3, x1 // copy the lock word to check count overflow. |
| 1104 | and w3, w3, #LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED // zero the read barrier bits. |
| 1105 | add w2, w3, #LOCK_WORD_THIN_LOCK_COUNT_ONE // increment count in lock word placing in w2 to check overflow |
| 1106 | lsr w3, w2, LOCK_WORD_READ_BARRIER_STATE_SHIFT // if either of the upper two bits (28-29) are set, we overflowed. |
| 1107 | cbnz w3, .Lslow_lock // if we overflow the count go slow path |
| 1108 | add w2, w1, #LOCK_WORD_THIN_LOCK_COUNT_ONE // increment count for real |
| 1109 | stxr w3, w2, [x4] |
| 1110 | cbnz w3, .Llock_stxr_fail // store failed, retry |
Andreas Gampe | 4fc046e | 2014-05-06 16:56:39 -0700 | [diff] [blame] | 1111 | ret |
Hiroshi Yamauchi | e15ea08 | 2015-02-09 17:11:42 -0800 | [diff] [blame] | 1112 | .Llock_stxr_fail: |
| 1113 | b .Lretry_lock // retry |
Andreas Gampe | 4fc046e | 2014-05-06 16:56:39 -0700 | [diff] [blame] | 1114 | .Lslow_lock: |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1115 | SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case we block |
Andreas Gampe | 4fc046e | 2014-05-06 16:56:39 -0700 | [diff] [blame] | 1116 | mov x1, xSELF // pass Thread::Current |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1117 | bl artLockObjectFromCode // (Object* obj, Thread*) |
| 1118 | RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME |
Andreas Gampe | 4fc046e | 2014-05-06 16:56:39 -0700 | [diff] [blame] | 1119 | RETURN_IF_W0_IS_ZERO_OR_DELIVER |
| 1120 | END art_quick_lock_object |
| 1121 | |
Andreas Gampe | c7ed09b | 2016-04-25 20:08:55 -0700 | [diff] [blame] | 1122 | ENTRY art_quick_lock_object_no_inline |
| 1123 | SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case we block |
| 1124 | mov x1, xSELF // pass Thread::Current |
| 1125 | bl artLockObjectFromCode // (Object* obj, Thread*) |
| 1126 | RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME |
| 1127 | RETURN_IF_W0_IS_ZERO_OR_DELIVER |
| 1128 | END art_quick_lock_object_no_inline |
| 1129 | |
Andreas Gampe | 4fc046e | 2014-05-06 16:56:39 -0700 | [diff] [blame] | 1130 | /* |
| 1131 | * Entry from managed code that calls artUnlockObjectFromCode and delivers exception on failure. |
| 1132 | * x0 holds the possibly null object to lock. |
| 1133 | * |
| 1134 | * Derived from arm32 code. |
| 1135 | */ |
| 1136 | .extern artUnlockObjectFromCode |
| 1137 | ENTRY art_quick_unlock_object |
| 1138 | cbz x0, .Lslow_unlock |
    add    x4, x0, #MIRROR_OBJECT_LOCK_WORD_OFFSET  // exclusive load/store takes no immediate offset, so form the address first
| 1140 | .Lretry_unlock: |
| 1141 | #ifndef USE_READ_BARRIER |
| 1142 | ldr w1, [x4] |
| 1143 | #else |
| 1144 | ldxr w1, [x4] // Need to use atomic instructions for read barrier |
| 1145 | #endif |
| 1146 | lsr w2, w1, LOCK_WORD_STATE_SHIFT |
Andreas Gampe | 4fc046e | 2014-05-06 16:56:39 -0700 | [diff] [blame] | 1147 | cbnz w2, .Lslow_unlock // if either of the top two bits are set, go slow path |
| 1148 | ldr w2, [xSELF, #THREAD_ID_OFFSET] |
Hiroshi Yamauchi | e15ea08 | 2015-02-09 17:11:42 -0800 | [diff] [blame] | 1149 | mov x3, x1 // copy lock word to check thread id equality |
| 1150 | and w3, w3, #LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED // zero the read barrier bits |
| 1151 | eor w3, w3, w2 // lock_word.ThreadId() ^ self->ThreadId() |
Andreas Gampe | 4fc046e | 2014-05-06 16:56:39 -0700 | [diff] [blame] | 1152 | uxth w3, w3 // zero top 16 bits |
    cbnz   w3, .Lslow_unlock          // if the lock word and self thread ids do not match, take the slow path
Hiroshi Yamauchi | e15ea08 | 2015-02-09 17:11:42 -0800 | [diff] [blame] | 1154 | mov x3, x1 // copy lock word to detect transition to unlocked |
| 1155 | and w3, w3, #LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED // zero the read barrier bits |
| 1156 | cmp w3, #LOCK_WORD_THIN_LOCK_COUNT_ONE |
Andreas Gampe | 4fc046e | 2014-05-06 16:56:39 -0700 | [diff] [blame] | 1157 | bpl .Lrecursive_thin_unlock |
Hiroshi Yamauchi | e15ea08 | 2015-02-09 17:11:42 -0800 | [diff] [blame] | 1158 | // transition to unlocked |
| 1159 | mov x3, x1 |
| 1160 | and w3, w3, #LOCK_WORD_READ_BARRIER_STATE_MASK // w3: zero except for the preserved read barrier bits |
Andreas Gampe | 675967d | 2014-05-14 16:28:34 -0700 | [diff] [blame] | 1161 | dmb ish // full (LoadStore|StoreStore) memory barrier |
Hiroshi Yamauchi | e15ea08 | 2015-02-09 17:11:42 -0800 | [diff] [blame] | 1162 | #ifndef USE_READ_BARRIER |
| 1163 | str w3, [x4] |
| 1164 | #else |
| 1165 | stxr w2, w3, [x4] // Need to use atomic instructions for read barrier |
| 1166 | cbnz w2, .Lunlock_stxr_fail // store failed, retry |
| 1167 | #endif |
Andreas Gampe | 4fc046e | 2014-05-06 16:56:39 -0700 | [diff] [blame] | 1168 | ret |
Hiroshi Yamauchi | e15ea08 | 2015-02-09 17:11:42 -0800 | [diff] [blame] | 1169 | .Lrecursive_thin_unlock: // w1: original lock word |
| 1170 | sub w1, w1, #LOCK_WORD_THIN_LOCK_COUNT_ONE // decrement count |
| 1171 | #ifndef USE_READ_BARRIER |
| 1172 | str w1, [x4] |
| 1173 | #else |
| 1174 | stxr w2, w1, [x4] // Need to use atomic instructions for read barrier |
| 1175 | cbnz w2, .Lunlock_stxr_fail // store failed, retry |
| 1176 | #endif |
Andreas Gampe | 4fc046e | 2014-05-06 16:56:39 -0700 | [diff] [blame] | 1177 | ret |
Hiroshi Yamauchi | e15ea08 | 2015-02-09 17:11:42 -0800 | [diff] [blame] | 1178 | .Lunlock_stxr_fail: |
| 1179 | b .Lretry_unlock // retry |
Andreas Gampe | 4fc046e | 2014-05-06 16:56:39 -0700 | [diff] [blame] | 1180 | .Lslow_unlock: |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1181 | SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case exception allocation triggers GC |
Andreas Gampe | 4fc046e | 2014-05-06 16:56:39 -0700 | [diff] [blame] | 1182 | mov x1, xSELF // pass Thread::Current |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1183 | bl artUnlockObjectFromCode // (Object* obj, Thread*) |
| 1184 | RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME |
Andreas Gampe | 4fc046e | 2014-05-06 16:56:39 -0700 | [diff] [blame] | 1185 | RETURN_IF_W0_IS_ZERO_OR_DELIVER |
| 1186 | END art_quick_unlock_object |
Andreas Gampe | 525cde2 | 2014-04-22 15:44:50 -0700 | [diff] [blame] | 1187 | |
Andreas Gampe | c7ed09b | 2016-04-25 20:08:55 -0700 | [diff] [blame] | 1188 | ENTRY art_quick_unlock_object_no_inline |
| 1189 | SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case exception allocation triggers GC |
| 1190 | mov x1, xSELF // pass Thread::Current |
| 1191 | bl artUnlockObjectFromCode // (Object* obj, Thread*) |
| 1192 | RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME |
| 1193 | RETURN_IF_W0_IS_ZERO_OR_DELIVER |
| 1194 | END art_quick_unlock_object_no_inline |
| 1195 | |
Andreas Gampe | 525cde2 | 2014-04-22 15:44:50 -0700 | [diff] [blame] | 1196 | /* |
| 1197 | * Entry from managed code that calls artIsAssignableFromCode and on failure calls |
| 1198 | * artThrowClassCastException. |
| 1199 | */ |
| 1200 | .extern artThrowClassCastException |
| 1201 | ENTRY art_quick_check_cast |
| 1202 | // Store arguments and link register |
Serban Constantinescu | 9bd88b0 | 2015-04-22 16:24:46 +0100 | [diff] [blame] | 1203 | // Stack needs to be 16B aligned on calls. |
    stp x0, x1, [sp, #-32]!
Andreas Gampe | 525cde2 | 2014-04-22 15:44:50 -0700 | [diff] [blame] | 1205 | .cfi_adjust_cfa_offset 32 |
Andreas Gampe | 525cde2 | 2014-04-22 15:44:50 -0700 | [diff] [blame] | 1206 | .cfi_rel_offset x0, 0 |
| 1207 | .cfi_rel_offset x1, 8 |
Serban Constantinescu | 9bd88b0 | 2015-04-22 16:24:46 +0100 | [diff] [blame] | 1208 | str xLR, [sp, #24] |
Andreas Gampe | 525cde2 | 2014-04-22 15:44:50 -0700 | [diff] [blame] | 1209 | .cfi_rel_offset x30, 24 |
| 1210 | |
| 1211 | // Call runtime code |
| 1212 | bl artIsAssignableFromCode |
| 1213 | |
| 1214 | // Check for exception |
| 1215 | cbz x0, .Lthrow_class_cast_exception |
| 1216 | |
| 1217 | // Restore and return |
Serban Constantinescu | 9bd88b0 | 2015-04-22 16:24:46 +0100 | [diff] [blame] | 1218 | ldr xLR, [sp, #24] |
| 1219 | .cfi_restore x30 |
| 1220 | ldp x0, x1, [sp], #32 |
Andreas Gampe | 525cde2 | 2014-04-22 15:44:50 -0700 | [diff] [blame] | 1221 | .cfi_restore x0 |
| 1222 | .cfi_restore x1 |
Andreas Gampe | 525cde2 | 2014-04-22 15:44:50 -0700 | [diff] [blame] | 1223 | .cfi_adjust_cfa_offset -32 |
| 1224 | ret |
| 1225 | |
Andreas Gampe | 6b90d42 | 2015-06-26 19:49:24 -0700 | [diff] [blame] | 1226 | .cfi_adjust_cfa_offset 32 // Reset unwind info so following code unwinds. |
| 1227 | |
Andreas Gampe | 525cde2 | 2014-04-22 15:44:50 -0700 | [diff] [blame] | 1228 | .Lthrow_class_cast_exception: |
| 1229 | // Restore |
Serban Constantinescu | 9bd88b0 | 2015-04-22 16:24:46 +0100 | [diff] [blame] | 1230 | ldr xLR, [sp, #24] |
| 1231 | .cfi_restore x30 |
| 1232 | ldp x0, x1, [sp], #32 |
Andreas Gampe | 525cde2 | 2014-04-22 15:44:50 -0700 | [diff] [blame] | 1233 | .cfi_restore x0 |
| 1234 | .cfi_restore x1 |
Andreas Gampe | 525cde2 | 2014-04-22 15:44:50 -0700 | [diff] [blame] | 1235 | .cfi_adjust_cfa_offset -32 |
| 1236 | |
| 1237 | SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context |
| 1238 | mov x2, xSELF // pass Thread::Current |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1239 | b artThrowClassCastException // (Class*, Class*, Thread*) |
Andreas Gampe | 525cde2 | 2014-04-22 15:44:50 -0700 | [diff] [blame] | 1240 | brk 0 // We should not return here... |
| 1241 | END art_quick_check_cast |
| 1242 | |
Man Cao | 1aee900 | 2015-07-14 22:31:42 -0700 | [diff] [blame] | 1243 | // Restore xReg's value from [sp, #offset] if xReg is not the same as xExclude. |
| 1244 | .macro POP_REG_NE xReg, offset, xExclude |
| 1245 | .ifnc \xReg, \xExclude |
| 1246 | ldr \xReg, [sp, #\offset] // restore xReg |
| 1247 | .cfi_restore \xReg |
| 1248 | .endif |
| 1249 | .endm |
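// For example, "POP_REG_NE x0, 0, \xDest" reloads x0 from [sp, #0] unless \xDest
// is x0, in which case x0 already holds the result and must not be clobbered.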
| 1250 | |
| 1251 | /* |
| 1252 | * Macro to insert read barrier, only used in art_quick_aput_obj. |
| 1253 | * xDest, wDest and xObj are registers, offset is a defined literal such as |
| 1254 | * MIRROR_OBJECT_CLASS_OFFSET. Dest needs both x and w versions of the same register to handle |
| 1255 | * name mismatch between instructions. This macro uses the lower 32b of register when possible. |
| 1256 | * TODO: When read barrier has a fast path, add heap unpoisoning support for the fast path. |
| 1257 | */ |
| 1258 | .macro READ_BARRIER xDest, wDest, xObj, offset |
| 1259 | #ifdef USE_READ_BARRIER |
| 1260 | // Store registers used in art_quick_aput_obj (x0-x4, LR), stack is 16B aligned. |
| 1261 | stp x0, x1, [sp, #-48]! |
| 1262 | .cfi_adjust_cfa_offset 48 |
| 1263 | .cfi_rel_offset x0, 0 |
| 1264 | .cfi_rel_offset x1, 8 |
| 1265 | stp x2, x3, [sp, #16] |
| 1266 | .cfi_rel_offset x2, 16 |
| 1267 | .cfi_rel_offset x3, 24 |
| 1268 | stp x4, xLR, [sp, #32] |
| 1269 | .cfi_rel_offset x4, 32 |
| 1270 | .cfi_rel_offset x30, 40 |
| 1271 | |
Man Cao | 6306921 | 2015-08-21 15:51:39 -0700 | [diff] [blame] | 1272 | // mov x0, \xRef // pass ref in x0 (no-op for now since parameter ref is unused) |
Man Cao | 1aee900 | 2015-07-14 22:31:42 -0700 | [diff] [blame] | 1273 | .ifnc \xObj, x1 |
| 1274 | mov x1, \xObj // pass xObj |
| 1275 | .endif |
| 1276 | mov w2, #\offset // pass offset |
| 1277 | bl artReadBarrierSlow // artReadBarrierSlow(ref, xObj, offset) |
| 1278 | // No need to unpoison return value in w0, artReadBarrierSlow() would do the unpoisoning. |
| 1279 | .ifnc \wDest, w0 |
| 1280 | mov \wDest, w0 // save return value in wDest |
| 1281 | .endif |
| 1282 | |
| 1283 | // Conditionally restore saved registers |
| 1284 | POP_REG_NE x0, 0, \xDest |
| 1285 | POP_REG_NE x1, 8, \xDest |
| 1286 | POP_REG_NE x2, 16, \xDest |
| 1287 | POP_REG_NE x3, 24, \xDest |
| 1288 | POP_REG_NE x4, 32, \xDest |
| 1289 | ldr xLR, [sp, #40] |
| 1290 | .cfi_restore x30 |
| 1291 | add sp, sp, #48 |
| 1292 | .cfi_adjust_cfa_offset -48 |
| 1293 | #else |
| 1294 | ldr \wDest, [\xObj, #\offset] // Heap reference = 32b. This also zero-extends to \xDest. |
| 1295 | UNPOISON_HEAP_REF \wDest |
| 1296 | #endif // USE_READ_BARRIER |
| 1297 | .endm |
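// In effect (a C-like sketch): with USE_READ_BARRIER defined,
//   wDest = artReadBarrierSlow(/* ref, currently unused */, xObj, offset);
// otherwise it is a plain (unpoisoned) 32-bit heap reference load:
//   wDest = *(uint32_t*)(xObj + offset);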
| 1298 | |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1299 | /* |
| 1300 | * Entry from managed code for array put operations of objects where the value being stored |
| 1301 | * needs to be checked for compatibility. |
| 1302 | * x0 = array, x1 = index, x2 = value |
| 1303 | * |
| 1304 | * Currently all values should fit into w0/w1/w2, and w1 always will as indices are 32b. We |
| 1305 | * assume, though, that the upper 32b are zeroed out. At least for x1/w1 we can do better by |
| 1306 | * using index-zero-extension in load/stores. |
| 1307 | * |
| 1308 | * Temporaries: x3, x4 |
| 1309 | * TODO: x4 OK? ip seems wrong here. |
| 1310 | */ |
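    /*
     * In pseudo-code, the checked store below is roughly (a sketch):
     *   if (value == null) { array->data[index] = null; return; }
     *   if (value->klass == array->klass->component_type
     *       || artIsAssignableFromCode(array->klass->component_type, value->klass)) {
     *     array->data[index] = value;
     *     card_table[array >> 7] = dirty;  // card mark via THREAD_CARD_TABLE_OFFSET
     *   } else {
     *     throw ArrayStoreException;
     *   }
     */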
| 1311 | ENTRY art_quick_aput_obj_with_null_and_bound_check |
| 1312 | tst x0, x0 |
| 1313 | bne art_quick_aput_obj_with_bound_check |
| 1314 | b art_quick_throw_null_pointer_exception |
| 1315 | END art_quick_aput_obj_with_null_and_bound_check |
| 1316 | |
| 1317 | ENTRY art_quick_aput_obj_with_bound_check |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1318 | ldr w3, [x0, #MIRROR_ARRAY_LENGTH_OFFSET] |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1319 | cmp w3, w1 |
| 1320 | bhi art_quick_aput_obj |
| 1321 | mov x0, x1 |
| 1322 | mov x1, x3 |
| 1323 | b art_quick_throw_array_bounds |
| 1324 | END art_quick_aput_obj_with_bound_check |
| 1325 | |
Man Cao | 1aee900 | 2015-07-14 22:31:42 -0700 | [diff] [blame] | 1326 | #ifdef USE_READ_BARRIER |
| 1327 | .extern artReadBarrierSlow |
| 1328 | #endif |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1329 | ENTRY art_quick_aput_obj |
| 1330 | cbz x2, .Ldo_aput_null |
Man Cao | 1aee900 | 2015-07-14 22:31:42 -0700 | [diff] [blame] | 1331 | READ_BARRIER x3, w3, x0, MIRROR_OBJECT_CLASS_OFFSET // Heap reference = 32b |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1332 | // This also zero-extends to x3 |
Man Cao | 1aee900 | 2015-07-14 22:31:42 -0700 | [diff] [blame] | 1333 | READ_BARRIER x4, w4, x2, MIRROR_OBJECT_CLASS_OFFSET // Heap reference = 32b |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1334 | // This also zero-extends to x4 |
Man Cao | 1aee900 | 2015-07-14 22:31:42 -0700 | [diff] [blame] | 1335 | READ_BARRIER x3, w3, x3, MIRROR_CLASS_COMPONENT_TYPE_OFFSET // Heap reference = 32b |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1336 | // This also zero-extends to x3 |
| 1337 | cmp w3, w4 // value's type == array's component type - trivial assignability |
| 1338 | bne .Lcheck_assignability |
| 1339 | .Ldo_aput: |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1340 | add x3, x0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1341 | // "Compress" = do nothing |
Hiroshi Yamauchi | bfa5eb6 | 2015-05-29 15:04:41 -0700 | [diff] [blame] | 1342 | POISON_HEAP_REF w2 |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1343 | str w2, [x3, x1, lsl #2] // Heap reference = 32b |
| 1344 | ldr x3, [xSELF, #THREAD_CARD_TABLE_OFFSET] |
| 1345 | lsr x0, x0, #7 |
| 1346 | strb w3, [x3, x0] |
| 1347 | ret |
| 1348 | .Ldo_aput_null: |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1349 | add x3, x0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1350 | // "Compress" = do nothing |
| 1351 | str w2, [x3, x1, lsl #2] // Heap reference = 32b |
| 1352 | ret |
| 1353 | .Lcheck_assignability: |
| 1354 | // Store arguments and link register |
    stp x0, x1, [sp, #-32]!
| 1356 | .cfi_adjust_cfa_offset 32 |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1357 | .cfi_rel_offset x0, 0 |
| 1358 | .cfi_rel_offset x1, 8 |
Serban Constantinescu | 9bd88b0 | 2015-04-22 16:24:46 +0100 | [diff] [blame] | 1359 | stp x2, xLR, [sp, #16] |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1360 | .cfi_rel_offset x2, 16 |
Serban Constantinescu | 9bd88b0 | 2015-04-22 16:24:46 +0100 | [diff] [blame] | 1361 | .cfi_rel_offset x30, 24 |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1362 | |
| 1363 | // Call runtime code |
| 1364 | mov x0, x3 // Heap reference, 32b, "uncompress" = do nothing, already zero-extended |
| 1365 | mov x1, x4 // Heap reference, 32b, "uncompress" = do nothing, already zero-extended |
| 1366 | bl artIsAssignableFromCode |
| 1367 | |
| 1368 | // Check for exception |
| 1369 | cbz x0, .Lthrow_array_store_exception |
| 1370 | |
| 1371 | // Restore |
Serban Constantinescu | 9bd88b0 | 2015-04-22 16:24:46 +0100 | [diff] [blame] | 1372 | ldp x2, x30, [sp, #16] |
| 1373 | .cfi_restore x2 |
| 1374 | .cfi_restore x30 |
| 1375 | ldp x0, x1, [sp], #32 |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1376 | .cfi_restore x0 |
| 1377 | .cfi_restore x1 |
Serban Constantinescu | 9bd88b0 | 2015-04-22 16:24:46 +0100 | [diff] [blame] | 1378 | .cfi_adjust_cfa_offset -32 |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1379 | |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1380 | add x3, x0, #MIRROR_OBJECT_ARRAY_DATA_OFFSET |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1381 | // "Compress" = do nothing |
Hiroshi Yamauchi | bfa5eb6 | 2015-05-29 15:04:41 -0700 | [diff] [blame] | 1382 | POISON_HEAP_REF w2 |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1383 | str w2, [x3, x1, lsl #2] // Heap reference = 32b |
| 1384 | ldr x3, [xSELF, #THREAD_CARD_TABLE_OFFSET] |
| 1385 | lsr x0, x0, #7 |
| 1386 | strb w3, [x3, x0] |
| 1387 | ret |
Mathieu Chartier | 2738639 | 2015-06-27 15:42:27 -0700 | [diff] [blame] | 1388 | .cfi_adjust_cfa_offset 32 // 4 restores after cbz for unwinding. |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1389 | .Lthrow_array_store_exception: |
Serban Constantinescu | 9bd88b0 | 2015-04-22 16:24:46 +0100 | [diff] [blame] | 1390 | ldp x2, x30, [sp, #16] |
| 1391 | .cfi_restore x2 |
| 1392 | .cfi_restore x30 |
| 1393 | ldp x0, x1, [sp], #32 |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1394 | .cfi_restore x0 |
| 1395 | .cfi_restore x1 |
Serban Constantinescu | 9bd88b0 | 2015-04-22 16:24:46 +0100 | [diff] [blame] | 1396 | .cfi_adjust_cfa_offset -32 |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1397 | |
| 1398 | SETUP_SAVE_ALL_CALLEE_SAVE_FRAME |
| 1399 | mov x1, x2 // Pass value. |
| 1400 | mov x2, xSELF // Pass Thread::Current. |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1401 | b artThrowArrayStoreException // (Object*, Object*, Thread*). |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1402 | brk 0 // Unreached. |
| 1403 | END art_quick_aput_obj |
| 1404 | |
Stuart Monteith | b95a534 | 2014-03-12 13:32:32 +0000 | [diff] [blame] | 1405 | // Macro to facilitate adding new allocation entrypoints. |
Vladimir Marko | 5ea536a | 2015-04-20 20:11:30 +0100 | [diff] [blame] | 1406 | .macro ONE_ARG_DOWNCALL name, entrypoint, return |
| 1407 | .extern \entrypoint |
| 1408 | ENTRY \name |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 1409 | SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC |
Vladimir Marko | 5ea536a | 2015-04-20 20:11:30 +0100 | [diff] [blame] | 1410 | mov x1, xSELF // pass Thread::Current |
    bl     \entrypoint                // (uint32_t arg, Thread*)
| 1412 | RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME |
| 1413 | \return |
| 1414 | END \name |
| 1415 | .endm |
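// As a usage sketch: "ONE_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode,
// RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER" (instantiated further below) expands to an ENTRY
// that saves the refs-only frame, passes Thread::Current in x1, calls the entrypoint,
// restores the frame, and runs the given return macro.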
| 1416 | |
| 1417 | // Macro to facilitate adding new allocation entrypoints. |
Stuart Monteith | b95a534 | 2014-03-12 13:32:32 +0000 | [diff] [blame] | 1418 | .macro TWO_ARG_DOWNCALL name, entrypoint, return |
| 1419 | .extern \entrypoint |
| 1420 | ENTRY \name |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 1421 | SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC |
Andreas Gampe | 00c1e6d | 2014-04-25 15:47:13 -0700 | [diff] [blame] | 1422 | mov x2, xSELF // pass Thread::Current |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1423 | bl \entrypoint // (uint32_t type_idx, Method* method, Thread*) |
| 1424 | RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME |
Andreas Gampe | 00c1e6d | 2014-04-25 15:47:13 -0700 | [diff] [blame] | 1425 | \return |
Stuart Monteith | b95a534 | 2014-03-12 13:32:32 +0000 | [diff] [blame] | 1426 | END \name |
| 1427 | .endm |
| 1428 | |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 1429 | // Macro to facilitate adding new allocation entrypoints. |
Stuart Monteith | b95a534 | 2014-03-12 13:32:32 +0000 | [diff] [blame] | 1430 | .macro THREE_ARG_DOWNCALL name, entrypoint, return |
| 1431 | .extern \entrypoint |
| 1432 | ENTRY \name |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 1433 | SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC |
Andreas Gampe | 00c1e6d | 2014-04-25 15:47:13 -0700 | [diff] [blame] | 1434 | mov x3, xSELF // pass Thread::Current |
Andreas Gampe | 00c1e6d | 2014-04-25 15:47:13 -0700 | [diff] [blame] | 1435 | bl \entrypoint |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1436 | RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME |
Andreas Gampe | 00c1e6d | 2014-04-25 15:47:13 -0700 | [diff] [blame] | 1437 | \return |
Stuart Monteith | b95a534 | 2014-03-12 13:32:32 +0000 | [diff] [blame] | 1438 | END \name |
| 1439 | .endm |
| 1440 | |
Jeff Hao | 848f70a | 2014-01-15 13:49:50 -0800 | [diff] [blame] | 1441 | // Macro to facilitate adding new allocation entrypoints. |
| 1442 | .macro FOUR_ARG_DOWNCALL name, entrypoint, return |
| 1443 | .extern \entrypoint |
| 1444 | ENTRY \name |
| 1445 | SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC |
| 1446 | mov x4, xSELF // pass Thread::Current |
    bl     \entrypoint                // (arg0, arg1, arg2, arg3, Thread*)
| 1448 | RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME |
| 1449 | \return |
| 1450 | DELIVER_PENDING_EXCEPTION |
| 1451 | END \name |
| 1452 | .endm |
| 1453 | |
Andreas Gampe | 6e4e59c | 2014-05-05 20:11:02 -0700 | [diff] [blame] | 1454 | // Macros taking opportunity of code similarities for downcalls with referrer. |
Andreas Gampe | 6e4e59c | 2014-05-05 20:11:02 -0700 | [diff] [blame] | 1455 | .macro ONE_ARG_REF_DOWNCALL name, entrypoint, return |
| 1456 | .extern \entrypoint |
| 1457 | ENTRY \name |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1458 | SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 1459 | ldr x1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer |
Andreas Gampe | 6e4e59c | 2014-05-05 20:11:02 -0700 | [diff] [blame] | 1460 | mov x2, xSELF // pass Thread::Current |
    bl     \entrypoint                // (uint32_t arg, ArtMethod* referrer, Thread*)
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1462 | RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME |
Andreas Gampe | 6e4e59c | 2014-05-05 20:11:02 -0700 | [diff] [blame] | 1463 | \return |
| 1464 | END \name |
| 1465 | .endm |
| 1466 | |
Andreas Gampe | 6e4e59c | 2014-05-05 20:11:02 -0700 | [diff] [blame] | 1467 | .macro TWO_ARG_REF_DOWNCALL name, entrypoint, return |
| 1468 | .extern \entrypoint |
| 1469 | ENTRY \name |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1470 | SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 1471 | ldr x2, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer |
Andreas Gampe | 6e4e59c | 2014-05-05 20:11:02 -0700 | [diff] [blame] | 1472 | mov x3, xSELF // pass Thread::Current |
Andreas Gampe | 6e4e59c | 2014-05-05 20:11:02 -0700 | [diff] [blame] | 1473 | bl \entrypoint |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1474 | RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME |
Andreas Gampe | 6e4e59c | 2014-05-05 20:11:02 -0700 | [diff] [blame] | 1475 | \return |
| 1476 | END \name |
| 1477 | .endm |
| 1478 | |
Andreas Gampe | 6e4e59c | 2014-05-05 20:11:02 -0700 | [diff] [blame] | 1479 | .macro THREE_ARG_REF_DOWNCALL name, entrypoint, return |
| 1480 | .extern \entrypoint |
| 1481 | ENTRY \name |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1482 | SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 1483 | ldr x3, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer |
Andreas Gampe | 6e4e59c | 2014-05-05 20:11:02 -0700 | [diff] [blame] | 1484 | mov x4, xSELF // pass Thread::Current |
Andreas Gampe | 6e4e59c | 2014-05-05 20:11:02 -0700 | [diff] [blame] | 1485 | bl \entrypoint |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1486 | RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME |
Andreas Gampe | 6e4e59c | 2014-05-05 20:11:02 -0700 | [diff] [blame] | 1487 | \return |
| 1488 | END \name |
| 1489 | .endm |
| 1490 | |
Andreas Gampe | 1cc7dba | 2014-12-17 18:43:01 -0800 | [diff] [blame] | 1491 | .macro RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER |
    cbz    w0, 1f                     // branch over the return if the result is zero
| 1493 | ret // return |
| 1494 | 1: |
| 1495 | DELIVER_PENDING_EXCEPTION |
| 1496 | .endm |
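// I.e. (a sketch): if (result != 0) return it to the caller; otherwise the
// entrypoint failed, so deliver the pending exception it raised.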
| 1497 | |
Matteo Franchin | dfd891a | 2014-04-30 12:17:17 +0100 | [diff] [blame] | 1498 | /* |
Vladimir Marko | 3b37073 | 2014-10-09 18:34:28 +0100 | [diff] [blame] | 1499 | * Entry from managed code that calls artHandleFillArrayDataFromCode and delivers exception on |
| 1500 | * failure. |
| 1501 | */ |
| 1502 | TWO_ARG_REF_DOWNCALL art_quick_handle_fill_data, artHandleFillArrayDataFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER |
| 1503 | |
| 1504 | /* |
     * Entry from managed code when static storage is uninitialized; this stub runs the class
     * initializer and delivers an exception on error. On success the static storage base is
     * returned.
| 1508 | */ |
Vladimir Marko | 5ea536a | 2015-04-20 20:11:30 +0100 | [diff] [blame] | 1509 | ONE_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER |
Matteo Franchin | dfd891a | 2014-04-30 12:17:17 +0100 | [diff] [blame] | 1510 | |
Vladimir Marko | 5ea536a | 2015-04-20 20:11:30 +0100 | [diff] [blame] | 1511 | ONE_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER |
| 1512 | ONE_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER |
Matteo Franchin | dfd891a | 2014-04-30 12:17:17 +0100 | [diff] [blame] | 1513 | |
Fred Shih | 37f05ef | 2014-07-16 18:38:08 -0700 | [diff] [blame] | 1514 | ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1 |
| 1515 | ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1 |
| 1516 | ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1 |
| 1517 | ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1 |
Andreas Gampe | 6e4e59c | 2014-05-05 20:11:02 -0700 | [diff] [blame] | 1518 | ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1 |
| 1519 | ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1 |
| 1520 | ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1 |
| 1521 | |
Fred Shih | 37f05ef | 2014-07-16 18:38:08 -0700 | [diff] [blame] | 1522 | TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1 |
| 1523 | TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1 |
| 1524 | TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1 |
| 1525 | TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1 |
Andreas Gampe | 6e4e59c | 2014-05-05 20:11:02 -0700 | [diff] [blame] | 1526 | TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1 |
| 1527 | TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1 |
| 1528 | TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1 |
| 1529 | |
Fred Shih | 37f05ef | 2014-07-16 18:38:08 -0700 | [diff] [blame] | 1530 | TWO_ARG_REF_DOWNCALL art_quick_set8_static, artSet8StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER |
| 1531 | TWO_ARG_REF_DOWNCALL art_quick_set16_static, artSet16StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER |
Andreas Gampe | 6e4e59c | 2014-05-05 20:11:02 -0700 | [diff] [blame] | 1532 | TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER |
| 1533 | TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER |
| 1534 | |
Fred Shih | 37f05ef | 2014-07-16 18:38:08 -0700 | [diff] [blame] | 1535 | THREE_ARG_REF_DOWNCALL art_quick_set8_instance, artSet8InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER |
| 1536 | THREE_ARG_REF_DOWNCALL art_quick_set16_instance, artSet16InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER |
Andreas Gampe | 6e4e59c | 2014-05-05 20:11:02 -0700 | [diff] [blame] | 1537 | THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER |
Stephen Kyle | 0ff20d5 | 2014-10-22 15:23:46 +0100 | [diff] [blame] | 1538 | THREE_ARG_REF_DOWNCALL art_quick_set64_instance, artSet64InstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER |
Andreas Gampe | 6e4e59c | 2014-05-05 20:11:02 -0700 | [diff] [blame] | 1539 | THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER |
| 1540 | |
| 1541 | // This is separated out as the argument order is different. |
| 1542 | .extern artSet64StaticFromCode |
| 1543 | ENTRY art_quick_set64_static |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1544 | SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 1545 | ldr x1, [sp, #FRAME_SIZE_REFS_ONLY_CALLEE_SAVE] // Load referrer |
                                      // x2 contains the new value
Andreas Gampe | 6e4e59c | 2014-05-05 20:11:02 -0700 | [diff] [blame] | 1547 | mov x3, xSELF // pass Thread::Current |
Andreas Gampe | 6e4e59c | 2014-05-05 20:11:02 -0700 | [diff] [blame] | 1548 | bl artSet64StaticFromCode |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1549 | RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME |
Andreas Gampe | 6e4e59c | 2014-05-05 20:11:02 -0700 | [diff] [blame] | 1550 | RETURN_IF_W0_IS_ZERO_OR_DELIVER |
| 1551 | END art_quick_set64_static |
| 1552 | |
Matteo Franchin | dfd891a | 2014-04-30 12:17:17 +0100 | [diff] [blame] | 1553 | /* |
| 1554 | * Entry from managed code to resolve a string, this stub will allocate a String and deliver an |
     * exception on error. On success the String is returned. w0 holds the string index. The
     * fast-path check for a hit in the strings cache has already been performed.
Matteo Franchin | dfd891a | 2014-04-30 12:17:17 +0100 | [diff] [blame] | 1557 | */ |
Vladimir Marko | 5ea536a | 2015-04-20 20:11:30 +0100 | [diff] [blame] | 1558 | ONE_ARG_DOWNCALL art_quick_resolve_string, artResolveStringFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER |
Andreas Gampe | 6e4e59c | 2014-05-05 20:11:02 -0700 | [diff] [blame] | 1559 | |
Stuart Monteith | b95a534 | 2014-03-12 13:32:32 +0000 | [diff] [blame] | 1560 | // Generate the allocation entrypoints for each allocator. |
Hiroshi Yamauchi | 6f6244a | 2015-10-22 12:08:12 -0700 | [diff] [blame] | 1561 | GENERATE_ALLOC_ENTRYPOINTS_FOR_EACH_ALLOCATOR |
Hiroshi Yamauchi | 10d4c08 | 2016-02-24 12:51:18 -0800 | [diff] [blame] | 1562 | |
Hiroshi Yamauchi | 6f6244a | 2015-10-22 12:08:12 -0700 | [diff] [blame] | 1563 | // A hand-written override for GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_rosalloc, RosAlloc). |
| 1564 | ENTRY art_quick_alloc_object_rosalloc |
| 1565 | // Fast path rosalloc allocation. |
| 1566 | // x0: type_idx/return value, x1: ArtMethod*, xSELF(x19): Thread::Current |
| 1567 | // x2-x7: free. |
| 1568 | ldr x2, [x1, #ART_METHOD_DEX_CACHE_TYPES_OFFSET_64] // Load dex cache resolved types array |
| 1569 | // Load the class (x2) |
| 1570 | ldr w2, [x2, x0, lsl #COMPRESSED_REFERENCE_SIZE_SHIFT] |
| 1571 | cbz x2, .Lart_quick_alloc_object_rosalloc_slow_path // Check null class |
| 1572 | // Check class status. |
| 1573 | ldr w3, [x2, #MIRROR_CLASS_STATUS_OFFSET] |
| 1574 | cmp x3, #MIRROR_CLASS_STATUS_INITIALIZED |
| 1575 | bne .Lart_quick_alloc_object_rosalloc_slow_path |
| 1576 | // Add a fake dependence from the |
| 1577 | // following access flag and size |
| 1578 | // loads to the status load. |
| 1579 | // This is to prevent those loads |
| 1580 | // from being reordered above the |
| 1581 | // status load and reading wrong |
| 1582 | // values (an alternative is to use |
| 1583 | // a load-acquire for the status). |
| 1584 | eor x3, x3, x3 |
| 1585 | add x2, x2, x3 |
                                                              // Check if the access flags have
                                                              // kAccClassIsFinalizable set.
| 1588 | ldr w3, [x2, #MIRROR_CLASS_ACCESS_FLAGS_OFFSET] |
| 1589 | tst x3, #ACCESS_FLAGS_CLASS_IS_FINALIZABLE |
| 1590 | bne .Lart_quick_alloc_object_rosalloc_slow_path |
| 1591 | ldr x3, [xSELF, #THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET] // Check if the thread local |
| 1592 | // allocation stack has room. |
| 1593 | // ldp won't work due to large offset. |
| 1594 | ldr x4, [xSELF, #THREAD_LOCAL_ALLOC_STACK_END_OFFSET] |
| 1595 | cmp x3, x4 |
| 1596 | bhs .Lart_quick_alloc_object_rosalloc_slow_path |
| 1597 | ldr w3, [x2, #MIRROR_CLASS_OBJECT_SIZE_OFFSET] // Load the object size (x3) |
| 1598 | cmp x3, #ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE // Check if the size is for a thread |
| 1599 | // local allocation |
| 1600 | bhs .Lart_quick_alloc_object_rosalloc_slow_path |
| 1601 | // Compute the rosalloc bracket index |
| 1602 | // from the size. |
| 1603 | // Align up the size by the rosalloc |
| 1604 | // bracket quantum size and divide |
| 1605 | // by the quantum size and subtract |
| 1606 | // by 1. This code is a shorter but |
| 1607 | // equivalent version. |
| 1608 | sub x3, x3, #1 |
| 1609 | lsr x3, x3, #ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT |
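                                                              // E.g., assuming a 16-byte bracket
                                                              // quantum (shift of 4), a 24-byte
                                                              // object maps to (24-1)>>4 = 1,
                                                              // i.e. the second bracket.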
| 1610 | // Load the rosalloc run (x4) |
| 1611 | add x4, xSELF, x3, lsl #POINTER_SIZE_SHIFT |
| 1612 | ldr x4, [x4, #THREAD_ROSALLOC_RUNS_OFFSET] |
| 1613 | // Load the free list head (x3). This |
| 1614 | // will be the return val. |
| 1615 | ldr x3, [x4, #(ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)] |
| 1616 | cbz x3, .Lart_quick_alloc_object_rosalloc_slow_path |
| 1617 | // "Point of no slow path". Won't go to the slow path from here on. OK to clobber x0 and x1. |
| 1618 | ldr x1, [x3, #ROSALLOC_SLOT_NEXT_OFFSET] // Load the next pointer of the head |
| 1619 | // and update the list head with the |
| 1620 | // next pointer. |
| 1621 | str x1, [x4, #(ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)] |
| 1622 | // Store the class pointer in the |
| 1623 | // header. This also overwrites the |
| 1624 | // next pointer. The offsets are |
| 1625 | // asserted to match. |
| 1626 | #if ROSALLOC_SLOT_NEXT_OFFSET != MIRROR_OBJECT_CLASS_OFFSET |
| 1627 | #error "Class pointer needs to overwrite next pointer." |
| 1628 | #endif |
| 1629 | POISON_HEAP_REF w2 |
| 1630 | str w2, [x3, #MIRROR_OBJECT_CLASS_OFFSET] |
| 1631 | // Push the new object onto the thread |
| 1632 | // local allocation stack and |
| 1633 | // increment the thread local |
| 1634 | // allocation stack top. |
| 1635 | ldr x1, [xSELF, #THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET] |
| 1636 | str w3, [x1], #COMPRESSED_REFERENCE_SIZE // (Increment x1 as a side effect.) |
| 1637 | str x1, [xSELF, #THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET] |
| 1638 | // Decrement the size of the free list |
| 1639 | ldr w1, [x4, #(ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)] |
| 1640 | sub x1, x1, #1 |
| 1641 | // TODO: consider combining this store |
| 1642 | // and the list head store above using |
| 1643 | // strd. |
| 1644 | str w1, [x4, #(ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)] |
| 1645 | // Fence. This is "ish" not "ishst" so |
| 1646 | // that the code after this allocation |
| 1647 | // site will see the right values in |
| 1648 | // the fields of the class. |
| 1649 | // Alternatively we could use "ishst" |
| 1650 | // if we use load-acquire for the |
                                                              // class status load.
| 1652 | dmb ish |
| 1653 | mov x0, x3 // Set the return value and return. |
| 1654 | ret |
| 1655 | .Lart_quick_alloc_object_rosalloc_slow_path: |
| 1656 | SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves in case of GC |
| 1657 | mov x2, xSELF // pass Thread::Current |
| 1658 | bl artAllocObjectFromCodeRosAlloc // (uint32_t type_idx, Method* method, Thread*) |
| 1659 | RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME |
| 1660 | RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER |
| 1661 | END art_quick_alloc_object_rosalloc |
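// The fast path above is, in effect, a free-list pop (a C-like sketch):
//   slot = run->free_list.head;
//   if (slot != null) {
//     run->free_list.head = slot->next;   // next pointer is then overwritten by the
//     slot->klass = klass;                // class pointer (offsets asserted equal above)
//     push(self->thread_local_alloc_stack, slot);
//     run->free_list.size--;
//   }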
Stuart Monteith | b95a534 | 2014-03-12 13:32:32 +0000 | [diff] [blame] | 1662 | |
Hiroshi Yamauchi | cd77378 | 2016-04-07 17:18:24 -0700 | [diff] [blame] | 1663 | // The common fast path code for art_quick_alloc_object_tlab and art_quick_alloc_object_region_tlab. |
| 1664 | // |
| 1665 | // x0: type_idx/return value, x1: ArtMethod*, x2: Class*, xSELF(x19): Thread::Current |
| 1666 | // x3-x7: free. |
| 1667 | // Need to preserve x0 and x1 to the slow path. |
| 1668 | .macro ALLOC_OBJECT_TLAB_FAST_PATH slowPathLabel |
| 1669 | cbz x2, \slowPathLabel // Check null class |
Hiroshi Yamauchi | d72945c | 2016-03-16 11:23:10 -0700 | [diff] [blame] | 1670 | // Check class status. |
| 1671 | ldr w3, [x2, #MIRROR_CLASS_STATUS_OFFSET] |
| 1672 | cmp x3, #MIRROR_CLASS_STATUS_INITIALIZED |
Hiroshi Yamauchi | cd77378 | 2016-04-07 17:18:24 -0700 | [diff] [blame] | 1673 | bne \slowPathLabel |
Hiroshi Yamauchi | d72945c | 2016-03-16 11:23:10 -0700 | [diff] [blame] | 1674 | // Add a fake dependence from the |
| 1675 | // following access flag and size |
| 1676 | // loads to the status load. |
| 1677 | // This is to prevent those loads |
| 1678 | // from being reordered above the |
| 1679 | // status load and reading wrong |
| 1680 | // values (an alternative is to use |
| 1681 | // a load-acquire for the status). |
| 1682 | eor x3, x3, x3 |
| 1683 | add x2, x2, x3 |
                                                              // Check if the access flags have
                                                              // kAccClassIsFinalizable set.
| 1686 | ldr w3, [x2, #MIRROR_CLASS_ACCESS_FLAGS_OFFSET] |
Hiroshi Yamauchi | cd77378 | 2016-04-07 17:18:24 -0700 | [diff] [blame] | 1687 | tbnz x3, #ACCESS_FLAGS_CLASS_IS_FINALIZABLE_BIT, \slowPathLabel |
Hiroshi Yamauchi | d72945c | 2016-03-16 11:23:10 -0700 | [diff] [blame] | 1688 | // Load thread_local_pos (x4) and |
| 1689 | // thread_local_end (x5). |
| 1690 | ldr x4, [xSELF, #THREAD_LOCAL_POS_OFFSET] |
| 1691 | ldr x5, [xSELF, #THREAD_LOCAL_END_OFFSET] |
| 1692 | sub x6, x5, x4 // Compute the remaining buf size. |
| 1693 | ldr w7, [x2, #MIRROR_CLASS_OBJECT_SIZE_OFFSET] // Load the object size (x7). |
| 1694 | cmp x7, x6 // Check if it fits. OK to do this |
| 1695 | // before rounding up the object size |
| 1696 | // assuming the buf size alignment. |
Hiroshi Yamauchi | cd77378 | 2016-04-07 17:18:24 -0700 | [diff] [blame] | 1697 | bhi \slowPathLabel |
Hiroshi Yamauchi | d72945c | 2016-03-16 11:23:10 -0700 | [diff] [blame] | 1698 | // "Point of no slow path". Won't go to the slow path from here on. OK to clobber x0 and x1. |
| 1699 | // Round up the object size by the |
| 1700 | // object alignment. (addr + 7) & ~7. |
| 1701 | add x7, x7, #OBJECT_ALIGNMENT_MASK |
| 1702 | and x7, x7, #OBJECT_ALIGNMENT_MASK_TOGGLED |
| 1703 | // Move old thread_local_pos to x0 |
| 1704 | // for the return value. |
| 1705 | mov x0, x4 |
| 1706 | add x5, x0, x7 |
| 1707 | str x5, [xSELF, #THREAD_LOCAL_POS_OFFSET] // Store new thread_local_pos. |
| 1708 | ldr x5, [xSELF, #THREAD_LOCAL_OBJECTS_OFFSET] // Increment thread_local_objects. |
| 1709 | add x5, x5, #1 |
| 1710 | str x5, [xSELF, #THREAD_LOCAL_OBJECTS_OFFSET] |
| 1711 | POISON_HEAP_REF w2 |
| 1712 | str w2, [x0, #MIRROR_OBJECT_CLASS_OFFSET] // Store the class pointer. |
| 1713 | // Fence. This is "ish" not "ishst" so |
| 1714 | // that the code after this allocation |
| 1715 | // site will see the right values in |
| 1716 | // the fields of the class. |
| 1717 | // Alternatively we could use "ishst" |
| 1718 | // if we use load-acquire for the |
                                                              // class status load.
| 1720 | dmb ish |
| 1721 | ret |
Hiroshi Yamauchi | cd77378 | 2016-04-07 17:18:24 -0700 | [diff] [blame] | 1722 | .endm |
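// In effect, the macro performs a bump-pointer allocation (a C-like sketch):
//   if (klass->object_size <= self->tlab_end - self->tlab_pos) {  // fit check before rounding,
//     size = RoundUp(klass->object_size, 8);                      // valid given buffer alignment
//     obj = self->tlab_pos;
//     self->tlab_pos += size;
//     self->tlab_objects++;
//     obj->klass = klass;                                         // followed by an "ish" fence
//   }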
| 1723 | |
| 1724 | // A hand-written override for GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_tlab, TLAB). |
| 1725 | ENTRY art_quick_alloc_object_tlab |
| 1726 | // Fast path tlab allocation. |
| 1727 | // x0: type_idx/return value, x1: ArtMethod*, xSELF(x19): Thread::Current |
| 1728 | // x2-x7: free. |
| 1729 | #if defined(USE_READ_BARRIER) |
| 1730 | mvn x0, xzr // Read barrier not supported here. |
| 1731 | ret // Return -1. |
| 1732 | #endif |
| 1733 | ldr x2, [x1, #ART_METHOD_DEX_CACHE_TYPES_OFFSET_64] // Load dex cache resolved types array |
| 1734 | // Load the class (x2) |
| 1735 | ldr w2, [x2, x0, lsl #COMPRESSED_REFERENCE_SIZE_SHIFT] |
| 1736 | ALLOC_OBJECT_TLAB_FAST_PATH .Lart_quick_alloc_object_tlab_slow_path |
Hiroshi Yamauchi | d72945c | 2016-03-16 11:23:10 -0700 | [diff] [blame] | 1737 | .Lart_quick_alloc_object_tlab_slow_path: |
| 1738 | SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // Save callee saves in case of GC. |
| 1739 | mov x2, xSELF // Pass Thread::Current. |
| 1740 | bl artAllocObjectFromCodeTLAB // (uint32_t type_idx, Method* method, Thread*) |
| 1741 | RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME |
| 1742 | RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER |
| 1743 | END art_quick_alloc_object_tlab |
| 1744 | |
Hiroshi Yamauchi | cd77378 | 2016-04-07 17:18:24 -0700 | [diff] [blame] | 1745 | // A hand-written override for GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_region_tlab, RegionTLAB) |
| 1746 | ENTRY art_quick_alloc_object_region_tlab |
| 1747 | // Fast path region tlab allocation. |
| 1748 | // x0: type_idx/return value, x1: ArtMethod*, xSELF(x19): Thread::Current |
| 1749 | // x2-x7: free. |
| 1750 | #if !defined(USE_READ_BARRIER) |
| 1751 | mvn x0, xzr // Read barrier must be enabled here. |
| 1752 | ret // Return -1. |
| 1753 | #endif |
| 1754 | ldr x2, [x1, #ART_METHOD_DEX_CACHE_TYPES_OFFSET_64] // Load dex cache resolved types array |
| 1755 | // Load the class (x2) |
| 1756 | ldr w2, [x2, x0, lsl #COMPRESSED_REFERENCE_SIZE_SHIFT] |
| 1757 | // Read barrier for class load. |
| 1758 | ldr w3, [xSELF, #THREAD_IS_GC_MARKING_OFFSET] |
| 1759 | cbnz x3, .Lart_quick_alloc_object_region_tlab_class_load_read_barrier_slow_path |
| 1760 | .Lart_quick_alloc_object_region_tlab_class_load_read_barrier_slow_path_exit: |
| 1761 | ALLOC_OBJECT_TLAB_FAST_PATH .Lart_quick_alloc_object_region_tlab_slow_path |
| 1762 | .Lart_quick_alloc_object_region_tlab_class_load_read_barrier_slow_path: |
| 1763 | // The read barrier slow path. Mark |
| 1764 | // the class. |
| 1765 | stp x0, x1, [sp, #-32]! // Save registers (x0, x1, lr). |
    str    xLR, [sp, #16]             // Save lr; the 32-byte frame keeps sp 16-byte aligned.
| 1767 | mov x0, x2 // Pass the class as the first param. |
| 1768 | bl artReadBarrierMark |
| 1769 | mov x2, x0 // Get the (marked) class back. |
| 1770 | ldp x0, x1, [sp, #0] // Restore registers. |
| 1771 | ldr xLR, [sp, #16] |
| 1772 | add sp, sp, #32 |
| 1773 | b .Lart_quick_alloc_object_region_tlab_class_load_read_barrier_slow_path_exit |
| 1774 | .Lart_quick_alloc_object_region_tlab_slow_path: |
| 1775 | SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // Save callee saves in case of GC. |
| 1776 | mov x2, xSELF // Pass Thread::Current. |
| 1777 | bl artAllocObjectFromCodeRegionTLAB // (uint32_t type_idx, Method* method, Thread*) |
| 1778 | RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME |
| 1779 | RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER |
| 1780 | END art_quick_alloc_object_region_tlab |
Hiroshi Yamauchi | 10d4c08 | 2016-02-24 12:51:18 -0800 | [diff] [blame] | 1781 | |
Zheng Xu | 48241e7 | 2014-05-23 11:52:42 +0800 | [diff] [blame] | 1782 | /* |
Zheng Xu | 69a5030 | 2015-04-14 20:04:41 +0800 | [diff] [blame] | 1783 | * Called by managed code when the thread has been asked to suspend. |
Zheng Xu | 48241e7 | 2014-05-23 11:52:42 +0800 | [diff] [blame] | 1784 | */ |
| 1785 | .extern artTestSuspendFromCode |
| 1786 | ENTRY art_quick_test_suspend |
| 1787 | ldrh w0, [xSELF, #THREAD_FLAGS_OFFSET] // get xSELF->state_and_flags.as_struct.flags |
Zheng Xu | 48241e7 | 2014-05-23 11:52:42 +0800 | [diff] [blame] | 1788 | cbnz w0, .Lneed_suspend // check flags == 0 |
| 1789 | ret // return if flags == 0 |
| 1790 | .Lneed_suspend: |
| 1791 | mov x0, xSELF |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1792 | SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves for stack crawl |
| 1793 | bl artTestSuspendFromCode // (Thread*) |
| 1794 | RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME_AND_RETURN |
Zheng Xu | 48241e7 | 2014-05-23 11:52:42 +0800 | [diff] [blame] | 1795 | END art_quick_test_suspend |
Stuart Monteith | b95a534 | 2014-03-12 13:32:32 +0000 | [diff] [blame] | 1796 | |
Stuart Monteith | d5c78f4 | 2014-06-11 16:44:46 +0100 | [diff] [blame] | 1797 | ENTRY art_quick_implicit_suspend |
| 1798 | mov x0, xSELF |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1799 | SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save callee saves for stack crawl |
| 1800 | bl artTestSuspendFromCode // (Thread*) |
| 1801 | RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME_AND_RETURN |
Stuart Monteith | d5c78f4 | 2014-06-11 16:44:46 +0100 | [diff] [blame] | 1802 | END art_quick_implicit_suspend |
| 1803 | |
Andreas Gampe | e62a07e | 2014-03-26 14:53:21 -0700 | [diff] [blame] | 1804 | /* |
| 1805 | * Called by managed code that is attempting to call a method on a proxy class. On entry |
| 1806 | * x0 holds the proxy method and x1 holds the receiver; The frame size of the invoked proxy |
| 1807 | * method agrees with a ref and args callee save frame. |
| 1808 | */ |
| 1809 | .extern artQuickProxyInvokeHandler |
| 1810 | ENTRY art_quick_proxy_invoke_handler |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1811 | SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_X0 |
Andreas Gampe | e62a07e | 2014-03-26 14:53:21 -0700 | [diff] [blame] | 1812 | mov x2, xSELF // pass Thread::Current |
| 1813 | mov x3, sp // pass SP |
| 1814 | bl artQuickProxyInvokeHandler // (Method* proxy method, receiver, Thread*, SP) |
Serban Constantinescu | 9bd88b0 | 2015-04-22 16:24:46 +0100 | [diff] [blame] | 1815 | ldr x2, [xSELF, THREAD_EXCEPTION_OFFSET] |
    cbnz   x2, .Lexception_in_proxy   // branch if an exception is pending; fall through on success
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1817 | RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME // Restore frame |
Andreas Gampe | d1e9167 | 2014-06-02 22:50:05 -0700 | [diff] [blame] | 1818 | fmov d0, x0 // Store result in d0 in case it was float or double |
Andreas Gampe | e62a07e | 2014-03-26 14:53:21 -0700 | [diff] [blame] | 1819 | ret // return on success |
| 1820 | .Lexception_in_proxy: |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1821 | RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME |
Andreas Gampe | e62a07e | 2014-03-26 14:53:21 -0700 | [diff] [blame] | 1822 | DELIVER_PENDING_EXCEPTION |
| 1823 | END art_quick_proxy_invoke_handler |
Stuart Monteith | b95a534 | 2014-03-12 13:32:32 +0000 | [diff] [blame] | 1824 | |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 1825 | /* |
Nicolas Geoffray | 796d630 | 2016-03-13 22:22:31 +0000 | [diff] [blame] | 1826 | * Called to resolve an imt conflict. |
| 1827 | * x0 is the conflict ArtMethod. |
| 1828 | * xIP1 is a hidden argument that holds the target interface method's dex method index. |
| 1829 | * |
| 1830 | * Note that this stub writes to xIP0, xIP1, and x0. |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 1831 | */ |
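// The ImtConflictTable scanned below is assumed to be a null-terminated array of
// (interface method, implementation method) pointer pairs; hence the stride of
// 2 * __SIZEOF_POINTER__ when iterating and the __SIZEOF_POINTER__ offset on a hit.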
Andreas Gampe | 3031c8d | 2015-07-13 20:11:06 -0700 | [diff] [blame] | 1832 | .extern artInvokeInterfaceTrampoline |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 1833 | ENTRY art_quick_imt_conflict_trampoline |
Nicolas Geoffray | 796d630 | 2016-03-13 22:22:31 +0000 | [diff] [blame] | 1834 | ldr xIP0, [sp, #0] // Load referrer |
| 1835 | ldr xIP0, [xIP0, #ART_METHOD_DEX_CACHE_METHODS_OFFSET_64] // Load dex cache methods array |
| 1836 | ldr xIP0, [xIP0, xIP1, lsl #POINTER_SIZE_SHIFT] // Load interface method |
| 1837 | ldr xIP1, [x0, #ART_METHOD_JNI_OFFSET_64] // Load ImtConflictTable |
| 1838 | ldr x0, [xIP1] // Load first entry in ImtConflictTable. |
| 1839 | .Limt_table_iterate: |
| 1840 | cmp x0, xIP0 |
| 1841 | // Branch if found. Benchmarks have shown doing a branch here is better. |
| 1842 | beq .Limt_table_found |
| 1843 | // If the entry is null, the interface method is not in the ImtConflictTable. |
| 1844 | cbz x0, .Lconflict_trampoline |
| 1845 | // Iterate over the entries of the ImtConflictTable. |
| 1846 | ldr x0, [xIP1, #(2 * __SIZEOF_POINTER__)]! |
| 1847 | b .Limt_table_iterate |
| 1848 | .Limt_table_found: |
Goran Jakovljevic | 59028d9 | 2016-03-29 18:05:03 +0200 | [diff] [blame] | 1849 | // We successfully hit an entry in the table. Load the target method |
Nicolas Geoffray | 796d630 | 2016-03-13 22:22:31 +0000 | [diff] [blame] | 1850 | // and jump to it. |
| 1851 | ldr x0, [xIP1, #__SIZEOF_POINTER__] |
| 1852 | ldr xIP0, [x0, #ART_METHOD_QUICK_CODE_OFFSET_64] |
| 1853 | br xIP0 |
| 1854 | .Lconflict_trampoline: |
| 1855 | // Call the runtime stub to populate the ImtConflictTable and jump to the |
| 1856 | // resolved method. |
Andreas Gampe | 3031c8d | 2015-07-13 20:11:06 -0700 | [diff] [blame] | 1857 | INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 1858 | END art_quick_imt_conflict_trampoline |

ENTRY art_quick_resolution_trampoline
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME
    mov x2, xSELF
    mov x3, sp
    bl artQuickResolutionTrampoline   // (called, receiver, Thread*, SP)
    cbz x0, 1f
    mov xIP0, x0                      // Remember returned code pointer in xIP0.
    ldr x0, [sp, #0]                  // artQuickResolutionTrampoline puts called method in *SP.
    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
    br xIP0
1:
    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_resolution_trampoline
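
    /*
     * C-level shape of the call above, matching the argument comment at the
     * call site (a sketch; consult the runtime for the authoritative
     * declaration): on success the resolved method has been stored at *sp and
     * its code entrypoint is returned, so the stub reloads x0 from [sp] and
     * tail-calls the returned code; on failure null is returned and the
     * pending exception is delivered.
     *
     *   const void* artQuickResolutionTrampoline(ArtMethod* called,
     *                                            mirror::Object* receiver,
     *                                            Thread* self,
     *                                            ArtMethod** sp);
     */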

    /*
     * Generic JNI frame layout:
     *
     * #-------------------#
     * |                   |
     * | caller method...  |
     * #-------------------#    <--- SP on entry
     * | Return X30/LR     |
     * | X29/FP            |    callee save
     * | X28               |    callee save
     * | X27               |    callee save
     * | X26               |    callee save
     * | X25               |    callee save
     * | X24               |    callee save
     * | X23               |    callee save
     * | X22               |    callee save
     * | X21               |    callee save
     * | X20               |    callee save
     * | X19               |    callee save
     * | X7                |    arg7
     * | X6                |    arg6
     * | X5                |    arg5
     * | X4                |    arg4
     * | X3                |    arg3
     * | X2                |    arg2
     * | X1                |    arg1
     * | D7                |    float arg 8
     * | D6                |    float arg 7
     * | D5                |    float arg 6
     * | D4                |    float arg 5
     * | D3                |    float arg 4
     * | D2                |    float arg 3
     * | D1                |    float arg 2
     * | D0                |    float arg 1
     * | Method*           | <- X0
     * #-------------------#
     * | local ref cookie  |    // 4B
     * | handle scope size |    // 4B
     * #-------------------#
     * | JNI Call Stack    |
     * #-------------------#    <--- SP on native call
     * |                   |
     * | Stack for Regs    |    The trampoline assembly will pop these values
     * |                   |    into registers for native call
     * #-------------------#
     * | Native code ptr   |
     * #-------------------#
     * | Free scratch      |
     * #-------------------#
     * | Ptr to (1)        |    <--- SP
     * #-------------------#
     */
    /*
     * Called to do a generic JNI down-call.
     */
ENTRY art_quick_generic_jni_trampoline
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_X0

    // Save SP, so we can have static CFI info.
    mov x28, sp
    .cfi_def_cfa_register x28

    // This looks the same, but is different: this will be updated to point to the bottom
    // of the frame when the handle scope is inserted.
    mov xFP, sp

    // Reserve 5KB of scratch space below the frame for the handle scope,
    // outgoing arguments, and bookkeeping (see the layout above).
    mov xIP0, #5120
    sub sp, sp, xIP0

    // Prepare for the artQuickGenericJniTrampoline call:
    // (Thread*, SP)
    //    x0    x1   <= C calling convention
    //  xSELF  xFP   <= where they are

    mov x0, xSELF   // Thread*
    mov x1, xFP
    bl artQuickGenericJniTrampoline   // (Thread*, sp)

    // The C call will have registered the complete save-frame on success.
    // The result of the call is:
    // x0: pointer to native code, 0 on error.
    // x1: pointer to the bottom of the used area of the alloca, can restore stack till there.

    // Check for error (x0 == 0).
    cbz x0, .Lexception_in_native

    // Release part of the alloca.
    mov sp, x1

    // Save the code pointer.
    mov xIP0, x0

    // Load parameters from frame into registers.
    // TODO: Check with artQuickGenericJniTrampoline.
    // Also, check again AAPCS64 - the stack arguments are interleaved.
    ldp x0, x1, [sp]
    ldp x2, x3, [sp, #16]
    ldp x4, x5, [sp, #32]
    ldp x6, x7, [sp, #48]

    ldp d0, d1, [sp, #64]
    ldp d2, d3, [sp, #80]
    ldp d4, d5, [sp, #96]
    ldp d6, d7, [sp, #112]

    add sp, sp, #128

    blr xIP0        // Native call.

    // Result sign extension is handled in C code.
    // Prepare for the artQuickGenericJniEndTrampoline call:
    // (Thread*, result, result_f)
    //    x0      x1      x2        <= C calling convention
    mov x1, x0      // Result (from saved).
    mov x0, xSELF   // Thread register.
    fmov x2, d0     // d0 will contain floating point result, but needs to go into x2.

    bl artQuickGenericJniEndTrampoline

    // Pending exceptions possible.
    ldr x2, [xSELF, #THREAD_EXCEPTION_OFFSET]
    cbnz x2, .Lexception_in_native

    // Tear down the alloca.
    mov sp, x28
    .cfi_def_cfa_register sp

    // Tear down the callee-save frame.
    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME

    // Also store the result in d0, for the case of a floating-point return.
    fmov d0, x0
    ret

.Lexception_in_native:
    // Move to x1 then sp to please assembler.
    ldr x1, [xSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]
    mov sp, x1
    .cfi_def_cfa_register sp
    // This will create a new save-all frame, required by the runtime.
    DELIVER_PENDING_EXCEPTION
END art_quick_generic_jni_trampoline
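
    /*
     * C-level shape of the handshake above (an illustrative sketch; the
     * runtime's real declarations pack the two results of the first call into
     * a two-register return, so the struct below is a stand-in):
     *
     *   struct { const void* native_code; void* sp; }    // Returned in x0/x1.
     *   artQuickGenericJniTrampoline(Thread* self, ArtMethod** sp);
     *
     *   uint64_t artQuickGenericJniEndTrampoline(Thread* self,
     *                                            uint64_t result,
     *                                            uint64_t result_f);
     */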

    /*
     * Called to bridge from the quick to interpreter ABI. On entry the arguments match those
     * of a quick call:
     * x0 = method being called/to bridge to.
     * x1..x7, d0..d7 = arguments to that method.
     */
ENTRY art_quick_to_interpreter_bridge
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME   // Set up frame and save arguments.

    // x0 will contain mirror::ArtMethod* method.
    mov x1, xSELF   // Pass Thread::Current(), held in xSELF.
    mov x2, sp

    // uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
    //                                      mirror::ArtMethod** sp)
    bl artQuickToInterpreterBridge

    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME   // TODO: no need to restore arguments in this case.

    // Copy the result into d0 as well, in case of a floating-point return.
    fmov d0, x0

    RETURN_OR_DELIVER_PENDING_EXCEPTION
END art_quick_to_interpreter_bridge

    //
    // Instrumentation-related stubs
    //
    .extern artInstrumentationMethodEntryFromCode
ENTRY art_quick_instrumentation_entry
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME

    mov x20, x0     // Preserve method reference in a callee-save register.

    mov x2, xSELF
    mov x3, xLR
    bl artInstrumentationMethodEntryFromCode   // (Method*, Object*, Thread*, LR)

    mov xIP0, x0    // x0 = result of call.
    mov x0, x20     // Reload method reference.

    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME   // Note: will restore xSELF.
    adr xLR, art_quick_instrumentation_exit
    br xIP0         // Tail-call method with lr set to art_quick_instrumentation_exit.
END art_quick_instrumentation_entry

    .extern artInstrumentationMethodExitFromCode
ENTRY art_quick_instrumentation_exit
    mov xLR, #0     // Clobber LR for later checks.

    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME

    // We need to save x0 and d0. We could use a callee-save from SETUP_REFS_ONLY, but then
    // we would need to fully restore it. As there are a lot of callee-save registers, it seems
    // easier to have an extra small stack area.

    str x0, [sp, #-16]!   // Save integer result.
    .cfi_adjust_cfa_offset 16
    str d0, [sp, #8]      // Save floating-point result.

    add x1, sp, #16 // Pass SP.
    mov x2, x0      // Pass integer result.
    fmov x3, d0     // Pass floating-point result.
    mov x0, xSELF   // Pass Thread.
    bl artInstrumentationMethodExitFromCode   // (Thread*, SP, gpr_res, fpr_res)

    mov xIP0, x0    // Return address from instrumentation call.
    mov xLR, x1     // x1 holds the link register if we're to bounce to deoptimize.

    ldr d0, [sp, #8]      // Restore floating-point result.
    ldr x0, [sp], #16     // Restore integer result, and drop stack area.
    .cfi_adjust_cfa_offset -16

    POP_REFS_ONLY_CALLEE_SAVE_FRAME

    br xIP0         // Tail-call out.
END art_quick_instrumentation_exit
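
    /*
     * C-level shape of the instrumentation handshake above (a sketch; the
     * exit call really returns two words - the address to return to and the
     * link-register value used for a possible deoptimization bounce - so the
     * struct below is a stand-in for that two-register return):
     *
     *   const void* artInstrumentationMethodEntryFromCode(
     *       ArtMethod* method, mirror::Object* this_object,
     *       Thread* self, uintptr_t lr);   // Code to invoke, with LR set to
     *                                      // art_quick_instrumentation_exit.
     *
     *   struct { uintptr_t return_pc; uintptr_t deopt_lr; }   // x0/x1
     *   artInstrumentationMethodExitFromCode(Thread* self, uintptr_t* sp,
     *                                        uint64_t gpr_res, uint64_t fpr_res);
     */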

    /*
     * Instrumentation has requested that we deoptimize into the interpreter. The deoptimization
     * will long jump to the upcall with a special exception of -1.
     */
    .extern artDeoptimize
ENTRY art_quick_deoptimize
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov x0, xSELF   // Pass thread.
    bl artDeoptimize    // artDeoptimize(Thread*)
    brk 0
END art_quick_deoptimize

    /*
     * Compiled code has requested that we deoptimize into the interpreter. The deoptimization
     * will long jump to the upcall with a special exception of -1.
     */
    .extern artDeoptimizeFromCompiledCode
ENTRY art_quick_deoptimize_from_compiled_code
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    mov x0, xSELF   // Pass thread.
    bl artDeoptimizeFromCompiledCode    // artDeoptimizeFromCompiledCode(Thread*)
    brk 0
END art_quick_deoptimize_from_compiled_code

    /*
     * String's indexOf.
     *
     * TODO: Not very optimized.
     * On entry:
     *    x0: string object (known non-null)
     *    w1: char to match (known <= 0xFFFF)
     *    w2: Starting offset in string data
     */
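    /*
     * Equivalent C for the search below (an illustrative sketch, not runtime
     * code; the assembly additionally unrolls the main loop to test four
     * chars per iteration):
     *
     *   int32_t IndexOf(const uint16_t* chars, int32_t count,
     *                   uint16_t ch, int32_t start) {
     *     if (start < 0) start = 0;            // Clamp start to [0..count].
     *     if (start > count) start = count;
     *     for (int32_t i = start; i < count; ++i) {
     *       if (chars[i] == ch) return i;      // Index of the first match.
     *     }
     *     return -1;                           // Not found.
     *   }
     */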
ENTRY art_quick_indexof
    ldr w3, [x0, #MIRROR_STRING_COUNT_OFFSET]
    add x0, x0, #MIRROR_STRING_VALUE_OFFSET

    /* Clamp start to [0..count] */
    cmp w2, #0
    csel w2, wzr, w2, lt
    cmp w2, w3
    csel w2, w3, w2, gt

    /* Save a copy to compute result */
    mov x5, x0

    /* Build pointer to start of data to compare and pre-bias */
    add x0, x0, x2, lsl #1
    sub x0, x0, #2

    /* Compute iteration count */
    sub w2, w3, w2

    /*
     * At this point we have:
     *   x0: start of the data to test
     *   w1: char to compare
     *   w2: iteration count
     *   x5: original start of string data
     */

    subs w2, w2, #4
    b.lt .Lindexof_remainder

.Lindexof_loop4:
    ldrh w6, [x0, #2]!
    ldrh w7, [x0, #2]!
    ldrh wIP0, [x0, #2]!
    ldrh wIP1, [x0, #2]!
    cmp w6, w1
    b.eq .Lmatch_0
    cmp w7, w1
    b.eq .Lmatch_1
    cmp wIP0, w1
    b.eq .Lmatch_2
    cmp wIP1, w1
    b.eq .Lmatch_3
    subs w2, w2, #4
    b.ge .Lindexof_loop4

.Lindexof_remainder:
    adds w2, w2, #4
    b.eq .Lindexof_nomatch

.Lindexof_loop1:
    ldrh w6, [x0, #2]!
    cmp w6, w1
    b.eq .Lmatch_3
    subs w2, w2, #1
    b.ne .Lindexof_loop1

.Lindexof_nomatch:
    mov x0, #-1
    ret

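    // On a match, x0 points at the most recently loaded halfword; back up to
    // the matching one, subtract the original data pointer (x5), and shift
    // right by one to convert the byte offset into a char index.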
.Lmatch_0:
    sub x0, x0, #6
    sub x0, x0, x5
    asr x0, x0, #1
    ret
.Lmatch_1:
    sub x0, x0, #4
    sub x0, x0, x5
    asr x0, x0, #1
    ret
.Lmatch_2:
    sub x0, x0, #2
    sub x0, x0, x5
    asr x0, x0, #1
    ret
.Lmatch_3:
    sub x0, x0, x5
    asr x0, x0, #1
    ret
END art_quick_indexof