sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 1 | |
| 2 | ##--------------------------------------------------------------------## |
| 3 | ##--- The core dispatch loop, for jumping to a code address. ---## |
| 4 | ##--- vg_dispatch.S ---## |
| 5 | ##--------------------------------------------------------------------## |
| 6 | |
| 7 | /* |
| 8 | This file is part of Valgrind, an x86 protected-mode emulator |
| 9 | designed for debugging and profiling binaries on x86-Unixes. |
| 10 | |
| 11 | Copyright (C) 2000-2002 Julian Seward |
| 12 | jseward@acm.org |
| 13 | Julian_Seward@muraroa.demon.co.uk |
| 14 | |
| 15 | This program is free software; you can redistribute it and/or |
| 16 | modify it under the terms of the GNU General Public License as |
| 17 | published by the Free Software Foundation; either version 2 of the |
| 18 | License, or (at your option) any later version. |
| 19 | |
| 20 | This program is distributed in the hope that it will be useful, but |
| 21 | WITHOUT ANY WARRANTY; without even the implied warranty of |
| 22 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
| 23 | General Public License for more details. |
| 24 | |
| 25 | You should have received a copy of the GNU General Public License |
| 26 | along with this program; if not, write to the Free Software |
| 27 | Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA |
| 28 | 02111-1307, USA. |
| 29 | |
| 30 | The GNU General Public License is contained in the file LICENSE. |
| 31 | */ |
| 32 | |
| 33 | #include "vg_constants.h" |
| 34 | |
| 35 | |
| 36 | /*------------------------------------------------------------*/ |
| 37 | /*--- The normal-case dispatch machinery. ---*/ |
| 38 | /*------------------------------------------------------------*/ |
| 39 | |
| 40 | /* To transfer to an (original) code address, load it into %eax and |
| 41 | jump to vg_dispatch. This fragment of code tries to find the |
| 42 | address of the corresponding translation by searching the translation |
| 43 | table. If it fails, a new translation is made, added to the |
| 44 | translation table, and then jumped to. Almost all the hard |
| 45 | work is done by C routines; this code simply handles the |
| 46 | common case fast -- when the translation address is found in |
| 47 | the translation cache. |
| 48 | |
| 49 | At entry, %eax is the only live (real-machine) register; the |
| 50 | entire simulated state is tidily saved in vg_m_state. |
| 51 | */ |
| 52 | |
| 53 | |
| 54 | /* The C world needs a way to get started simulating. So we provide |
| 55 | a function void vg_run_innerloop ( void ), which starts running |
| 56 | from vg_m_eip, and exits when the counter reaches zero. This loop |
| 57 | can also exit if vg_oursignalhandler() catches a non-resumable |
| 58 | signal, for example SIGSEGV. It then longjmp()s back past here. |
| 59 | */ |
| 60 | |
| 61 | .globl VG_(run_innerloop) |
| 62 | VG_(run_innerloop): |
| 63 | #OYNK(1000) |
| 64 | # ----- entry point to VG_(run_innerloop) ----- |
| 65 | pushal |
| 66 | # Set up the baseBlock pointer |
| 67 | movl $VG_(baseBlock), %ebp |
| 68 | |
| 69 | # fetch m_eip into %eax |
| 70 | movl VGOFF_(m_eip), %esi |
| 71 | movl (%ebp, %esi, 4), %eax |
| 72 | |
| 73 | # fall thru to vg_dispatch |
| 74 | |
| 75 | .globl VG_(dispatch) |
| 76 | VG_(dispatch): |
| 77 | # %eax holds destination (original) address |
| 78 | # To signal any kind of interruption, set vg_dispatch_ctr |
| 79 | # to 1, and vg_interrupt_reason to the appropriate value |
| 80 | # before jumping here. |
| 81 | |
| 82 | # %ebp indicates further details of the control transfer |
| 83 | # requested to the address in %eax. The idea is that we |
| 84 | # want to check all jump targets to see if they are either |
| 85 | # VG_(signalreturn_bogusRA) or VG_(trap_here), both of which |
| 86 | # require special treatment. However, testing all branch |
| 87 | # targets is expensive, and anyway in most cases JITter knows |
| 88 | # that a jump cannot be to either of these two. We therefore |
| 89 | # adopt the following trick. |
| 90 | # |
| 91 | # If ebp == & VG_(baseBlock), which is what it started out as, |
| 92 | # this is a jump for which the JITter knows no check need be |
| 93 | # made. |
| 94 | # |
| 95 | # If it is ebp == VG_EBP_DISPATCH_CHECKED, we had better make |
| 96 | # the check. |
| 97 | # |
| 98 | # If %ebp has any other value, we panic. |
| 99 | # |
| 100 | # What the JITter assumes is that VG_(signalreturn_bogusRA) can |
| 101 | # only be arrived at from an x86 ret insn, and dually that |
| 102 | # VG_(trap_here) can only be arrived at from an x86 call insn. |
| 103 | # The net effect is that all call and return targets are checked |
| 104 | # but straightforward jumps are not. |
| 105 | # |
| 106 | # Thinks ... is this safe if the client happens to tailcall |
| 107 | # VG_(trap_here) ? I dont think that can happen -- if it did |
| 108 | # it would be a problem. |
| 109 | # |
| 110 | cmpl $VG_(baseBlock), %ebp |
| 111 | jnz dispatch_checked_maybe |
| 112 | |
| 113 | dispatch_unchecked: |
| 114 | # save the jump address at VG_(baseBlock)[VGOFF_(m_eip)], |
| 115 | # so that if this block takes a fault, we later know where we were. |
| 116 | movl VGOFF_(m_eip), %esi |
| 117 | movl %eax, (%ebp, %esi, 4) |
| 118 | |
| 119 | # do we require attention? |
| 120 | # this check has to be after the call/ret transfer checks, because |
| 121 | # we have to ensure that any control transfer following a syscall |
| 122 | # return is an ordinary transfer. By the time we get here, we have |
| 123 | # established that the next transfer, which might get delayed till |
| 124 | # after a syscall return, is an ordinary one. |
| 125 | # All a bit subtle ... |
| 126 | #OYNK(1001) |
| 127 | decl VG_(dispatch_ctr) |
| 128 | jz counter_is_zero |
| 129 | |
| 130 | #OYNK(1002) |
| 131 | # try a fast lookup in the translation cache |
| 132 | movl %eax, %ebx |
| 133 | andl $VG_TT_FAST_MASK, %ebx |
| 134 | # ebx = tt_fast index |
| 135 | movl VG_(tt_fast)(,%ebx,4), %ebx |
| 136 | # ebx points at a tt entry |
| 137 | # now compare target with the tte.orig_addr field (+0) |
| 138 | cmpl %eax, (%ebx) |
| 139 | jnz full_search |
| 140 | # Found a match. Set the tte.mru_epoch field (+8) |
| 141 | # and call the tte.trans_addr field (+4) |
| 142 | movl VG_(current_epoch), %ecx |
| 143 | movl %ecx, 8(%ebx) |
| 144 | call *4(%ebx) |
| 145 | jmp VG_(dispatch) |
| 146 | |
| 147 | full_search: |
| 148 | #no luck? try the full table search |
| 149 | pushl %eax |
| 150 | call VG_(search_transtab) |
| 151 | addl $4, %esp |
| 152 | |
| 153 | # %eax has trans addr or zero |
| 154 | cmpl $0, %eax |
| 155 | jz need_translation |
| 156 | # full table search also zeroes the tte.last_use field, |
| 157 | # so we dont have to do so here. |
| 158 | call *%eax |
| 159 | jmp VG_(dispatch) |
| 160 | |
| 161 | need_translation: |
| 162 | OYNK(1003) |
| 163 | movl $VG_Y_TRANSLATE, VG_(interrupt_reason) |
| 164 | counter_is_zero: |
| 165 | OYNK(1004) |
| 166 | popal |
| 167 | # ----- (the only) exit point from VG_(run_innerloop) ----- |
| 168 | # ----- unless of course vg_oursignalhandler longjmp()s |
| 169 | # ----- back through it, due to an unmanagable signal |
| 170 | ret |
| 171 | |
| 172 | |
| 173 | /* The normal way to get back to the translation loop is to put |
| 174 | the address of the next (original) address and return. |
| 175 | However, simulation of a RET insn requires a check as to whether |
| 176 | the next address is vg_signalreturn_bogusRA. If so, a signal |
| 177 | handler is returning, so we need to invoke our own mechanism to |
| 178 | deal with that, by calling vg_signal_returns(). This restores |
| 179 | the simulated machine state from the VgSigContext structure on |
| 180 | the stack, including the (simulated, of course) %eip saved when |
| 181 | the signal was delivered. We then arrange to jump to the |
| 182 | restored %eip. |
| 183 | */ |
| 184 | dispatch_checked_maybe: |
| 185 | # Possibly a checked dispatch. Sanity check ... |
| 186 | cmpl $VG_EBP_DISPATCH_CHECKED, %ebp |
| 187 | jz dispatch_checked |
| 188 | # ebp has an invalid value ... crap out. |
| 189 | pushl $panic_msg_ebp |
| 190 | call VG_(panic) |
| 191 | # (never returns) |
| 192 | |
| 193 | dispatch_checked: |
| 194 | OYNK(2000) |
| 195 | # first off, restore %ebp -- since it is currently wrong |
| 196 | movl $VG_(baseBlock), %ebp |
| 197 | |
| 198 | # see if we need to mess with stack blocks |
| 199 | pushl %ebp |
| 200 | pushl %eax |
| 201 | call VG_(delete_client_stack_blocks_following_ESP_change) |
| 202 | popl %eax |
| 203 | popl %ebp |
| 204 | |
| 205 | # is this a signal return? |
| 206 | cmpl $VG_(signalreturn_bogusRA), %eax |
| 207 | jz dispatch_to_signalreturn_bogusRA |
| 208 | # should we intercept this call? |
| 209 | cmpl $VG_(trap_here), %eax |
| 210 | jz dispatch_to_trap_here |
| 211 | # ok, its not interesting. Handle the normal way. |
| 212 | jmp dispatch_unchecked |
| 213 | |
| 214 | dispatch_to_signalreturn_bogusRA: |
| 215 | OYNK(2001) |
| 216 | pushal |
| 217 | call VG_(signal_returns) |
| 218 | popal |
| 219 | # %EIP will now point to the insn which should have followed |
| 220 | # the signal delivery. Jump to it. Since we no longer have any |
| 221 | # hint from the JITter about whether or not it is checkable, |
| 222 | # go via the conservative route. |
| 223 | movl VGOFF_(m_eip), %esi |
| 224 | movl (%ebp, %esi, 4), %eax |
| 225 | jmp dispatch_checked |
| 226 | |
| 227 | |
| 228 | /* Similarly, check CALL targets to see if it is the ultra-magical |
| 229 | vg_trap_here(), and, if so, act accordingly. See vg_clientmalloc.c. |
| 230 | Be careful not to get the real and simulated CPUs, |
| 231 | stacks and regs mixed up ... |
| 232 | */ |
| 233 | dispatch_to_trap_here: |
| 234 | OYNK(111) |
| 235 | /* Considering the params to vg_trap_here(), we should have: |
| 236 | 12(%ESP) is what_to_do |
| 237 | 8(%ESP) is arg2 |
| 238 | 4(%ESP) is arg1 |
| 239 | 0(%ESP) is return address |
| 240 | */ |
| 241 | movl VGOFF_(m_esp), %esi |
| 242 | movl (%ebp, %esi, 4), %ebx |
| 243 | # %ebx now holds simulated %ESP |
| 244 | cmpl $0x4000, 12(%ebx) |
| 245 | jz handle_malloc |
| 246 | cmpl $0x4001, 12(%ebx) |
| 247 | jz handle_malloc |
| 248 | cmpl $0x4002, 12(%ebx) |
| 249 | jz handle_malloc |
| 250 | cmpl $0x5000, 12(%ebx) |
| 251 | jz handle_free |
| 252 | cmpl $0x5001, 12(%ebx) |
| 253 | jz handle_free |
| 254 | cmpl $0x5002, 12(%ebx) |
| 255 | jz handle_free |
| 256 | cmpl $6666, 12(%ebx) |
| 257 | jz handle_calloc |
| 258 | cmpl $7777, 12(%ebx) |
| 259 | jz handle_realloc |
| 260 | cmpl $8888, 12(%ebx) |
| 261 | jz handle_memalign |
| 262 | push $panic_msg_trap |
| 263 | call VG_(panic) |
| 264 | # vg_panic never returns |
| 265 | |
| 266 | handle_malloc: |
| 267 | # %ESP is in %ebx |
| 268 | pushl 12(%ebx) |
| 269 | pushl 8(%ebx) |
| 270 | call VG_(client_malloc) |
| 271 | addl $8, %esp |
| 272 | # returned value is in %eax |
| 273 | jmp save_eax_and_simulate_RET |
| 274 | |
| 275 | handle_free: |
| 276 | # %ESP is in %ebx |
| 277 | pushl 12(%ebx) |
| 278 | pushl 8(%ebx) |
| 279 | call VG_(client_free) |
| 280 | addl $8, %esp |
| 281 | jmp simulate_RET |
| 282 | |
| 283 | handle_calloc: |
| 284 | # %ESP is in %ebx |
| 285 | pushl 8(%ebx) |
| 286 | pushl 4(%ebx) |
| 287 | call VG_(client_calloc) |
| 288 | addl $8, %esp |
| 289 | # returned value is in %eax |
| 290 | jmp save_eax_and_simulate_RET |
| 291 | |
| 292 | handle_realloc: |
| 293 | # %ESP is in %ebx |
| 294 | pushl 8(%ebx) |
| 295 | pushl 4(%ebx) |
| 296 | call VG_(client_realloc) |
| 297 | addl $8, %esp |
| 298 | # returned value is in %eax |
| 299 | jmp save_eax_and_simulate_RET |
| 300 | |
| 301 | handle_memalign: |
| 302 | # %ESP is in %ebx |
| 303 | pushl 8(%ebx) |
| 304 | pushl 4(%ebx) |
| 305 | call VG_(client_memalign) |
| 306 | addl $8, %esp |
| 307 | # returned value is in %eax |
| 308 | jmp save_eax_and_simulate_RET |
| 309 | |
| 310 | save_eax_and_simulate_RET: |
| 311 | movl VGOFF_(m_eax), %esi |
| 312 | movl %eax, (%ebp, %esi, 4) # %eax -> %EAX |
| 313 | # set %EAX bits to VALID |
| 314 | movl VGOFF_(sh_eax), %esi |
| 315 | movl $0x0 /* All 32 bits VALID */, (%ebp, %esi, 4) |
| 316 | # fall thru ... |
| 317 | simulate_RET: |
| 318 | # standard return |
| 319 | movl VGOFF_(m_esp), %esi |
| 320 | movl (%ebp, %esi, 4), %ebx # %ESP -> %ebx |
| 321 | movl 0(%ebx), %eax # RA -> %eax |
| 322 | addl $4, %ebx # %ESP += 4 |
| 323 | movl %ebx, (%ebp, %esi, 4) # %ebx -> %ESP |
| 324 | jmp dispatch_checked # jump to %eax |
| 325 | |
| 326 | .data |
| 327 | panic_msg_trap: |
| 328 | .ascii "dispatch_to_trap_here: unknown what_to_do" |
| 329 | .byte 0 |
| 330 | panic_msg_ebp: |
| 331 | .ascii "vg_dispatch: %ebp has invalid value!" |
| 332 | .byte 0 |
| 333 | .text |
| 334 | |
| 335 | |
| 336 | /*------------------------------------------------------------*/ |
| 337 | /*--- A helper for delivering signals when the client is ---*/ |
| 338 | /*--- (presumably) blocked in a system call. ---*/ |
| 339 | /*------------------------------------------------------------*/ |
| 340 | |
| 341 | /* Returns, in %eax, the next orig_addr to run. |
| 342 | The caller needs to decide whether the returned orig_addr |
| 343 | requires special handling. |
| 344 | |
| 345 | extern Addr VG_(run_singleton_translation) ( Addr trans_addr ) |
| 346 | */ |
| 347 | |
| 348 | /* should we take care to save the FPU state here? */ |
| 349 | |
| 350 | .globl VG_(run_singleton_translation) |
| 351 | VG_(run_singleton_translation): |
| 352 | movl 4(%esp), %eax # eax = trans_addr |
| 353 | pushl %ebx |
| 354 | pushl %ecx |
| 355 | pushl %edx |
| 356 | pushl %esi |
| 357 | pushl %edi |
| 358 | pushl %ebp |
| 359 | |
| 360 | # set up ebp correctly for translations |
| 361 | movl $VG_(baseBlock), %ebp |
| 362 | |
| 363 | # run the translation |
| 364 | call *%eax |
| 365 | |
| 366 | # next orig_addr is correctly in %eax already |
| 367 | |
| 368 | popl %ebp |
| 369 | popl %edi |
| 370 | popl %esi |
| 371 | popl %edx |
| 372 | popl %ecx |
| 373 | popl %ebx |
| 374 | |
| 375 | ret |
| 376 | |
| 377 | ##--------------------------------------------------------------------## |
| 378 | ##--- end vg_dispatch.S ---## |
| 379 | ##--------------------------------------------------------------------## |