
/*--------------------------------------------------------------------*/
/*--- The core dispatch loop, for jumping to a code address.       ---*/
/*---                                       dispatch-x86-solaris.S ---*/
/*--------------------------------------------------------------------*/

/*
   This file is part of Valgrind, a dynamic binary instrumentation
   framework.

   Copyright (C) 2012-2017 Petr Pavlu
      setup@dagobah.cz

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License as
   published by the Free Software Foundation; either version 2 of the
   License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
   02111-1307, USA.

   The GNU General Public License is contained in the file COPYING.
*/

#include "pub_core_basics_asm.h"

#if defined(VGP_x86_solaris)

#include "pub_core_dispatch_asm.h"
#include "pub_core_transtab_asm.h"
#include "libvex_guest_offsets.h"        /* for OFFSET_x86_EIP */

/*------------------------------------------------------------*/
/*---                                                      ---*/
/*--- The dispatch loop.  VG_(disp_run_translations) is    ---*/
/*--- used to run all translations,                        ---*/
/*--- including no-redir ones.                             ---*/
/*---                                                      ---*/
/*------------------------------------------------------------*/

/*----------------------------------------------------*/
/*--- Entry and preamble (set everything up)       ---*/
/*----------------------------------------------------*/

/* signature:
void VG_(disp_run_translations)( UWord* two_words,
                                 void*  guest_state,
                                 Addr   host_addr );
*/
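/* A rough sketch of the caller's side, for orientation only; the real
   scheduler code lives elsewhere, and the names 'host_code' etc. here
   are purely illustrative:

      UWord two_words[2];
      VG_(disp_run_translations)( two_words, guest_state, host_code );
      // On return, two_words[0] holds a VG_TRC_* code and two_words[1]
      // optionally holds a second word -- see 'postamble' below for how
      // the pair gets filled in.
*/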
.text
.globl VG_(disp_run_translations)
.type VG_(disp_run_translations), @function
VG_(disp_run_translations):
        /* 0(%esp) holds our return address. */
        /* 4(%esp) holds two_words */
        /* 8(%esp) holds guest_state */
        /* 12(%esp) holds host_addr */

        /* The preamble */

        /* Save integer registers, since this is a pseudo-function. */
        pushl   %eax
        pushl   %ebx
        pushl   %ecx
        pushl   %edx
        pushl   %esi
        pushl   %edi
        pushl   %ebp

        /* 28+4(%esp)  holds two_words */
        /* 28+8(%esp)  holds guest_state */
        /* 28+12(%esp) holds host_addr */

        /* Get the host CPU in the state expected by generated code. */

        /* set host FPU control word to the default mode expected
           by VEX-generated code.  See comments in libvex.h for
           more info. */
        finit
        pushl   $0x027F
        fldcw   (%esp)
        addl    $4, %esp
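        /* (Note: 0x027F = all x87 exceptions masked, 53-bit precision,
           round-to-nearest -- the IEEE-double setup described in
           libvex.h.) */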

        /* set host SSE control word to the default mode expected
           by VEX-generated code. */
        cmpl    $0, VG_(machine_x86_have_mxcsr)
        jz      L1
        pushl   $0x1F80
        ldmxcsr (%esp)
        addl    $4, %esp
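        /* (Note: 0x1F80 is the MXCSR reset value: all SSE exceptions
           masked, round-to-nearest, DAZ/FTZ off.) */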
L1:
        /* set dir flag to known value */
        cld

        /* Set up the guest state pointer */
        movl    28+8(%esp), %ebp
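        /* (Generated code addresses the guest state relative to %ebp;
           for example, VG_(disp_cp_xindir) below reads the next guest
           EIP as OFFSET_x86_EIP(%ebp).) */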

        /* and jump into the code cache.  Chained translations in
           the code cache run until, for whatever reason, they can't
           continue.  When that happens, the translation in question
           will jump (or call) to one of the continuation points
           VG_(cp_...) below. */
        jmpl    *28+12(%esp)
        /*NOTREACHED*/
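        /* (Control re-enters C only through 'postamble' below: each
           VG_(disp_cp_*) continuation point either jumps back into the
           code cache or ends up at postamble with the return pair set
           up in %eax/%edx.) */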

/*----------------------------------------------------*/
/*--- Postamble and exit.                          ---*/
/*----------------------------------------------------*/

postamble:
        /* At this point, %eax and %edx contain two
           words to be returned to the caller.  %eax
           holds a TRC value, and %edx optionally may
           hold another word (for CHAIN_ME exits, the
           address of the place to patch.) */

        /* We're leaving.  Check that nobody messed with %mxcsr
           or %fpucw.  We can't mess with %eax or %edx here as they
           hold the tentative return values, but any others are OK. */
#if !defined(ENABLE_INNER)
        /* This check fails for self-hosting, so skip in that case */
        pushl   $0
        fstcw   (%esp)
        cmpl    $0x027F, (%esp)
        popl    %esi /* get rid of the word without trashing %eflags */
        jnz     invariant_violation
#endif
        cmpl    $0, VG_(machine_x86_have_mxcsr)
        jz      L2
        pushl   $0
        stmxcsr (%esp)
        andl    $0xFFFFFFC0, (%esp)  /* mask out status flags */
        cmpl    $0x1F80, (%esp)
        popl    %esi
        jnz     invariant_violation
L2:     /* otherwise we're OK */
        jmp     remove_frame
invariant_violation:
        movl    $VG_TRC_INVARIANT_FAILED, %eax
        movl    $0, %edx

remove_frame:
        /* Stash return values */
        movl    28+4(%esp), %edi    /* two_words */
        movl    %eax, 0(%edi)
        movl    %edx, 4(%edi)
        /* Restore int regs and return. */
        popl    %ebp
        popl    %edi
        popl    %esi
        popl    %edx
        popl    %ecx
        popl    %ebx
        popl    %eax
        ret

/*----------------------------------------------------*/
/*--- Continuation points                          ---*/
/*----------------------------------------------------*/

/* ------ Chain me to slow entry point ------ */
.global VG_(disp_cp_chain_me_to_slowEP)
VG_(disp_cp_chain_me_to_slowEP):
        /* We got called.  The return address indicates
           where the patching needs to happen.  Collect
           the return address and exit back to C land,
           handing the caller the pair (Chain_me_S, RA) */
        movl    $VG_TRC_CHAIN_ME_TO_SLOW_EP, %eax
        popl    %edx
        /* 5 = movl $VG_(disp_chain_me_to_slowEP), %edx;
           2 = call *%edx */
        subl    $5+2, %edx
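        /* (In other words, %edx now points at the start of the 7-byte
           movl+call sequence that got us here -- the address the
           scheduler's patcher needs in order to rewrite that site to
           jump directly to the target translation.  This is just a
           reading of the arithmetic above.) */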
        jmp     postamble

/* ------ Chain me to fast entry point ------ */
.global VG_(disp_cp_chain_me_to_fastEP)
VG_(disp_cp_chain_me_to_fastEP):
        /* We got called.  The return address indicates
           where the patching needs to happen.  Collect
           the return address and exit back to C land,
           handing the caller the pair (Chain_me_F, RA) */
        movl    $VG_TRC_CHAIN_ME_TO_FAST_EP, %eax
        popl    %edx
        /* 5 = movl $VG_(disp_chain_me_to_fastEP), %edx;
           2 = call *%edx */
        subl    $5+2, %edx
        jmp     postamble

/* ------ Indirect but boring jump ------ */
.global VG_(disp_cp_xindir)
VG_(disp_cp_xindir):
        /* Where are we going? */
        movl    OFFSET_x86_EIP(%ebp), %eax

        /* stats only */
        addl    $1, VG_(stats__n_xindirs_32)

        /* try a fast lookup in the translation cache */
        movl    %eax, %ebx                     /* next guest addr */
        andl    $VG_TT_FAST_MASK, %ebx         /* entry# */
        movl    0+VG_(tt_fast)(,%ebx,8), %esi  /* .guest */
        movl    4+VG_(tt_fast)(,%ebx,8), %edi  /* .host */
        cmpl    %eax, %esi
        jnz     fast_lookup_failed

        /* Found a match.  Jump to .host. */
        jmp     *%edi
        ud2     /* persuade insn decoders not to speculate past here */
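        /* A C-like sketch of the fast lookup above, with illustrative
           field names (each VG_(tt_fast) entry is 8 bytes, guest addr
           at +0 and host addr at +4, exactly as addressed above):

              n = eip & VG_TT_FAST_MASK;
              if (VG_(tt_fast)[n].guest == eip)
                 goto *VG_(tt_fast)[n].host;    // stay in the code cache
              else
                 return VG_TRC_INNER_FASTMISS;  // back to the scheduler
        */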

fast_lookup_failed:
        /* stats only */
        addl    $1, VG_(stats__n_xindir_misses_32)

        movl    $VG_TRC_INNER_FASTMISS, %eax
        movl    $0, %edx
        jmp     postamble

/* ------ Assisted jump ------ */
.global VG_(disp_cp_xassisted)
VG_(disp_cp_xassisted):
        /* %ebp contains the TRC */
        movl    %ebp, %eax
        movl    $0, %edx
        jmp     postamble

/* ------ Event check failed ------ */
.global VG_(disp_cp_evcheck_fail)
VG_(disp_cp_evcheck_fail):
        movl    $VG_TRC_INNER_COUNTERZERO, %eax
        movl    $0, %edx
        jmp     postamble
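        /* (An 'event check' is the counter decrement that VEX plants at
           the start of each translation; when the counter in the guest
           state runs out, the translation branches here so the scheduler
           can regain control -- hence INNER_COUNTERZERO.  This note is
           an assumption about the surrounding machinery, not something
           established in this file.) */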


.size VG_(disp_run_translations), .-VG_(disp_run_translations)

#endif // defined(VGP_x86_solaris)

/* Let the linker know we don't need an executable stack */
MARK_STACK_NO_EXEC

/*--------------------------------------------------------------------*/
/*--- end                                                          ---*/
/*--------------------------------------------------------------------*/