sewardj | 21082ff | 2004-10-19 13:11:35 +0000 | [diff] [blame] | 1 | |
| 2 | /*--------------------------------------------------------------------*/ |
sewardj | 2c80060 | 2004-11-06 13:56:36 +0000 | [diff] [blame] | 3 | /*--- Instrument IR to perform memory checking operations. ---*/ |
sewardj | 21082ff | 2004-10-19 13:11:35 +0000 | [diff] [blame] | 4 | /*--- mc_translate.c ---*/ |
| 5 | /*--------------------------------------------------------------------*/ |
| 6 | |
| 7 | /* |
| 8 | This file is part of MemCheck, a heavyweight Valgrind tool for |
| 9 | detecting memory errors. |
| 10 | |
| 11 | Copyright (C) 2000-2004 Julian Seward |
| 12 | jseward@acm.org |
| 13 | |
| 14 | This program is free software; you can redistribute it and/or |
| 15 | modify it under the terms of the GNU General Public License as |
| 16 | published by the Free Software Foundation; either version 2 of the |
| 17 | License, or (at your option) any later version. |
| 18 | |
| 19 | This program is distributed in the hope that it will be useful, but |
| 20 | WITHOUT ANY WARRANTY; without even the implied warranty of |
| 21 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
| 22 | General Public License for more details. |
| 23 | |
| 24 | You should have received a copy of the GNU General Public License |
| 25 | along with this program; if not, write to the Free Software |
| 26 | Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA |
| 27 | 02111-1307, USA. |
| 28 | |
| 29 | The GNU General Public License is contained in the file COPYING. |
| 30 | */ |
| 31 | |
| 32 | #include "mc_include.h" |
| 33 | |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 34 | |
/*------------------------------------------------------------*/
/*--- Forward decls                                        ---*/
/*------------------------------------------------------------*/

struct _MCEnv;

/* Maps an original IR type to the integer type used for its shadow
   (definition below). */
static IRType  shadowType ( IRType ty );

/* Produces the V-bits (shadow) expression for 'e'; callers treat the
   result as a shadow atom.  Defined later in the file. */
static IRExpr* expr2vbits ( struct _MCEnv* mce, IRExpr* e );
| 43 | |
| 44 | |
| 45 | /*------------------------------------------------------------*/ |
| 46 | /*--- Memcheck running state, and tmp management. ---*/ |
| 47 | /*------------------------------------------------------------*/ |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 48 | |
/* Carries around state during memcheck instrumentation. */
typedef
   struct _MCEnv {
      /* MODIFIED: the bb being constructed.  IRStmts are added. */
      IRBB* bb;

      /* MODIFIED: a table [0 .. #temps_in_original_bb-1] which maps
         original temps to their current shadow temp.  Initially all
         entries are IRTemp_INVALID.  Entries are added lazily since
         many original temps are not used due to optimisation prior
         to instrumentation.  Note that floating point original tmps
         are shadowed by integer tmps of the same size, and Bit-typed
         original tmps are shadowed by the type Ity_I8.  See comment
         below. */
      IRTemp* tmpMap;
      Int     n_originalTmps; /* for range checking */

      /* READONLY: the guest layout.  This indicates which parts of
         the guest state should be regarded as 'always defined'. */
      VexGuestLayout* layout;
      /* READONLY: the host word type.  Needed for constructing
         arguments of type 'HWord' to be passed to helper functions.
         Ity_I32 or Ity_I64 only. */
      IRType hWordTy;
   }
   MCEnv;
| 75 | |
/* SHADOW TMP MANAGEMENT.  Shadow tmps are allocated lazily (on
   demand), as they are encountered.  This is for two reasons.

   (1) (less important reason): Many original tmps are unused due to
   initial IR optimisation, and we do not want to waste space in
   tables tracking them.

   Shadow IRTemps are therefore allocated on demand.  mce.tmpMap is a
   table indexed [0 .. n_types-1], which gives the current shadow for
   each original tmp, or IRTemp_INVALID if none is so far assigned.
   It is necessary to support making multiple assignments to a shadow
   -- specifically, after testing a shadow for definedness, it needs
   to be made defined.  But IR's SSA property disallows this.

   (2) (more important reason): Therefore, when a shadow needs to get
   a new value, a new temporary is created, the value is assigned to
   that, and the tmpMap is updated to reflect the new binding.

   A corollary is that if the tmpMap maps a given tmp to
   IRTemp_INVALID and we are hoping to read that shadow tmp, it means
   there's a read-before-write error in the original tmps.  The IR
   sanity checker should catch all such anomalies, however.
*/
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 99 | |
| 100 | /* Find the tmp currently shadowing the given original tmp. If none |
| 101 | so far exists, allocate one. */ |
| 102 | static IRTemp findShadowTmp ( MCEnv* mce, IRTemp orig ) |
| 103 | { |
| 104 | sk_assert(orig < mce->n_originalTmps); |
sewardj | 92d168d | 2004-11-15 14:22:12 +0000 | [diff] [blame^] | 105 | if (mce->tmpMap[orig] == IRTemp_INVALID) { |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 106 | mce->tmpMap[orig] |
| 107 | = newIRTemp(mce->bb->tyenv, |
| 108 | shadowType(mce->bb->tyenv->types[orig])); |
| 109 | } |
| 110 | return mce->tmpMap[orig]; |
| 111 | } |
| 112 | |
| 113 | /* Allocate a new shadow for the given original tmp. This means any |
| 114 | previous shadow is abandoned. This is needed because it is |
| 115 | necessary to give a new value to a shadow once it has been tested |
| 116 | for undefinedness, but unfortunately IR's SSA property disallows |
| 117 | this. Instead we must abandon the old shadow, allocate a new one |
| 118 | and use that instead. */ |
| 119 | static void newShadowTmp ( MCEnv* mce, IRTemp orig ) |
| 120 | { |
| 121 | sk_assert(orig < mce->n_originalTmps); |
| 122 | mce->tmpMap[orig] |
| 123 | = newIRTemp(mce->bb->tyenv, |
| 124 | shadowType(mce->bb->tyenv->types[orig])); |
| 125 | } |
| 126 | |
| 127 | |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 128 | /*------------------------------------------------------------*/ |
| 129 | /*--- IRAtoms -- a subset of IRExprs ---*/ |
| 130 | /*------------------------------------------------------------*/ |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 131 | |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 132 | /* An atom is either an IRExpr_Const or an IRExpr_Tmp, as defined by |
| 133 | isAtom() in libvex_ir.h. Because this instrumenter expects flat |
| 134 | input, most of this code deals in atoms. Usefully, a value atom |
| 135 | always has a V-value which is also an atom: constants are shadowed |
| 136 | by constants, and temps are shadowed by the corresponding shadow |
| 137 | temporary. */ |
| 138 | |
| 139 | typedef IRExpr IRAtom; |
| 140 | |
| 141 | /* (used for sanity checks only): is this an atom which looks |
| 142 | like it's from original code? */ |
| 143 | static Bool isOriginalAtom ( MCEnv* mce, IRAtom* a1 ) |
| 144 | { |
| 145 | if (a1->tag == Iex_Const) |
| 146 | return True; |
| 147 | if (a1->tag == Iex_Tmp && a1->Iex.Tmp.tmp < mce->n_originalTmps) |
| 148 | return True; |
| 149 | return False; |
| 150 | } |
| 151 | |
| 152 | /* (used for sanity checks only): is this an atom which looks |
| 153 | like it's from shadow code? */ |
| 154 | static Bool isShadowAtom ( MCEnv* mce, IRAtom* a1 ) |
| 155 | { |
| 156 | if (a1->tag == Iex_Const) |
| 157 | return True; |
| 158 | if (a1->tag == Iex_Tmp && a1->Iex.Tmp.tmp >= mce->n_originalTmps) |
| 159 | return True; |
| 160 | return False; |
| 161 | } |
| 162 | |
| 163 | /* (used for sanity checks only): check that both args are atoms and |
| 164 | are identically-kinded. */ |
| 165 | static Bool sameKindedAtoms ( IRAtom* a1, IRAtom* a2 ) |
| 166 | { |
| 167 | if (a1->tag == Iex_Tmp && a1->tag == Iex_Tmp) |
| 168 | return True; |
| 169 | if (a1->tag == Iex_Const && a1->tag == Iex_Const) |
| 170 | return True; |
| 171 | return False; |
| 172 | } |
| 173 | |
| 174 | |
| 175 | /*------------------------------------------------------------*/ |
| 176 | /*--- Type management ---*/ |
| 177 | /*------------------------------------------------------------*/ |
| 178 | |
| 179 | /* Shadow state is always accessed using integer types. This returns |
| 180 | an integer type with the same size (as per sizeofIRType) as the |
| 181 | given type. The only valid shadow types are Bit, I8, I16, I32, |
| 182 | I64. */ |
| 183 | |
| 184 | static IRType shadowType ( IRType ty ) |
| 185 | { |
| 186 | switch (ty) { |
| 187 | case Ity_Bit: |
| 188 | case Ity_I8: |
| 189 | case Ity_I16: |
| 190 | case Ity_I32: |
| 191 | case Ity_I64: return ty; |
| 192 | case Ity_F32: return Ity_I32; |
| 193 | case Ity_F64: return Ity_I64; |
| 194 | default: ppIRType(ty); |
| 195 | VG_(skin_panic)("memcheck:shadowType"); |
| 196 | } |
| 197 | } |
| 198 | |
| 199 | /* Produce a 'defined' value of the given shadow type. Should only be |
| 200 | supplied shadow types (Bit/I8/I16/I32/UI64). */ |
| 201 | static IRExpr* definedOfType ( IRType ty ) { |
| 202 | switch (ty) { |
| 203 | case Ity_Bit: return IRExpr_Const(IRConst_Bit(False)); |
| 204 | case Ity_I8: return IRExpr_Const(IRConst_U8(0)); |
| 205 | case Ity_I16: return IRExpr_Const(IRConst_U16(0)); |
| 206 | case Ity_I32: return IRExpr_Const(IRConst_U32(0)); |
| 207 | case Ity_I64: return IRExpr_Const(IRConst_U64(0)); |
| 208 | default: VG_(skin_panic)("memcheck:definedOfType"); |
| 209 | } |
| 210 | } |
| 211 | |
| 212 | |
| 213 | /*------------------------------------------------------------*/ |
| 214 | /*--- Constructing IR fragments ---*/ |
| 215 | /*------------------------------------------------------------*/ |
| 216 | |
/* assign value to tmp: append 'tmp := expr' to the bb */
#define assign(_bb,_tmp,_expr)   \
   addStmtToIRBB((_bb), IRStmt_Tmp((_tmp),(_expr)))

/* add stmt to a bb */
#define stmt(_bb,_stmt)    \
   addStmtToIRBB((_bb), (_stmt))

/* build various kinds of expressions: binary/unary ops, integer
   constants of each width, and a tmp-read expression */
#define binop(_op, _arg1, _arg2) IRExpr_Binop((_op),(_arg1),(_arg2))
#define unop(_op, _arg)          IRExpr_Unop((_op),(_arg))
#define mkU8(_n)                 IRExpr_Const(IRConst_U8(_n))
#define mkU16(_n)                IRExpr_Const(IRConst_U16(_n))
#define mkU32(_n)                IRExpr_Const(IRConst_U32(_n))
#define mkU64(_n)                IRExpr_Const(IRConst_U64(_n))
#define mkexpr(_tmp)             IRExpr_Tmp((_tmp))
| 233 | |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 234 | /* bind the given expression to a new temporary, and return the |
| 235 | temporary. This effectively converts an arbitrary expression into |
| 236 | an atom. */ |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 237 | static IRAtom* assignNew ( MCEnv* mce, IRType ty, IRExpr* e ) { |
| 238 | IRTemp t = newIRTemp(mce->bb->tyenv, ty); |
| 239 | assign(mce->bb, t, e); |
| 240 | return mkexpr(t); |
| 241 | } |
| 242 | |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 243 | |
| 244 | /*------------------------------------------------------------*/ |
| 245 | /*--- Constructing definedness primitive ops ---*/ |
| 246 | /*------------------------------------------------------------*/ |
| 247 | |
/* --------- Defined-if-either-defined --------- */

/* V-bit AND: a result bit is defined (0) iff either argument's
   corresponding V-bit is defined, since And yields 0 whenever either
   input bit is 0. */

static IRAtom* mkDifD8 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) {
   sk_assert(isShadowAtom(mce,a1));
   sk_assert(isShadowAtom(mce,a2));
   return assignNew(mce, Ity_I8, binop(Iop_And8, a1, a2));
}

static IRAtom* mkDifD16 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) {
   sk_assert(isShadowAtom(mce,a1));
   sk_assert(isShadowAtom(mce,a2));
   return assignNew(mce, Ity_I16, binop(Iop_And16, a1, a2));
}

static IRAtom* mkDifD32 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) {
   sk_assert(isShadowAtom(mce,a1));
   sk_assert(isShadowAtom(mce,a2));
   return assignNew(mce, Ity_I32, binop(Iop_And32, a1, a2));
}
| 267 | |
/* --------- Undefined-if-either-undefined --------- */

/* V-bit OR: a result bit is undefined (1) iff either argument's
   corresponding V-bit is undefined. */

static IRAtom* mkUifU8 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) {
   sk_assert(isShadowAtom(mce,a1));
   sk_assert(isShadowAtom(mce,a2));
   return assignNew(mce, Ity_I8, binop(Iop_Or8, a1, a2));
}

static IRAtom* mkUifU16 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) {
   sk_assert(isShadowAtom(mce,a1));
   sk_assert(isShadowAtom(mce,a2));
   return assignNew(mce, Ity_I16, binop(Iop_Or16, a1, a2));
}

static IRAtom* mkUifU32 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) {
   sk_assert(isShadowAtom(mce,a1));
   sk_assert(isShadowAtom(mce,a2));
   return assignNew(mce, Ity_I32, binop(Iop_Or32, a1, a2));
}

static IRAtom* mkUifU64 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) {
   sk_assert(isShadowAtom(mce,a1));
   sk_assert(isShadowAtom(mce,a2));
   return assignNew(mce, Ity_I64, binop(Iop_Or64, a1, a2));
}
| 293 | |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 294 | static IRAtom* mkUifU ( MCEnv* mce, IRType vty, IRAtom* a1, IRAtom* a2 ) { |
| 295 | switch (vty) { |
sewardj | edf9b5a | 2004-11-09 16:02:11 +0000 | [diff] [blame] | 296 | case Ity_I16: return mkUifU16(mce, a1, a2); |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 297 | case Ity_I32: return mkUifU32(mce, a1, a2); |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 298 | case Ity_I64: return mkUifU64(mce, a1, a2); |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 299 | default: |
| 300 | VG_(printf)("\n"); ppIRType(vty); VG_(printf)("\n"); |
| 301 | VG_(skin_panic)("memcheck:mkUifU"); |
| 302 | } |
| 303 | } |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 304 | |
/* --------- The Left-family of operations. --------- */

/* mkLeftN(a) computes a | (0 - a): since -a = ~a + 1, this sets
   every bit at or above the lowest set bit of 'a', i.e. smears the
   rightmost undefined (1) bit leftwards through all higher
   positions.  The Sub-from-zero form is used in place of a Neg op
   (see the commented-out alternatives). */

static IRAtom* mkLeft8 ( MCEnv* mce, IRAtom* a1 ) {
   sk_assert(isShadowAtom(mce,a1));
   /* It's safe to duplicate a1 since it's only an atom */
   return assignNew(mce, Ity_I8, 
                    binop(Iop_Or8, a1, 
                          assignNew(mce, Ity_I8,
                                    /* unop(Iop_Neg8, a1)))); */
                                    binop(Iop_Sub8, mkU8(0), a1) )));
}

static IRAtom* mkLeft16 ( MCEnv* mce, IRAtom* a1 ) {
   sk_assert(isShadowAtom(mce,a1));
   /* It's safe to duplicate a1 since it's only an atom */
   return assignNew(mce, Ity_I16, 
                    binop(Iop_Or16, a1, 
                          assignNew(mce, Ity_I16,
                                    /* unop(Iop_Neg16, a1)))); */
                                    binop(Iop_Sub16, mkU16(0), a1) )));
}

static IRAtom* mkLeft32 ( MCEnv* mce, IRAtom* a1 ) {
   sk_assert(isShadowAtom(mce,a1));
   /* It's safe to duplicate a1 since it's only an atom */
   return assignNew(mce, Ity_I32, 
                    binop(Iop_Or32, a1, 
                          assignNew(mce, Ity_I32,
                                    /* unop(Iop_Neg32, a1)))); */
                                    binop(Iop_Sub32, mkU32(0), a1) )));
}
| 336 | |
/* --------- 'Improvement' functions for AND/OR. --------- */

/* ImproveAND(data, vbits) = data OR vbits.  Defined (0) data 0s give
   defined (0); all other -> undefined (1).
   Rationale: a defined 0 operand forces an AND result of 0
   regardless of the other operand, so its result bit can be treated
   as defined; data|vbits yields 0 exactly in that case. */
static IRAtom* mkImproveAND8 ( MCEnv* mce, IRAtom* data, IRAtom* vbits )
{
   sk_assert(isOriginalAtom(mce, data));
   sk_assert(isShadowAtom(mce, vbits));
   sk_assert(sameKindedAtoms(data, vbits));
   return assignNew(mce, Ity_I8, binop(Iop_Or8, data, vbits));
}

static IRAtom* mkImproveAND16 ( MCEnv* mce, IRAtom* data, IRAtom* vbits )
{
   sk_assert(isOriginalAtom(mce, data));
   sk_assert(isShadowAtom(mce, vbits));
   sk_assert(sameKindedAtoms(data, vbits));
   return assignNew(mce, Ity_I16, binop(Iop_Or16, data, vbits));
}

static IRAtom* mkImproveAND32 ( MCEnv* mce, IRAtom* data, IRAtom* vbits )
{
   sk_assert(isOriginalAtom(mce, data));
   sk_assert(isShadowAtom(mce, vbits));
   sk_assert(sameKindedAtoms(data, vbits));
   return assignNew(mce, Ity_I32, binop(Iop_Or32, data, vbits));
}
| 365 | |
/* ImproveOR(data, vbits) = ~data OR vbits.  Defined (0) data 1s give
   defined (0); all other -> undefined (1).
   Rationale: a defined 1 operand forces an OR result of 1 regardless
   of the other operand, so its result bit can be treated as defined;
   ~data|vbits yields 0 exactly in that case. */
static IRAtom* mkImproveOR8 ( MCEnv* mce, IRAtom* data, IRAtom* vbits )
{
   sk_assert(isOriginalAtom(mce, data));
   sk_assert(isShadowAtom(mce, vbits));
   sk_assert(sameKindedAtoms(data, vbits));
   return assignNew(
             mce, Ity_I8, 
             binop(Iop_Or8, 
                   assignNew(mce, Ity_I8, unop(Iop_Not8, data)), 
                   vbits) );
}

static IRAtom* mkImproveOR16 ( MCEnv* mce, IRAtom* data, IRAtom* vbits )
{
   sk_assert(isOriginalAtom(mce, data));
   sk_assert(isShadowAtom(mce, vbits));
   sk_assert(sameKindedAtoms(data, vbits));
   return assignNew(
             mce, Ity_I16, 
             binop(Iop_Or16, 
                   assignNew(mce, Ity_I16, unop(Iop_Not16, data)), 
                   vbits) );
}

static IRAtom* mkImproveOR32 ( MCEnv* mce, IRAtom* data, IRAtom* vbits )
{
   sk_assert(isOriginalAtom(mce, data));
   sk_assert(isShadowAtom(mce, vbits));
   sk_assert(sameKindedAtoms(data, vbits));
   return assignNew(
             mce, Ity_I32, 
             binop(Iop_Or32, 
                   assignNew(mce, Ity_I32, unop(Iop_Not32, data)), 
                   vbits) );
}
| 404 | |
/* --------- Pessimising casts. --------- */

/* Pessimistically convert 'vbits' to shadow type dst_ty, in two
   stages: (1) collapse vbits to a single bit via a compare-not-equal
   against zero, so the bit is 1 iff ANY source V-bit is undefined;
   (2) sign-extend that bit to dst_ty, making the result all-0s
   (defined) or all-1s (undefined). */
static IRAtom* mkPCastTo( MCEnv* mce, IRType dst_ty, IRAtom* vbits ) 
{
   /* Note, dst_ty is a shadow type, not an original type. */
   /* First of all, collapse vbits down to a single bit. */
   sk_assert(isShadowAtom(mce,vbits));
   IRType  ty   = typeOfIRExpr(mce->bb->tyenv, vbits);
   IRAtom* tmp1 = NULL;
   switch (ty) {
      case Ity_Bit:
         tmp1 = vbits;  /* already a single bit */
         break;
      case Ity_I8: 
         tmp1 = assignNew(mce, Ity_Bit, binop(Iop_CmpNE8, vbits, mkU8(0)));
         break;
      case Ity_I16: 
         tmp1 = assignNew(mce, Ity_Bit, binop(Iop_CmpNE16, vbits, mkU16(0)));
         break;
      case Ity_I32: 
         tmp1 = assignNew(mce, Ity_Bit, binop(Iop_CmpNE32, vbits, mkU32(0)));
         break;
      case Ity_I64: 
         tmp1 = assignNew(mce, Ity_Bit, binop(Iop_CmpNE64, vbits, mkU64(0)));
         break;
      default:
         VG_(skin_panic)("mkPCastTo(1)");
   }
   sk_assert(tmp1);
   /* Now widen up to the dst type. */
   switch (dst_ty) {
      case Ity_Bit:
         return tmp1;
      case Ity_I8: 
         return assignNew(mce, Ity_I8, unop(Iop_1Sto8, tmp1));
      case Ity_I16: 
         return assignNew(mce, Ity_I16, unop(Iop_1Sto16, tmp1));
      case Ity_I32: 
         return assignNew(mce, Ity_I32, unop(Iop_1Sto32, tmp1));
      case Ity_I64: 
         return assignNew(mce, Ity_I64, unop(Iop_1Sto64, tmp1));
      default: 
         ppIRType(dst_ty);
         VG_(skin_panic)("mkPCastTo(2)");
   }
}
| 451 | |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 452 | |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 453 | /*------------------------------------------------------------*/ |
| 454 | /*--- Emit a test and complaint if something is undefined. ---*/ |
| 455 | /*------------------------------------------------------------*/ |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 456 | |
/* Set the annotations on a dirty helper to indicate that the stack
   pointer and instruction pointers might be read.  This is the
   behaviour of all 'emit-a-complaint' style functions we might
   call. */

static void setHelperAnns ( MCEnv* mce, IRDirty* di ) {
   di->nFxState = 2;
   /* fxState[0]: the guest stack pointer may be read */
   di->fxState[0].fx     = Ifx_Read;
   di->fxState[0].offset = mce->layout->offset_SP;
   di->fxState[0].size   = mce->layout->sizeof_SP;
   /* fxState[1]: the guest instruction pointer may be read */
   di->fxState[1].fx     = Ifx_Read;
   di->fxState[1].offset = mce->layout->offset_IP;
   di->fxState[1].size   = mce->layout->sizeof_IP;
}
| 471 | |
| 472 | |
/* Check the supplied **original** atom for undefinedness, and emit a
   complaint if so.  Once that happens, mark it as defined.  This is
   possible because the atom is either a tmp or literal.  If it's a
   tmp, it will be shadowed by a tmp, and so we can set the shadow to
   be defined.  In fact as mentioned above, we will have to allocate a
   new tmp to carry the new 'defined' shadow value, and update the
   original->tmp mapping accordingly; we cannot simply assign a new
   value to an existing shadow tmp as this breaks SSAness -- resulting
   in the post-instrumentation sanity checker spluttering in disapproval. 
*/
static void complainIfUndefined ( MCEnv* mce, IRAtom* atom )
{
   /* Since the original expression is atomic, there's no duplicated
      work generated by making multiple V-expressions for it.  So we
      don't really care about the possibility that someone else may
      also create a V-interpretion for it. */
   sk_assert(isOriginalAtom(mce, atom));
   IRAtom* vatom = expr2vbits( mce, atom );
   sk_assert(isShadowAtom(mce, vatom));
   sk_assert(sameKindedAtoms(atom, vatom));

   IRType ty = typeOfIRExpr(mce->bb->tyenv, vatom);

   /* sz is only used for constructing the error message */
   Int sz = ty==Ity_Bit ? 0 : sizeofIRType(ty);

   IRAtom* cond = mkPCastTo( mce, Ity_Bit, vatom );
   /* cond will be 0 if all defined, and 1 if any not defined. */

   /* Select a complaint helper: sizes 0, 1 and 4 have specialised
      zero-argument helpers; any other size uses the generic helper,
      which takes the size as an argument. */
   IRDirty* di;
   switch (sz) {
      case 0:
         di = unsafeIRDirty_0_N( 0/*regparms*/, 
                 "MC_(helperc_value_check0_fail)",
                 &MC_(helperc_value_check0_fail),
                 mkIRExprVec_0() 
              );
         break;
      case 1:
         di = unsafeIRDirty_0_N( 0/*regparms*/, 
                 "MC_(helperc_value_check1_fail)",
                 &MC_(helperc_value_check1_fail),
                 mkIRExprVec_0() 
              );
         break;
      case 4:
         di = unsafeIRDirty_0_N( 0/*regparms*/, 
                 "MC_(helperc_value_check4_fail)",
                 &MC_(helperc_value_check4_fail),
                 mkIRExprVec_0() 
              );
         break;
      default:
         di = unsafeIRDirty_0_N( 1/*regparms*/, 
                 "MC_(helperc_complain_undef)",
                 &MC_(helperc_complain_undef),
                 mkIRExprVec_1( mkIRExpr_HWord( sz ))
              );
         break;
   }
   /* The helper only runs when cond is 1, ie some bits undefined. */
   di->guard = cond;
   setHelperAnns( mce, di );
   stmt( mce->bb, IRStmt_Dirty(di));

   /* Set the shadow tmp to be defined.  First, update the
      orig->shadow tmp mapping to reflect the fact that this shadow is
      getting a new value. */
   sk_assert(isAtom(vatom));
   /* sameKindedAtoms ... */
   if (vatom->tag == Iex_Tmp) {
      sk_assert(atom->tag == Iex_Tmp);
      newShadowTmp(mce, atom->Iex.Tmp.tmp);
      assign(mce->bb, findShadowTmp(mce, atom->Iex.Tmp.tmp), 
                      definedOfType(ty));
   }
}
| 549 | |
| 550 | |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 551 | /*------------------------------------------------------------*/ |
| 552 | /*--- Shadowing PUTs/GETs, and indexed variants thereof ---*/ |
| 553 | /*------------------------------------------------------------*/ |
| 554 | |
| 555 | /* Examine the always-defined sections declared in layout to see if |
| 556 | the (offset,size) section is within one. Note, is is an error to |
| 557 | partially fall into such a region: (offset,size) should either be |
| 558 | completely in such a region or completely not-in such a region. |
| 559 | */ |
| 560 | static Bool isAlwaysDefd ( MCEnv* mce, Int offset, Int size ) |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 561 | { |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 562 | Int minoffD, maxoffD, i; |
| 563 | Int minoff = offset; |
| 564 | Int maxoff = minoff + size - 1; |
| 565 | sk_assert((minoff & ~0xFFFF) == 0); |
| 566 | sk_assert((maxoff & ~0xFFFF) == 0); |
| 567 | |
| 568 | for (i = 0; i < mce->layout->n_alwaysDefd; i++) { |
| 569 | minoffD = mce->layout->alwaysDefd[i].offset; |
| 570 | maxoffD = minoffD + mce->layout->alwaysDefd[i].size - 1; |
| 571 | sk_assert((minoffD & ~0xFFFF) == 0); |
| 572 | sk_assert((maxoffD & ~0xFFFF) == 0); |
| 573 | |
| 574 | if (maxoff < minoffD || maxoffD < minoff) |
| 575 | continue; /* no overlap */ |
| 576 | if (minoff >= minoffD && maxoff <= maxoffD) |
| 577 | return True; /* completely contained in an always-defd section */ |
| 578 | |
| 579 | VG_(skin_panic)("memcheck:isAlwaysDefd:partial overlap"); |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 580 | } |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 581 | return False; /* could not find any containing section */ |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 582 | } |
| 583 | |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 584 | |
/* Generate into bb suitable actions to shadow this Put.  If the state
   slice is marked 'always defined', do nothing.  Otherwise, write the
   supplied V bits to the shadow state.  We can pass in either an
   original atom or a V-atom, but not both.  In the former case the
   relevant V-bits are then generated from the original.
*/
static
void do_shadow_PUT ( MCEnv* mce,  Int offset, 
                     IRAtom* atom, IRAtom* vatom )
{
   /* Exactly one of atom/vatom must be supplied. */
   if (atom) {
      sk_assert(!vatom);
      sk_assert(isOriginalAtom(mce, atom));
      vatom = expr2vbits( mce, atom );
   } else {
      sk_assert(vatom);
      sk_assert(isShadowAtom(mce, vatom));
   }

   IRType ty = typeOfIRExpr(mce->bb->tyenv, vatom);
   sk_assert(ty != Ity_Bit);
   if (isAlwaysDefd(mce, offset, sizeofIRType(ty))) {
      /* Always-defined slice: deliberately emit nothing.  The
         complaint below is disabled for now. */
      /* later: no ... */
      /* emit code to emit a complaint if any of the vbits are 1. */
      /* complainIfUndefined(mce, atom); */
   } else {
      /* Do a plain shadow Put: the shadow state lives at a fixed
         displacement (layout->total_sizeB) past the real state. */
      stmt( mce->bb, IRStmt_Put( offset + mce->layout->total_sizeB, vatom ) );
   }
}
| 615 | |
| 616 | |
| 617 | /* Return an expression which contains the V bits corresponding to the |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 618 | given GETI (passed in in pieces). |
| 619 | */ |
| 620 | static |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 621 | void do_shadow_PUTI ( MCEnv* mce, |
| 622 | IRArray* descr, IRAtom* ix, Int bias, IRAtom* atom ) |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 623 | { |
| 624 | sk_assert(isOriginalAtom(mce,atom)); |
| 625 | IRAtom* vatom = expr2vbits( mce, atom ); |
| 626 | sk_assert(sameKindedAtoms(atom, vatom)); |
| 627 | IRType ty = descr->elemTy; |
| 628 | IRType tyS = shadowType(ty); |
| 629 | Int arrSize = descr->nElems * sizeofIRType(ty); |
| 630 | sk_assert(ty != Ity_Bit); |
| 631 | sk_assert(isOriginalAtom(mce,ix)); |
| 632 | complainIfUndefined(mce,ix); |
| 633 | if (isAlwaysDefd(mce, descr->base, arrSize)) { |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 634 | /* later: no ... */ |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 635 | /* emit code to emit a complaint if any of the vbits are 1. */ |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 636 | /* complainIfUndefined(mce, atom); */ |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 637 | } else { |
| 638 | /* Do a cloned version of the Put that refers to the shadow |
| 639 | area. */ |
| 640 | IRArray* new_descr |
| 641 | = mkIRArray( descr->base + mce->layout->total_sizeB, |
| 642 | tyS, descr->nElems); |
| 643 | stmt( mce->bb, IRStmt_PutI( new_descr, ix, bias, vatom )); |
| 644 | } |
| 645 | } |
| 646 | |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 647 | |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 648 | /* Return an expression which contains the V bits corresponding to the |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 649 | given GET (passed in in pieces). |
| 650 | */ |
| 651 | static |
| 652 | IRExpr* shadow_GET ( MCEnv* mce, Int offset, IRType ty ) |
| 653 | { |
| 654 | IRType tyS = shadowType(ty); |
| 655 | sk_assert(ty != Ity_Bit); |
| 656 | if (isAlwaysDefd(mce, offset, sizeofIRType(ty))) { |
| 657 | /* Always defined, return all zeroes of the relevant type */ |
| 658 | return definedOfType(tyS); |
| 659 | } else { |
| 660 | /* return a cloned version of the Get that refers to the shadow |
| 661 | area. */ |
| 662 | return IRExpr_Get( offset + mce->layout->total_sizeB, tyS ); |
| 663 | } |
| 664 | } |
| 665 | |
| 666 | |
| 667 | /* Return an expression which contains the V bits corresponding to the |
| 668 | given GETI (passed in in pieces). |
| 669 | */ |
| 670 | static |
| 671 | IRExpr* shadow_GETI ( MCEnv* mce, IRArray* descr, IRAtom* ix, Int bias ) |
| 672 | { |
| 673 | IRType ty = descr->elemTy; |
| 674 | IRType tyS = shadowType(ty); |
| 675 | Int arrSize = descr->nElems * sizeofIRType(ty); |
| 676 | sk_assert(ty != Ity_Bit); |
| 677 | sk_assert(isOriginalAtom(mce,ix)); |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 678 | complainIfUndefined(mce,ix); |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 679 | if (isAlwaysDefd(mce, descr->base, arrSize)) { |
| 680 | /* Always defined, return all zeroes of the relevant type */ |
| 681 | return definedOfType(tyS); |
| 682 | } else { |
| 683 | /* return a cloned version of the Get that refers to the shadow |
| 684 | area. */ |
| 685 | IRArray* new_descr |
| 686 | = mkIRArray( descr->base + mce->layout->total_sizeB, |
| 687 | tyS, descr->nElems); |
| 688 | return IRExpr_GetI( new_descr, ix, bias ); |
| 689 | } |
| 690 | } |
| 691 | |
| 692 | |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 693 | /*------------------------------------------------------------*/ |
| 694 | /*--- Generating approximations for unknown operations, ---*/ |
| 695 | /*--- using lazy-propagate semantics ---*/ |
| 696 | /*------------------------------------------------------------*/ |
| 697 | |
| 698 | /* Lazy propagation of undefinedness from two values, resulting in the |
| 699 | specified shadow type. |
| 700 | */ |
sewardj | 80415ee | 2004-11-04 16:56:29 +0000 | [diff] [blame] | 701 | static |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 702 | IRAtom* mkLazy2 ( MCEnv* mce, IRType finalVty, IRAtom* va1, IRAtom* va2 ) |
sewardj | 80415ee | 2004-11-04 16:56:29 +0000 | [diff] [blame] | 703 | { |
| 704 | /* force everything via 32-bit intermediaries. */ |
| 705 | IRAtom* at; |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 706 | sk_assert(isShadowAtom(mce,va1)); |
| 707 | sk_assert(isShadowAtom(mce,va2)); |
| 708 | at = mkPCastTo(mce, Ity_I32, va1); |
| 709 | at = mkUifU(mce, Ity_I32, at, mkPCastTo(mce, Ity_I32, va2)); |
| 710 | at = mkPCastTo(mce, finalVty, at); |
sewardj | 80415ee | 2004-11-04 16:56:29 +0000 | [diff] [blame] | 711 | return at; |
| 712 | } |
| 713 | |
sewardj | a1cb1b5 | 2004-11-06 12:27:15 +0000 | [diff] [blame] | 714 | |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 715 | /* Do the lazy propagation game from a null-terminated vector of |
| 716 | atoms. This is presumably the arguments to a helper call, so the |
| 717 | IRCallee info is also supplied in order that we can know which |
| 718 | arguments should be ignored (via the .mcx_mask field). |
| 719 | */ |
| 720 | static |
| 721 | IRAtom* mkLazyN ( MCEnv* mce, |
| 722 | IRAtom** exprvec, IRType finalVtype, IRCallee* cee ) |
| 723 | { |
| 724 | Int i; |
| 725 | IRAtom* here; |
| 726 | IRAtom* curr = definedOfType(Ity_I32); |
| 727 | for (i = 0; exprvec[i]; i++) { |
| 728 | sk_assert(i < 32); |
| 729 | sk_assert(isOriginalAtom(mce, exprvec[i])); |
| 730 | /* Only take notice of this arg if the callee's mc-exclusion |
| 731 | mask does not say it is to be excluded. */ |
| 732 | if (cee->mcx_mask & (1<<i)) { |
| 733 | /* the arg is to be excluded from definedness checking. Do |
| 734 | nothing. */ |
| 735 | if (0) VG_(printf)("excluding %s(%d)\n", cee->name, i); |
| 736 | } else { |
| 737 | /* calculate the arg's definedness, and pessimistically merge |
| 738 | it in. */ |
| 739 | here = mkPCastTo( mce, Ity_I32, expr2vbits(mce, exprvec[i]) ); |
| 740 | curr = mkUifU32(mce, here, curr); |
| 741 | } |
| 742 | } |
| 743 | return mkPCastTo(mce, finalVtype, curr ); |
| 744 | } |
| 745 | |
| 746 | |
| 747 | /*------------------------------------------------------------*/ |
| 748 | /*--- Generating expensive sequences for exact carry-chain ---*/ |
| 749 | /*--- propagation in add/sub and related operations. ---*/ |
| 750 | /*------------------------------------------------------------*/ |
| 751 | |
sewardj | a1cb1b5 | 2004-11-06 12:27:15 +0000 | [diff] [blame] | 752 | static |
| 753 | IRAtom* expensiveAdd32 ( MCEnv* mce, IRAtom* qaa, IRAtom* qbb, |
| 754 | IRAtom* aa, IRAtom* bb ) |
| 755 | { |
| 756 | sk_assert(isShadowAtom(mce,qaa)); |
| 757 | sk_assert(isShadowAtom(mce,qbb)); |
| 758 | sk_assert(isOriginalAtom(mce,aa)); |
| 759 | sk_assert(isOriginalAtom(mce,bb)); |
| 760 | sk_assert(sameKindedAtoms(qaa,aa)); |
| 761 | sk_assert(sameKindedAtoms(qbb,bb)); |
| 762 | |
| 763 | IRType ty = Ity_I32; |
| 764 | IROp opAND = Iop_And32; |
| 765 | IROp opOR = Iop_Or32; |
| 766 | IROp opXOR = Iop_Xor32; |
| 767 | IROp opNOT = Iop_Not32; |
| 768 | IROp opADD = Iop_Add32; |
| 769 | |
| 770 | IRAtom *a_min, *b_min, *a_max, *b_max; |
| 771 | |
| 772 | // a_min = aa & ~qaa |
| 773 | a_min = assignNew(mce,ty, |
| 774 | binop(opAND, aa, |
| 775 | assignNew(mce,ty, unop(opNOT, qaa)))); |
| 776 | |
| 777 | // b_min = bb & ~qbb |
| 778 | b_min = assignNew(mce,ty, |
| 779 | binop(opAND, bb, |
| 780 | assignNew(mce,ty, unop(opNOT, qbb)))); |
| 781 | |
| 782 | // a_max = aa | qaa |
| 783 | a_max = assignNew(mce,ty, binop(opOR, aa, qaa)); |
| 784 | |
| 785 | // b_max = bb | qbb |
| 786 | b_max = assignNew(mce,ty, binop(opOR, bb, qbb)); |
| 787 | |
| 788 | // result = (qaa | qbb) | ((a_min + b_min) ^ (a_max + b_max)) |
| 789 | return |
| 790 | assignNew(mce,ty, |
| 791 | binop( opOR, |
| 792 | assignNew(mce,ty, binop(opOR, qaa, qbb)), |
| 793 | assignNew(mce,ty, |
| 794 | binop(opXOR, assignNew(mce,ty, binop(opADD, a_min, b_min)), |
| 795 | assignNew(mce,ty, binop(opADD, a_max, b_max)) |
| 796 | ) |
| 797 | ) |
| 798 | ) |
| 799 | ); |
| 800 | } |
| 801 | |
| 802 | |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 803 | /*------------------------------------------------------------*/ |
| 804 | /*--- Generate shadow values from all kinds of IRExprs. ---*/ |
| 805 | /*------------------------------------------------------------*/ |
| 806 | |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 807 | static |
sewardj | 80415ee | 2004-11-04 16:56:29 +0000 | [diff] [blame] | 808 | IRAtom* expr2vbits_Binop ( MCEnv* mce, |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 809 | IROp op, |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 810 | IRAtom* atom1, IRAtom* atom2 ) |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 811 | { |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 812 | IRType and_or_ty; |
| 813 | IRAtom* (*uifu) (MCEnv*, IRAtom*, IRAtom*); |
| 814 | IRAtom* (*difd) (MCEnv*, IRAtom*, IRAtom*); |
| 815 | IRAtom* (*improve) (MCEnv*, IRAtom*, IRAtom*); |
| 816 | |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 817 | IRAtom* vatom1 = expr2vbits( mce, atom1 ); |
| 818 | IRAtom* vatom2 = expr2vbits( mce, atom2 ); |
| 819 | |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 820 | sk_assert(isOriginalAtom(mce,atom1)); |
| 821 | sk_assert(isOriginalAtom(mce,atom2)); |
| 822 | sk_assert(isShadowAtom(mce,vatom1)); |
| 823 | sk_assert(isShadowAtom(mce,vatom2)); |
| 824 | sk_assert(sameKindedAtoms(atom1,vatom1)); |
| 825 | sk_assert(sameKindedAtoms(atom2,vatom2)); |
| 826 | switch (op) { |
sewardj | 80415ee | 2004-11-04 16:56:29 +0000 | [diff] [blame] | 827 | |
sewardj | edf9b5a | 2004-11-09 16:02:11 +0000 | [diff] [blame] | 828 | case Iop_RoundF64: |
sewardj | 4fec8f3 | 2004-11-08 13:08:53 +0000 | [diff] [blame] | 829 | case Iop_F64toI64: |
| 830 | /* First arg is I32 (rounding mode), second is F64 (data). */ |
| 831 | return mkLazy2(mce, Ity_I64, vatom1, vatom2); |
| 832 | |
sewardj | edf9b5a | 2004-11-09 16:02:11 +0000 | [diff] [blame] | 833 | case Iop_PRemC3210F64: case Iop_PRem1C3210F64: |
| 834 | /* Takes two F64 args. */ |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 835 | case Iop_F64toI32: |
| 836 | /* First arg is I32 (rounding mode), second is F64 (data). */ |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 837 | return mkLazy2(mce, Ity_I32, vatom1, vatom2); |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 838 | |
| 839 | case Iop_F64toI16: |
| 840 | /* First arg is I32 (rounding mode), second is F64 (data). */ |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 841 | return mkLazy2(mce, Ity_I16, vatom1, vatom2); |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 842 | |
sewardj | edf9b5a | 2004-11-09 16:02:11 +0000 | [diff] [blame] | 843 | case Iop_ScaleF64: |
| 844 | case Iop_Yl2xF64: |
sewardj | 9f8abf8 | 2004-11-10 02:39:49 +0000 | [diff] [blame] | 845 | case Iop_Yl2xp1F64: |
sewardj | edf9b5a | 2004-11-09 16:02:11 +0000 | [diff] [blame] | 846 | case Iop_PRemF64: |
sewardj | 4fec8f3 | 2004-11-08 13:08:53 +0000 | [diff] [blame] | 847 | case Iop_AtanF64: |
sewardj | f106711 | 2004-11-07 18:46:22 +0000 | [diff] [blame] | 848 | case Iop_AddF64: |
| 849 | case Iop_DivF64: |
| 850 | case Iop_SubF64: |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 851 | case Iop_MulF64: |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 852 | return mkLazy2(mce, Ity_I64, vatom1, vatom2); |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 853 | |
sewardj | f106711 | 2004-11-07 18:46:22 +0000 | [diff] [blame] | 854 | case Iop_CmpF64: |
| 855 | return mkLazy2(mce, Ity_I32, vatom1, vatom2); |
| 856 | |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 857 | /* non-FP after here */ |
| 858 | |
sewardj | 80415ee | 2004-11-04 16:56:29 +0000 | [diff] [blame] | 859 | case Iop_DivModU64to32: |
| 860 | case Iop_DivModS64to32: |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 861 | return mkLazy2(mce, Ity_I64, vatom1, vatom2); |
sewardj | 80415ee | 2004-11-04 16:56:29 +0000 | [diff] [blame] | 862 | |
sewardj | edf9b5a | 2004-11-09 16:02:11 +0000 | [diff] [blame] | 863 | case Iop_16HLto32: |
| 864 | return assignNew(mce, Ity_I32, |
| 865 | binop(Iop_16HLto32, vatom1, vatom2)); |
sewardj | 80415ee | 2004-11-04 16:56:29 +0000 | [diff] [blame] | 866 | case Iop_32HLto64: |
| 867 | return assignNew(mce, Ity_I64, |
sewardj | 23be73b | 2004-11-06 16:17:21 +0000 | [diff] [blame] | 868 | binop(Iop_32HLto64, vatom1, vatom2)); |
sewardj | 80415ee | 2004-11-04 16:56:29 +0000 | [diff] [blame] | 869 | |
sewardj | 4b39f28 | 2004-11-04 19:41:09 +0000 | [diff] [blame] | 870 | case Iop_MullS32: |
sewardj | 80415ee | 2004-11-04 16:56:29 +0000 | [diff] [blame] | 871 | case Iop_MullU32: { |
| 872 | IRAtom* vLo32 = mkLeft32(mce, mkUifU32(mce, vatom1,vatom2)); |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 873 | IRAtom* vHi32 = mkPCastTo(mce, Ity_I32, vLo32); |
sewardj | 80415ee | 2004-11-04 16:56:29 +0000 | [diff] [blame] | 874 | return assignNew(mce, Ity_I64, binop(Iop_32HLto64, vHi32, vLo32)); |
| 875 | } |
| 876 | |
sewardj | edf9b5a | 2004-11-09 16:02:11 +0000 | [diff] [blame] | 877 | case Iop_MullS16: |
| 878 | case Iop_MullU16: { |
| 879 | IRAtom* vLo16 = mkLeft16(mce, mkUifU16(mce, vatom1,vatom2)); |
| 880 | IRAtom* vHi16 = mkPCastTo(mce, Ity_I16, vLo16); |
| 881 | return assignNew(mce, Ity_I32, binop(Iop_16HLto32, vHi16, vLo16)); |
| 882 | } |
| 883 | |
| 884 | case Iop_MullS8: |
| 885 | case Iop_MullU8: { |
| 886 | IRAtom* vLo8 = mkLeft8(mce, mkUifU8(mce, vatom1,vatom2)); |
| 887 | IRAtom* vHi8 = mkPCastTo(mce, Ity_I8, vLo8); |
| 888 | return assignNew(mce, Ity_I16, binop(Iop_8HLto16, vHi8, vLo8)); |
| 889 | } |
| 890 | |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 891 | case Iop_Add32: |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 892 | # if 0 |
sewardj | a1cb1b5 | 2004-11-06 12:27:15 +0000 | [diff] [blame] | 893 | return expensiveAdd32(mce, vatom1,vatom2, atom1,atom2); |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 894 | # endif |
sewardj | a1cb1b5 | 2004-11-06 12:27:15 +0000 | [diff] [blame] | 895 | case Iop_Sub32: |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 896 | case Iop_Mul32: |
sewardj | a6929da | 2004-11-03 15:22:25 +0000 | [diff] [blame] | 897 | return mkLeft32(mce, mkUifU32(mce, vatom1,vatom2)); |
| 898 | |
sewardj | edf9b5a | 2004-11-09 16:02:11 +0000 | [diff] [blame] | 899 | case Iop_Mul16: |
sewardj | a1cb1b5 | 2004-11-06 12:27:15 +0000 | [diff] [blame] | 900 | case Iop_Add16: |
sewardj | 35b5db8 | 2004-11-04 20:27:37 +0000 | [diff] [blame] | 901 | case Iop_Sub16: |
| 902 | return mkLeft16(mce, mkUifU16(mce, vatom1,vatom2)); |
| 903 | |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 904 | case Iop_Sub8: |
| 905 | case Iop_Add8: |
| 906 | return mkLeft8(mce, mkUifU8(mce, vatom1,vatom2)); |
| 907 | |
sewardj | 80415ee | 2004-11-04 16:56:29 +0000 | [diff] [blame] | 908 | case Iop_CmpLE32S: case Iop_CmpLE32U: |
| 909 | case Iop_CmpLT32U: case Iop_CmpLT32S: |
sewardj | 35b5db8 | 2004-11-04 20:27:37 +0000 | [diff] [blame] | 910 | case Iop_CmpEQ32: case Iop_CmpNE32: |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 911 | return mkPCastTo(mce, Ity_Bit, mkUifU32(mce, vatom1,vatom2)); |
sewardj | a6929da | 2004-11-03 15:22:25 +0000 | [diff] [blame] | 912 | |
sewardj | edf9b5a | 2004-11-09 16:02:11 +0000 | [diff] [blame] | 913 | case Iop_CmpEQ16: case Iop_CmpNE16: |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 914 | return mkPCastTo(mce, Ity_Bit, mkUifU16(mce, vatom1,vatom2)); |
sewardj | a6929da | 2004-11-03 15:22:25 +0000 | [diff] [blame] | 915 | |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 916 | case Iop_CmpEQ8: case Iop_CmpNE8: |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 917 | return mkPCastTo(mce, Ity_Bit, mkUifU8(mce, vatom1,vatom2)); |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 918 | |
sewardj | 80415ee | 2004-11-04 16:56:29 +0000 | [diff] [blame] | 919 | case Iop_Shl32: case Iop_Shr32: case Iop_Sar32: |
sewardj | a6929da | 2004-11-03 15:22:25 +0000 | [diff] [blame] | 920 | /* Complain if the shift amount is undefined. Then simply |
| 921 | shift the first arg's V bits by the real shift amount. */ |
| 922 | complainIfUndefined(mce, atom2); |
| 923 | return assignNew(mce, Ity_I32, binop(op, vatom1, atom2)); |
| 924 | |
sewardj | 4b39f28 | 2004-11-04 19:41:09 +0000 | [diff] [blame] | 925 | case Iop_Shl16: case Iop_Shr16: |
| 926 | /* Same scheme as with 32-bit shifts. */ |
| 927 | complainIfUndefined(mce, atom2); |
| 928 | return assignNew(mce, Ity_I16, binop(op, vatom1, atom2)); |
| 929 | |
sewardj | edf9b5a | 2004-11-09 16:02:11 +0000 | [diff] [blame] | 930 | case Iop_Shl8: case Iop_Shr8: |
sewardj | 35b5db8 | 2004-11-04 20:27:37 +0000 | [diff] [blame] | 931 | /* Same scheme as with 32-bit shifts. */ |
| 932 | complainIfUndefined(mce, atom2); |
| 933 | return assignNew(mce, Ity_I8, binop(op, vatom1, atom2)); |
| 934 | |
sewardj | 80415ee | 2004-11-04 16:56:29 +0000 | [diff] [blame] | 935 | case Iop_Shl64: case Iop_Shr64: |
| 936 | /* Same scheme as with 32-bit shifts. */ |
| 937 | complainIfUndefined(mce, atom2); |
| 938 | return assignNew(mce, Ity_I64, binop(op, vatom1, atom2)); |
| 939 | |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 940 | case Iop_And32: |
| 941 | uifu = mkUifU32; difd = mkDifD32; |
| 942 | and_or_ty = Ity_I32; improve = mkImproveAND32; goto do_And_Or; |
sewardj | 4fec8f3 | 2004-11-08 13:08:53 +0000 | [diff] [blame] | 943 | case Iop_And16: |
| 944 | uifu = mkUifU16; difd = mkDifD16; |
| 945 | and_or_ty = Ity_I16; improve = mkImproveAND16; goto do_And_Or; |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 946 | case Iop_And8: |
| 947 | uifu = mkUifU8; difd = mkDifD8; |
| 948 | and_or_ty = Ity_I8; improve = mkImproveAND8; goto do_And_Or; |
| 949 | |
| 950 | case Iop_Or32: |
| 951 | uifu = mkUifU32; difd = mkDifD32; |
| 952 | and_or_ty = Ity_I32; improve = mkImproveOR32; goto do_And_Or; |
sewardj | 35b5db8 | 2004-11-04 20:27:37 +0000 | [diff] [blame] | 953 | case Iop_Or16: |
| 954 | uifu = mkUifU16; difd = mkDifD16; |
| 955 | and_or_ty = Ity_I16; improve = mkImproveOR16; goto do_And_Or; |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 956 | case Iop_Or8: |
| 957 | uifu = mkUifU8; difd = mkDifD8; |
| 958 | and_or_ty = Ity_I8; improve = mkImproveOR8; goto do_And_Or; |
| 959 | |
| 960 | do_And_Or: |
| 961 | return |
| 962 | assignNew( |
| 963 | mce, |
| 964 | and_or_ty, |
| 965 | difd(mce, uifu(mce, vatom1, vatom2), |
| 966 | difd(mce, improve(mce, atom1, vatom1), |
| 967 | improve(mce, atom2, vatom2) ) ) ); |
sewardj | 4b39f28 | 2004-11-04 19:41:09 +0000 | [diff] [blame] | 968 | |
| 969 | case Iop_Xor8: |
| 970 | return mkUifU8(mce, vatom1, vatom2); |
sewardj | 4fec8f3 | 2004-11-08 13:08:53 +0000 | [diff] [blame] | 971 | case Iop_Xor16: |
| 972 | return mkUifU16(mce, vatom1, vatom2); |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 973 | case Iop_Xor32: |
| 974 | return mkUifU32(mce, vatom1, vatom2); |
| 975 | |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 976 | default: |
| 977 | ppIROp(op); |
| 978 | VG_(skin_panic)("memcheck:expr2vbits_Binop"); |
| 979 | } |
| 980 | } |
| 981 | |
| 982 | |
sewardj | a6929da | 2004-11-03 15:22:25 +0000 | [diff] [blame] | 983 | static |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 984 | IRExpr* expr2vbits_Unop ( MCEnv* mce, IROp op, IRAtom* atom ) |
sewardj | a6929da | 2004-11-03 15:22:25 +0000 | [diff] [blame] | 985 | { |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 986 | IRAtom* vatom = expr2vbits( mce, atom ); |
sewardj | a6929da | 2004-11-03 15:22:25 +0000 | [diff] [blame] | 987 | sk_assert(isOriginalAtom(mce,atom)); |
sewardj | a6929da | 2004-11-03 15:22:25 +0000 | [diff] [blame] | 988 | switch (op) { |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 989 | |
| 990 | case Iop_F32toF64: |
sewardj | f106711 | 2004-11-07 18:46:22 +0000 | [diff] [blame] | 991 | case Iop_I32toF64: |
| 992 | case Iop_I64toF64: |
| 993 | case Iop_NegF64: |
sewardj | 4fec8f3 | 2004-11-08 13:08:53 +0000 | [diff] [blame] | 994 | case Iop_SinF64: |
| 995 | case Iop_CosF64: |
sewardj | edf9b5a | 2004-11-09 16:02:11 +0000 | [diff] [blame] | 996 | case Iop_TanF64: |
sewardj | 4fec8f3 | 2004-11-08 13:08:53 +0000 | [diff] [blame] | 997 | case Iop_SqrtF64: |
sewardj | edf9b5a | 2004-11-09 16:02:11 +0000 | [diff] [blame] | 998 | case Iop_AbsF64: |
| 999 | case Iop_2xm1F64: |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 1000 | return mkPCastTo(mce, Ity_I64, vatom); |
| 1001 | |
sewardj | f106711 | 2004-11-07 18:46:22 +0000 | [diff] [blame] | 1002 | case Iop_F64toF32: |
sewardj | 4fec8f3 | 2004-11-08 13:08:53 +0000 | [diff] [blame] | 1003 | case Iop_Clz32: |
sewardj | edf9b5a | 2004-11-09 16:02:11 +0000 | [diff] [blame] | 1004 | case Iop_Ctz32: |
sewardj | f106711 | 2004-11-07 18:46:22 +0000 | [diff] [blame] | 1005 | return mkPCastTo(mce, Ity_I32, vatom); |
| 1006 | |
sewardj | edf9b5a | 2004-11-09 16:02:11 +0000 | [diff] [blame] | 1007 | case Iop_32Sto64: |
| 1008 | case Iop_32Uto64: |
| 1009 | return assignNew(mce, Ity_I64, unop(op, vatom)); |
| 1010 | |
sewardj | 80415ee | 2004-11-04 16:56:29 +0000 | [diff] [blame] | 1011 | case Iop_64to32: |
| 1012 | case Iop_64HIto32: |
sewardj | a6929da | 2004-11-03 15:22:25 +0000 | [diff] [blame] | 1013 | case Iop_1Uto32: |
| 1014 | case Iop_8Uto32: |
| 1015 | case Iop_16Uto32: |
sewardj | 35b5db8 | 2004-11-04 20:27:37 +0000 | [diff] [blame] | 1016 | case Iop_16Sto32: |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 1017 | case Iop_8Sto32: |
sewardj | a6929da | 2004-11-03 15:22:25 +0000 | [diff] [blame] | 1018 | return assignNew(mce, Ity_I32, unop(op, vatom)); |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 1019 | |
sewardj | 35b5db8 | 2004-11-04 20:27:37 +0000 | [diff] [blame] | 1020 | case Iop_8Sto16: |
sewardj | edf9b5a | 2004-11-09 16:02:11 +0000 | [diff] [blame] | 1021 | case Iop_8Uto16: |
sewardj | a6929da | 2004-11-03 15:22:25 +0000 | [diff] [blame] | 1022 | case Iop_32to16: |
sewardj | edf9b5a | 2004-11-09 16:02:11 +0000 | [diff] [blame] | 1023 | case Iop_32HIto16: |
sewardj | 35b5db8 | 2004-11-04 20:27:37 +0000 | [diff] [blame] | 1024 | return assignNew(mce, Ity_I16, unop(op, vatom)); |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 1025 | |
| 1026 | case Iop_1Uto8: |
sewardj | edf9b5a | 2004-11-09 16:02:11 +0000 | [diff] [blame] | 1027 | case Iop_16to8: |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 1028 | case Iop_32to8: |
| 1029 | return assignNew(mce, Ity_I8, unop(op, vatom)); |
| 1030 | |
| 1031 | case Iop_32to1: |
| 1032 | return assignNew(mce, Ity_Bit, unop(Iop_32to1, vatom)); |
| 1033 | |
sewardj | 4fec8f3 | 2004-11-08 13:08:53 +0000 | [diff] [blame] | 1034 | case Iop_ReinterpF64asI64: |
sewardj | b139e82 | 2004-11-10 12:07:51 +0000 | [diff] [blame] | 1035 | case Iop_ReinterpI64asF64: |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 1036 | case Iop_Not32: |
sewardj | edf9b5a | 2004-11-09 16:02:11 +0000 | [diff] [blame] | 1037 | case Iop_Not16: |
sewardj | 3861f46 | 2004-11-06 14:22:03 +0000 | [diff] [blame] | 1038 | case Iop_Not8: |
sewardj | a6929da | 2004-11-03 15:22:25 +0000 | [diff] [blame] | 1039 | case Iop_Not1: |
| 1040 | return vatom; |
| 1041 | default: |
| 1042 | ppIROp(op); |
| 1043 | VG_(skin_panic)("memcheck:expr2vbits_Unop"); |
| 1044 | } |
| 1045 | } |
| 1046 | |
| 1047 | |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1048 | static |
sewardj | b139e82 | 2004-11-10 12:07:51 +0000 | [diff] [blame] | 1049 | IRAtom* expr2vbits_LDle ( MCEnv* mce, IRType ty, IRAtom* addr, UInt bias ) |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1050 | { |
| 1051 | void* helper; |
| 1052 | Char* hname; |
| 1053 | IRDirty* di; |
| 1054 | IRTemp datavbits; |
sewardj | b139e82 | 2004-11-10 12:07:51 +0000 | [diff] [blame] | 1055 | IRAtom* addrAct; |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1056 | |
| 1057 | sk_assert(isOriginalAtom(mce,addr)); |
| 1058 | |
| 1059 | /* First, emit a definedness test for the address. This also sets |
| 1060 | the address (shadow) to 'defined' following the test. */ |
sewardj | a6929da | 2004-11-03 15:22:25 +0000 | [diff] [blame] | 1061 | complainIfUndefined( mce, addr ); |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1062 | |
| 1063 | /* Now cook up a call to the relevant helper function, to read the |
| 1064 | data V bits from shadow memory. */ |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 1065 | ty = shadowType(ty); |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1066 | switch (ty) { |
sewardj | f106711 | 2004-11-07 18:46:22 +0000 | [diff] [blame] | 1067 | case Ity_I64: helper = &MC_(helperc_LOADV8); |
| 1068 | hname = "MC_(helperc_LOADV8)"; |
| 1069 | break; |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1070 | case Ity_I32: helper = &MC_(helperc_LOADV4); |
| 1071 | hname = "MC_(helperc_LOADV4)"; |
| 1072 | break; |
| 1073 | case Ity_I16: helper = &MC_(helperc_LOADV2); |
| 1074 | hname = "MC_(helperc_LOADV2)"; |
| 1075 | break; |
| 1076 | case Ity_I8: helper = &MC_(helperc_LOADV1); |
| 1077 | hname = "MC_(helperc_LOADV1)"; |
| 1078 | break; |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 1079 | default: ppIRType(ty); |
| 1080 | VG_(skin_panic)("memcheck:do_shadow_LDle"); |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1081 | } |
| 1082 | |
sewardj | b139e82 | 2004-11-10 12:07:51 +0000 | [diff] [blame] | 1083 | /* Generate the actual address into addrAct. */ |
| 1084 | if (bias == 0) { |
| 1085 | addrAct = addr; |
| 1086 | } else { |
| 1087 | IRType tyAddr = mce->hWordTy; |
| 1088 | sk_assert( tyAddr == Ity_I32 || tyAddr == Ity_I64 ); |
| 1089 | IROp mkAdd = tyAddr==Ity_I32 ? Iop_Add32 : Iop_Add64; |
| 1090 | IRAtom* eBias = tyAddr==Ity_I32 ? mkU32(bias) : mkU64(bias); |
| 1091 | addrAct = assignNew(mce, tyAddr, binop(mkAdd, addr, eBias) ); |
| 1092 | } |
| 1093 | |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1094 | /* We need to have a place to park the V bits we're just about to |
| 1095 | read. */ |
| 1096 | datavbits = newIRTemp(mce->bb->tyenv, ty); |
| 1097 | di = unsafeIRDirty_1_N( datavbits, |
sewardj | b139e82 | 2004-11-10 12:07:51 +0000 | [diff] [blame] | 1098 | 1/*regparms*/, hname, helper, |
| 1099 | mkIRExprVec_1( addrAct )); |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1100 | setHelperAnns( mce, di ); |
| 1101 | stmt( mce->bb, IRStmt_Dirty(di) ); |
| 1102 | |
| 1103 | return mkexpr(datavbits); |
| 1104 | } |
| 1105 | |
| 1106 | |
| 1107 | static |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 1108 | IRAtom* expr2vbits_Mux0X ( MCEnv* mce, |
sewardj | 9f8abf8 | 2004-11-10 02:39:49 +0000 | [diff] [blame] | 1109 | IRAtom* cond, IRAtom* expr0, IRAtom* exprX ) |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 1110 | { |
| 1111 | IRAtom *vbitsC, *vbits0, *vbitsX; |
| 1112 | IRType ty; |
| 1113 | /* Given Mux0X(cond,expr0,exprX), generate |
| 1114 | Mux0X(cond,expr0#,exprX#) `UifU` PCast(cond#) |
| 1115 | That is, steer the V bits like the originals, but trash the |
| 1116 | result if the steering value is undefined. This gives |
| 1117 | lazy propagation. */ |
| 1118 | sk_assert(isOriginalAtom(mce, cond)); |
| 1119 | sk_assert(isOriginalAtom(mce, expr0)); |
| 1120 | sk_assert(isOriginalAtom(mce, exprX)); |
| 1121 | |
| 1122 | vbitsC = expr2vbits(mce, cond); |
| 1123 | vbits0 = expr2vbits(mce, expr0); |
| 1124 | vbitsX = expr2vbits(mce, exprX); |
| 1125 | ty = typeOfIRExpr(mce->bb->tyenv, vbits0); |
| 1126 | |
| 1127 | return |
| 1128 | mkUifU(mce, ty, assignNew(mce, ty, IRExpr_Mux0X(cond, vbits0, vbitsX)), |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 1129 | mkPCastTo(mce, ty, vbitsC) ); |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 1130 | } |
| 1131 | |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 1132 | /* --------- This is the main expression-handling function. --------- */ |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 1133 | |
/* Top-level dispatcher: compute the shadow (V-bits) expression for
   an arbitrary IRExpr by switching on its tag and delegating to the
   appropriate per-kind handler above.  Panics (after printing the
   expression) on tags it does not handle. */
static
IRExpr* expr2vbits ( MCEnv* mce, IRExpr* e )
{
   switch (e->tag) {

      case Iex_Get:
         return shadow_GET( mce, e->Iex.Get.offset, e->Iex.Get.ty );

      case Iex_GetI:
         return shadow_GETI( mce, e->Iex.GetI.descr,
                                  e->Iex.GetI.ix, e->Iex.GetI.bias );

      case Iex_Tmp:
         /* A temp's V bits live in its shadow temp. */
         return IRExpr_Tmp( findShadowTmp(mce, e->Iex.Tmp.tmp) );

      case Iex_Const:
         /* Constants are always fully defined. */
         return definedOfType(shadowType(typeOfIRExpr(mce->bb->tyenv, e)));

      case Iex_Binop:
         return expr2vbits_Binop(
                   mce,
                   e->Iex.Binop.op,
                   e->Iex.Binop.arg1, e->Iex.Binop.arg2
                );

      case Iex_Unop:
         return expr2vbits_Unop( mce, e->Iex.Unop.op, e->Iex.Unop.arg );

      case Iex_LDle:
         return expr2vbits_LDle( mce, e->Iex.LDle.ty,
                                      e->Iex.LDle.addr, 0/*addr bias*/ );

      case Iex_CCall:
         /* Lazily merge the definedness of all (non-excluded) args. */
         return mkLazyN( mce, e->Iex.CCall.args,
                              e->Iex.CCall.retty,
                              e->Iex.CCall.cee );

      case Iex_Mux0X:
         return expr2vbits_Mux0X( mce, e->Iex.Mux0X.cond, e->Iex.Mux0X.expr0,
                                       e->Iex.Mux0X.exprX);

      default:
         VG_(printf)("\n");
         ppIRExpr(e);
         VG_(printf)("\n");
         VG_(skin_panic)("memcheck: expr2vbits");
   }
}
| 1182 | |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 1183 | /*------------------------------------------------------------*/ |
| 1184 | /*--- Generate shadow stmts from all kinds of IRStmts. ---*/ |
| 1185 | /*------------------------------------------------------------*/ |
| 1186 | |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 1187 | /* Widen a value to the host word size. */ |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 1188 | |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1189 | static |
| 1190 | IRExpr* zwidenToHostWord ( MCEnv* mce, IRAtom* vatom ) |
| 1191 | { |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 1192 | /* vatom is vbits-value and as such can only have a shadow type. */ |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1193 | sk_assert(isShadowAtom(mce,vatom)); |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 1194 | |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1195 | IRType ty = typeOfIRExpr(mce->bb->tyenv, vatom); |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 1196 | IRType tyH = mce->hWordTy; |
| 1197 | |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 1198 | if (tyH == Ity_I32) { |
| 1199 | switch (ty) { |
| 1200 | case Ity_I32: return vatom; |
| 1201 | case Ity_I16: return assignNew(mce, tyH, unop(Iop_16Uto32, vatom)); |
| 1202 | case Ity_I8: return assignNew(mce, tyH, unop(Iop_8Uto32, vatom)); |
| 1203 | default: goto unhandled; |
| 1204 | } |
| 1205 | } else { |
| 1206 | goto unhandled; |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1207 | } |
sewardj | 1ce85b5 | 2004-11-04 15:21:04 +0000 | [diff] [blame] | 1208 | unhandled: |
| 1209 | VG_(printf)("\nty = "); ppIRType(ty); VG_(printf)("\n"); |
| 1210 | VG_(skin_panic)("zwidenToHostWord"); |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1211 | } |
| 1212 | |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 1213 | |
sewardj | b139e82 | 2004-11-10 12:07:51 +0000 | [diff] [blame] | 1214 | /* Generate a shadow store. addr is always the original address atom. |
| 1215 | You can pass in either originals or V-bits for the data atom, but |
| 1216 | obviously not both. */ |
| 1217 | |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1218 | static |
sewardj | b139e82 | 2004-11-10 12:07:51 +0000 | [diff] [blame] | 1219 | void do_shadow_STle ( MCEnv* mce, |
| 1220 | IRAtom* addr, UInt bias, |
| 1221 | IRAtom* data, IRAtom* vdata ) |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1222 | { |
| 1223 | IRType ty; |
| 1224 | IRDirty* di; |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1225 | void* helper = NULL; |
| 1226 | Char* hname = NULL; |
sewardj | b139e82 | 2004-11-10 12:07:51 +0000 | [diff] [blame] | 1227 | IRAtom* addrAct; |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1228 | |
sewardj | b139e82 | 2004-11-10 12:07:51 +0000 | [diff] [blame] | 1229 | if (data) { |
| 1230 | sk_assert(!vdata); |
| 1231 | sk_assert(isOriginalAtom(mce, data)); |
| 1232 | sk_assert(bias == 0); |
| 1233 | vdata = expr2vbits( mce, data ); |
| 1234 | } else { |
| 1235 | sk_assert(vdata); |
| 1236 | } |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1237 | |
| 1238 | sk_assert(isOriginalAtom(mce,addr)); |
sewardj | b139e82 | 2004-11-10 12:07:51 +0000 | [diff] [blame] | 1239 | sk_assert(isShadowAtom(mce,vdata)); |
| 1240 | |
| 1241 | ty = typeOfIRExpr(mce->bb->tyenv, vdata); |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1242 | |
| 1243 | /* First, emit a definedness test for the address. This also sets |
| 1244 | the address (shadow) to 'defined' following the test. */ |
sewardj | b139e82 | 2004-11-10 12:07:51 +0000 | [diff] [blame] | 1245 | complainIfUndefined( mce, addr ); |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1246 | |
| 1247 | /* Now cook up a call to the relevant helper function, to write the |
| 1248 | data V bits into shadow memory. */ |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1249 | switch (ty) { |
sewardj | f106711 | 2004-11-07 18:46:22 +0000 | [diff] [blame] | 1250 | case Ity_I64: helper = &MC_(helperc_STOREV8); |
| 1251 | hname = "MC_(helperc_STOREV8)"; |
| 1252 | break; |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1253 | case Ity_I32: helper = &MC_(helperc_STOREV4); |
| 1254 | hname = "MC_(helperc_STOREV4)"; |
| 1255 | break; |
| 1256 | case Ity_I16: helper = &MC_(helperc_STOREV2); |
| 1257 | hname = "MC_(helperc_STOREV2)"; |
| 1258 | break; |
| 1259 | case Ity_I8: helper = &MC_(helperc_STOREV1); |
| 1260 | hname = "MC_(helperc_STOREV1)"; |
| 1261 | break; |
| 1262 | default: VG_(skin_panic)("memcheck:do_shadow_STle"); |
| 1263 | } |
| 1264 | |
sewardj | b139e82 | 2004-11-10 12:07:51 +0000 | [diff] [blame] | 1265 | /* Generate the actual address into addrAct. */ |
| 1266 | if (bias == 0) { |
| 1267 | addrAct = addr; |
| 1268 | } else { |
| 1269 | IRType tyAddr = mce->hWordTy; |
| 1270 | sk_assert( tyAddr == Ity_I32 || tyAddr == Ity_I64 ); |
| 1271 | IROp mkAdd = tyAddr==Ity_I32 ? Iop_Add32 : Iop_Add64; |
| 1272 | IRAtom* eBias = tyAddr==Ity_I32 ? mkU32(bias) : mkU64(bias); |
| 1273 | addrAct = assignNew(mce, tyAddr, binop(mkAdd, addr, eBias) ); |
| 1274 | } |
| 1275 | |
sewardj | f106711 | 2004-11-07 18:46:22 +0000 | [diff] [blame] | 1276 | if (ty == Ity_I64) { |
sewardj | 9f8abf8 | 2004-11-10 02:39:49 +0000 | [diff] [blame] | 1277 | /* We can't do this with regparm 2 on 32-bit platforms, since |
| 1278 | the back ends aren't clever enough to handle 64-bit regparm |
| 1279 | args. Therefore be different. */ |
sewardj | f106711 | 2004-11-07 18:46:22 +0000 | [diff] [blame] | 1280 | di = unsafeIRDirty_0_N( |
| 1281 | 1/*regparms*/, hname, helper, |
sewardj | b139e82 | 2004-11-10 12:07:51 +0000 | [diff] [blame] | 1282 | mkIRExprVec_2( addrAct, vdata )); |
sewardj | f106711 | 2004-11-07 18:46:22 +0000 | [diff] [blame] | 1283 | } else { |
| 1284 | di = unsafeIRDirty_0_N( |
| 1285 | 2/*regparms*/, hname, helper, |
sewardj | b139e82 | 2004-11-10 12:07:51 +0000 | [diff] [blame] | 1286 | mkIRExprVec_2( addrAct, |
| 1287 | zwidenToHostWord( mce, vdata ))); |
sewardj | f106711 | 2004-11-07 18:46:22 +0000 | [diff] [blame] | 1288 | } |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1289 | setHelperAnns( mce, di ); |
| 1290 | stmt( mce->bb, IRStmt_Dirty(di) ); |
| 1291 | } |
| 1292 | |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 1293 | |
sewardj | 9f8abf8 | 2004-11-10 02:39:49 +0000 | [diff] [blame] | 1294 | /* Do lazy pessimistic propagation through a dirty helper call, by |
| 1295 | looking at the annotations on it. This is the most complex part of |
| 1296 | Memcheck. */ |
| 1297 | |
| 1298 | static IRType szToITy ( Int n ) |
| 1299 | { |
| 1300 | switch (n) { |
| 1301 | case 1: return Ity_I8; |
| 1302 | case 2: return Ity_I16; |
| 1303 | case 4: return Ity_I32; |
| 1304 | case 8: return Ity_I64; |
| 1305 | default: VG_(skin_panic)("szToITy(memcheck)"); |
| 1306 | } |
| 1307 | } |
| 1308 | |
| 1309 | static |
| 1310 | void do_shadow_Dirty ( MCEnv* mce, IRDirty* d ) |
| 1311 | { |
sewardj | b139e82 | 2004-11-10 12:07:51 +0000 | [diff] [blame] | 1312 | Int i, offset, toDo; |
| 1313 | IRAtom* src; |
| 1314 | IRType tyAddr, tySrc, tyDst; |
| 1315 | IRTemp dst; |
sewardj | 9f8abf8 | 2004-11-10 02:39:49 +0000 | [diff] [blame] | 1316 | |
| 1317 | /* First check the guard. */ |
| 1318 | complainIfUndefined(mce, d->guard); |
| 1319 | |
| 1320 | /* Now round up all inputs and PCast over them. */ |
| 1321 | IRAtom* here; |
| 1322 | IRAtom* curr = definedOfType(Ity_I32); |
| 1323 | |
| 1324 | /* Inputs: unmasked args */ |
| 1325 | for (i = 0; d->args[i]; i++) { |
| 1326 | if (d->cee->mcx_mask & (1<<i)) { |
| 1327 | /* ignore this arg */ |
| 1328 | } else { |
| 1329 | here = mkPCastTo( mce, Ity_I32, expr2vbits(mce, d->args[i]) ); |
| 1330 | curr = mkUifU32(mce, here, curr); |
| 1331 | } |
| 1332 | } |
| 1333 | |
| 1334 | /* Inputs: guest state that we read. */ |
| 1335 | for (i = 0; i < d->nFxState; i++) { |
| 1336 | sk_assert(d->fxState[i].fx != Ifx_None); |
| 1337 | if (d->fxState[i].fx == Ifx_Write) |
| 1338 | continue; |
| 1339 | /* This state element is read or modified. So we need to |
| 1340 | consider it. */ |
| 1341 | tySrc = szToITy( d->fxState[i].size ); |
| 1342 | src = assignNew( mce, tySrc, |
| 1343 | shadow_GET(mce, d->fxState[i].offset, tySrc ) ); |
| 1344 | here = mkPCastTo( mce, Ity_I32, src ); |
| 1345 | curr = mkUifU32(mce, here, curr); |
| 1346 | } |
| 1347 | |
| 1348 | /* Inputs: memory. First set up some info needed regardless of |
| 1349 | whether we're doing reads or writes. */ |
sewardj | 9f8abf8 | 2004-11-10 02:39:49 +0000 | [diff] [blame] | 1350 | tyAddr = Ity_INVALID; |
| 1351 | |
| 1352 | if (d->mFx != Ifx_None) { |
| 1353 | /* Because we may do multiple shadow loads/stores from the same |
| 1354 | base address, it's best to do a single test of its |
| 1355 | definedness right now. Post-instrumentation optimisation |
| 1356 | should remove all but this test. */ |
| 1357 | sk_assert(d->mAddr); |
| 1358 | complainIfUndefined(mce, d->mAddr); |
| 1359 | |
| 1360 | tyAddr = typeOfIRExpr(mce->bb->tyenv, d->mAddr); |
| 1361 | sk_assert(tyAddr == Ity_I32 || tyAddr == Ity_I64); |
sewardj | b139e82 | 2004-11-10 12:07:51 +0000 | [diff] [blame] | 1362 | sk_assert(tyAddr == mce->hWordTy); /* not really right */ |
sewardj | 9f8abf8 | 2004-11-10 02:39:49 +0000 | [diff] [blame] | 1363 | } |
| 1364 | |
| 1365 | /* Deal with memory inputs (reads or modifies) */ |
| 1366 | if (d->mFx == Ifx_Read || d->mFx == Ifx_Modify) { |
| 1367 | offset = 0; |
| 1368 | toDo = d->mSize; |
sewardj | 9f8abf8 | 2004-11-10 02:39:49 +0000 | [diff] [blame] | 1369 | /* chew off 32-bit chunks */ |
| 1370 | while (toDo >= 4) { |
| 1371 | here = mkPCastTo( |
| 1372 | mce, Ity_I32, |
sewardj | b139e82 | 2004-11-10 12:07:51 +0000 | [diff] [blame] | 1373 | expr2vbits_LDle ( mce, Ity_I32, |
| 1374 | d->mAddr, d->mSize - toDo ) |
sewardj | 9f8abf8 | 2004-11-10 02:39:49 +0000 | [diff] [blame] | 1375 | ); |
| 1376 | curr = mkUifU32(mce, here, curr); |
sewardj | 9f8abf8 | 2004-11-10 02:39:49 +0000 | [diff] [blame] | 1377 | toDo -= 4; |
| 1378 | } |
| 1379 | /* chew off 16-bit chunks */ |
| 1380 | while (toDo >= 2) { |
| 1381 | here = mkPCastTo( |
| 1382 | mce, Ity_I32, |
sewardj | b139e82 | 2004-11-10 12:07:51 +0000 | [diff] [blame] | 1383 | expr2vbits_LDle ( mce, Ity_I16, |
| 1384 | d->mAddr, d->mSize - toDo ) |
sewardj | 9f8abf8 | 2004-11-10 02:39:49 +0000 | [diff] [blame] | 1385 | ); |
| 1386 | curr = mkUifU32(mce, here, curr); |
sewardj | 9f8abf8 | 2004-11-10 02:39:49 +0000 | [diff] [blame] | 1387 | toDo -= 2; |
| 1388 | } |
| 1389 | sk_assert(toDo == 0); /* also need to handle 1-byte excess */ |
| 1390 | } |
| 1391 | |
| 1392 | /* Whew! So curr is a 32-bit V-value summarising pessimistically |
| 1393 | all the inputs to the helper. Now we need to re-distribute the |
| 1394 | results to all destinations. */ |
| 1395 | |
| 1396 | /* Outputs: the destination temporary, if there is one. */ |
sewardj | 92d168d | 2004-11-15 14:22:12 +0000 | [diff] [blame^] | 1397 | if (d->tmp != IRTemp_INVALID) { |
sewardj | 9f8abf8 | 2004-11-10 02:39:49 +0000 | [diff] [blame] | 1398 | dst = findShadowTmp(mce, d->tmp); |
| 1399 | tyDst = typeOfIRTemp(mce->bb->tyenv, d->tmp); |
| 1400 | assign( mce->bb, dst, mkPCastTo( mce, tyDst, curr) ); |
| 1401 | } |
| 1402 | |
| 1403 | /* Outputs: guest state that we write or modify. */ |
| 1404 | for (i = 0; i < d->nFxState; i++) { |
| 1405 | sk_assert(d->fxState[i].fx != Ifx_None); |
| 1406 | if (d->fxState[i].fx == Ifx_Read) |
| 1407 | continue; |
| 1408 | /* this state element is written or modified. So we need to |
| 1409 | consider it. */ |
| 1410 | tyDst = szToITy( d->fxState[i].size ); |
| 1411 | do_shadow_PUT( mce, d->fxState[i].offset, |
| 1412 | NULL, /* original atom */ |
| 1413 | mkPCastTo( mce, tyDst, curr ) ); |
| 1414 | } |
| 1415 | |
| 1416 | /* Outputs: memory that we write or modify. */ |
| 1417 | if (d->mFx == Ifx_Write || d->mFx == Ifx_Modify) { |
| 1418 | offset = 0; |
| 1419 | toDo = d->mSize; |
sewardj | 9f8abf8 | 2004-11-10 02:39:49 +0000 | [diff] [blame] | 1420 | /* chew off 32-bit chunks */ |
| 1421 | while (toDo >= 4) { |
sewardj | b139e82 | 2004-11-10 12:07:51 +0000 | [diff] [blame] | 1422 | do_shadow_STle( mce, d->mAddr, d->mSize - toDo, |
| 1423 | NULL, /* original data */ |
sewardj | 9f8abf8 | 2004-11-10 02:39:49 +0000 | [diff] [blame] | 1424 | mkPCastTo( mce, Ity_I32, curr ) ); |
sewardj | 9f8abf8 | 2004-11-10 02:39:49 +0000 | [diff] [blame] | 1425 | toDo -= 4; |
| 1426 | } |
| 1427 | /* chew off 16-bit chunks */ |
| 1428 | while (toDo >= 2) { |
sewardj | b139e82 | 2004-11-10 12:07:51 +0000 | [diff] [blame] | 1429 | do_shadow_STle( mce, d->mAddr, d->mSize - toDo, |
| 1430 | NULL, /* original data */ |
| 1431 | mkPCastTo( mce, Ity_I16, curr ) ); |
sewardj | 9f8abf8 | 2004-11-10 02:39:49 +0000 | [diff] [blame] | 1432 | toDo -= 2; |
| 1433 | } |
| 1434 | sk_assert(toDo == 0); /* also need to handle 1-byte excess */ |
| 1435 | } |
| 1436 | |
| 1437 | } |
| 1438 | |
| 1439 | |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 1440 | /*------------------------------------------------------------*/ |
| 1441 | /*--- Memcheck main ---*/ |
| 1442 | /*------------------------------------------------------------*/ |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1443 | |
sewardj | 67cc0db | 2004-11-07 00:57:46 +0000 | [diff] [blame] | 1444 | #if 0 /* UNUSED */ |
| 1445 | static Bool isBogusAtom ( IRAtom* at ) |
| 1446 | { |
| 1447 | ULong n = 0; |
| 1448 | IRConst* con; |
| 1449 | sk_assert(isAtom(at)); |
| 1450 | if (at->tag == Iex_Tmp) |
| 1451 | return False; |
| 1452 | sk_assert(at->tag == Iex_Const); |
| 1453 | con = at->Iex.Const.con; |
| 1454 | switch (con->tag) { |
| 1455 | case Ico_U8: n = (ULong)con->Ico.U8; break; |
| 1456 | case Ico_U16: n = (ULong)con->Ico.U16; break; |
| 1457 | case Ico_U32: n = (ULong)con->Ico.U32; break; |
| 1458 | case Ico_U64: n = (ULong)con->Ico.U64; break; |
| 1459 | default: ppIRExpr(at); sk_assert(0); |
| 1460 | } |
| 1461 | /* VG_(printf)("%llx\n", n); */ |
| 1462 | return (n == 0xFEFEFEFF |
| 1463 | || n == 0x80808080 |
| 1464 | || n == 0x1010101 |
| 1465 | || n == 1010100); |
| 1466 | } |
| 1467 | |
| 1468 | static Bool checkForBogusLiterals ( /*FLAT*/ IRStmt* st ) |
| 1469 | { |
| 1470 | Int i; |
| 1471 | IRExpr* e; |
| 1472 | switch (st->tag) { |
| 1473 | case Ist_Tmp: |
| 1474 | e = st->Ist.Tmp.data; |
| 1475 | switch (e->tag) { |
| 1476 | case Iex_Get: |
| 1477 | case Iex_Tmp: |
| 1478 | return False; |
| 1479 | case Iex_Unop: |
| 1480 | return isBogusAtom(e->Iex.Unop.arg); |
| 1481 | case Iex_Binop: |
| 1482 | return isBogusAtom(e->Iex.Binop.arg1) |
| 1483 | || isBogusAtom(e->Iex.Binop.arg2); |
| 1484 | case Iex_Mux0X: |
| 1485 | return isBogusAtom(e->Iex.Mux0X.cond) |
| 1486 | || isBogusAtom(e->Iex.Mux0X.expr0) |
| 1487 | || isBogusAtom(e->Iex.Mux0X.exprX); |
| 1488 | case Iex_LDle: |
| 1489 | return isBogusAtom(e->Iex.LDle.addr); |
| 1490 | case Iex_CCall: |
| 1491 | for (i = 0; e->Iex.CCall.args[i]; i++) |
| 1492 | if (isBogusAtom(e->Iex.CCall.args[i])) |
| 1493 | return True; |
| 1494 | return False; |
| 1495 | default: |
| 1496 | goto unhandled; |
| 1497 | } |
| 1498 | case Ist_Put: |
| 1499 | return isBogusAtom(st->Ist.Put.data); |
| 1500 | case Ist_STle: |
| 1501 | return isBogusAtom(st->Ist.STle.addr) |
| 1502 | || isBogusAtom(st->Ist.STle.data); |
| 1503 | case Ist_Exit: |
| 1504 | return isBogusAtom(st->Ist.Exit.cond); |
| 1505 | default: |
| 1506 | unhandled: |
| 1507 | ppIRStmt(st); |
| 1508 | VG_(skin_panic)("hasBogusLiterals"); |
| 1509 | } |
| 1510 | } |
| 1511 | #endif /* UNUSED */ |
| 1512 | |
| 1513 | |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1514 | IRBB* SK_(instrument) ( IRBB* bb_in, VexGuestLayout* layout, IRType hWordTy ) |
| 1515 | { |
sewardj | 9f8abf8 | 2004-11-10 02:39:49 +0000 | [diff] [blame] | 1516 | Bool verboze = False; //True; |
sewardj | 67cc0db | 2004-11-07 00:57:46 +0000 | [diff] [blame] | 1517 | |
| 1518 | /* Bool hasBogusLiterals = False; */ |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1519 | |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 1520 | Int i, j, first_stmt; |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1521 | IRStmt* st; |
| 1522 | MCEnv mce; |
| 1523 | |
| 1524 | /* Set up BB */ |
| 1525 | IRBB* bb = emptyIRBB(); |
| 1526 | bb->tyenv = dopyIRTypeEnv(bb_in->tyenv); |
| 1527 | bb->next = dopyIRExpr(bb_in->next); |
| 1528 | bb->jumpkind = bb_in->jumpkind; |
| 1529 | |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1530 | /* Set up the running environment. Only .bb is modified as we go |
| 1531 | along. */ |
| 1532 | mce.bb = bb; |
| 1533 | mce.layout = layout; |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 1534 | mce.n_originalTmps = bb->tyenv->types_used; |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1535 | mce.hWordTy = hWordTy; |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 1536 | mce.tmpMap = LibVEX_Alloc(mce.n_originalTmps * sizeof(IRTemp)); |
| 1537 | for (i = 0; i < mce.n_originalTmps; i++) |
sewardj | 92d168d | 2004-11-15 14:22:12 +0000 | [diff] [blame^] | 1538 | mce.tmpMap[i] = IRTemp_INVALID; |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1539 | |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 1540 | /* Iterate over the stmts. */ |
| 1541 | |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1542 | for (i = 0; i < bb_in->stmts_used; i++) { |
| 1543 | st = bb_in->stmts[i]; |
| 1544 | if (!st) continue; |
| 1545 | |
| 1546 | sk_assert(isFlatIRStmt(st)); |
sewardj | 67cc0db | 2004-11-07 00:57:46 +0000 | [diff] [blame] | 1547 | |
| 1548 | /* |
| 1549 | if (!hasBogusLiterals) { |
| 1550 | hasBogusLiterals = checkForBogusLiterals(st); |
| 1551 | if (hasBogusLiterals) { |
| 1552 | VG_(printf)("bogus: "); |
| 1553 | ppIRStmt(st); |
| 1554 | VG_(printf)("\n"); |
| 1555 | } |
| 1556 | } |
| 1557 | */ |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1558 | first_stmt = bb->stmts_used; |
| 1559 | |
sewardj | 3861f46 | 2004-11-06 14:22:03 +0000 | [diff] [blame] | 1560 | if (verboze) { |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1561 | ppIRStmt(st); |
| 1562 | VG_(printf)("\n\n"); |
| 1563 | } |
| 1564 | |
| 1565 | switch (st->tag) { |
| 1566 | |
| 1567 | case Ist_Tmp: |
| 1568 | assign( bb, findShadowTmp(&mce, st->Ist.Tmp.tmp), |
| 1569 | expr2vbits( &mce, st->Ist.Tmp.data) ); |
| 1570 | break; |
| 1571 | |
| 1572 | case Ist_Put: |
| 1573 | do_shadow_PUT( &mce, |
| 1574 | st->Ist.Put.offset, |
sewardj | 9f8abf8 | 2004-11-10 02:39:49 +0000 | [diff] [blame] | 1575 | st->Ist.Put.data, |
| 1576 | NULL /* shadow atom */ ); |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1577 | break; |
| 1578 | |
sewardj | 9da6e24 | 2004-11-05 01:56:14 +0000 | [diff] [blame] | 1579 | case Ist_PutI: |
| 1580 | do_shadow_PUTI( &mce, |
| 1581 | st->Ist.PutI.descr, |
| 1582 | st->Ist.PutI.ix, |
| 1583 | st->Ist.PutI.bias, |
| 1584 | st->Ist.PutI.data ); |
| 1585 | break; |
| 1586 | |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1587 | case Ist_STle: |
sewardj | b139e82 | 2004-11-10 12:07:51 +0000 | [diff] [blame] | 1588 | do_shadow_STle( &mce, st->Ist.STle.addr, 0/* addr bias */, |
| 1589 | st->Ist.STle.data, |
| 1590 | NULL /* shadow data */ ); |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1591 | break; |
| 1592 | |
sewardj | a6929da | 2004-11-03 15:22:25 +0000 | [diff] [blame] | 1593 | case Ist_Exit: |
sewardj | 67cc0db | 2004-11-07 00:57:46 +0000 | [diff] [blame] | 1594 | /* if (!hasBogusLiterals) */ |
| 1595 | complainIfUndefined( &mce, st->Ist.Exit.cond ); |
sewardj | a6929da | 2004-11-03 15:22:25 +0000 | [diff] [blame] | 1596 | break; |
| 1597 | |
sewardj | 9f8abf8 | 2004-11-10 02:39:49 +0000 | [diff] [blame] | 1598 | case Ist_Dirty: |
| 1599 | do_shadow_Dirty( &mce, st->Ist.Dirty.details ); |
| 1600 | break; |
| 1601 | |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1602 | default: |
| 1603 | VG_(printf)("\n"); |
| 1604 | ppIRStmt(st); |
| 1605 | VG_(printf)("\n"); |
| 1606 | VG_(skin_panic)("memcheck: unhandled IRStmt"); |
| 1607 | |
| 1608 | } /* switch (st->tag) */ |
| 1609 | |
sewardj | 3861f46 | 2004-11-06 14:22:03 +0000 | [diff] [blame] | 1610 | if (verboze) { |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1611 | for (j = first_stmt; j < bb->stmts_used; j++) { |
| 1612 | VG_(printf)(" "); |
| 1613 | ppIRStmt(bb->stmts[j]); |
| 1614 | VG_(printf)("\n"); |
| 1615 | } |
| 1616 | VG_(printf)("\n"); |
| 1617 | } |
| 1618 | |
| 1619 | addStmtToIRBB(bb, st); |
| 1620 | |
| 1621 | } |
| 1622 | |
sewardj | 2985e1b | 2004-11-06 13:51:48 +0000 | [diff] [blame] | 1623 | /* Now we need to complain if the jump target is undefined. */ |
sewardj | a6929da | 2004-11-03 15:22:25 +0000 | [diff] [blame] | 1624 | first_stmt = bb->stmts_used; |
| 1625 | |
sewardj | 3861f46 | 2004-11-06 14:22:03 +0000 | [diff] [blame] | 1626 | if (verboze) { |
sewardj | a6929da | 2004-11-03 15:22:25 +0000 | [diff] [blame] | 1627 | VG_(printf)("bb->next = "); |
| 1628 | ppIRExpr(bb->next); |
| 1629 | VG_(printf)("\n\n"); |
| 1630 | } |
| 1631 | |
| 1632 | complainIfUndefined( &mce, bb->next ); |
| 1633 | |
sewardj | 3861f46 | 2004-11-06 14:22:03 +0000 | [diff] [blame] | 1634 | if (verboze) { |
sewardj | a6929da | 2004-11-03 15:22:25 +0000 | [diff] [blame] | 1635 | for (j = first_stmt; j < bb->stmts_used; j++) { |
| 1636 | VG_(printf)(" "); |
| 1637 | ppIRStmt(bb->stmts[j]); |
| 1638 | VG_(printf)("\n"); |
| 1639 | } |
| 1640 | VG_(printf)("\n"); |
| 1641 | } |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1642 | |
| 1643 | return bb; |
sewardj | 1dbd19d | 2004-11-03 09:10:30 +0000 | [diff] [blame] | 1644 | } |
| 1645 | |
sewardj | 21082ff | 2004-10-19 13:11:35 +0000 | [diff] [blame] | 1646 | /*--------------------------------------------------------------------*/ |
| 1647 | /*--- end mc_translate.c ---*/ |
| 1648 | /*--------------------------------------------------------------------*/ |