sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 1 | |
| 2 | /*--------------------------------------------------------------------*/ |
| 3 | /*--- An implementation of malloc/free for the client. ---*/ |
| 4 | /*--- vg_clientmalloc.c ---*/ |
| 5 | /*--------------------------------------------------------------------*/ |
| 6 | |
| 7 | /* |
njn | c953984 | 2002-10-02 13:26:35 +0000 | [diff] [blame] | 8 | This file is part of Valgrind, an extensible x86 protected-mode |
| 9 | emulator for monitoring program execution on x86-Unixes. |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 10 | |
| 11 | Copyright (C) 2000-2002 Julian Seward |
| 12 | jseward@acm.org |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 13 | |
| 14 | This program is free software; you can redistribute it and/or |
| 15 | modify it under the terms of the GNU General Public License as |
| 16 | published by the Free Software Foundation; either version 2 of the |
| 17 | License, or (at your option) any later version. |
| 18 | |
| 19 | This program is distributed in the hope that it will be useful, but |
| 20 | WITHOUT ANY WARRANTY; without even the implied warranty of |
| 21 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
| 22 | General Public License for more details. |
| 23 | |
| 24 | You should have received a copy of the GNU General Public License |
| 25 | along with this program; if not, write to the Free Software |
| 26 | Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA |
| 27 | 02111-1307, USA. |
| 28 | |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 29 | The GNU General Public License is contained in the file COPYING. |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 30 | */ |
| 31 | |
| 32 | #include "vg_include.h" |
| 33 | |
| 34 | |
| 35 | /*------------------------------------------------------------*/ |
| 36 | /*--- Defns ---*/ |
| 37 | /*------------------------------------------------------------*/ |
| 38 | |
/* #define DEBUG_CLIENTMALLOC */

/* Holds malloc'd but not freed blocks.  Static, so zero-inited by
   default.  Blocks are hashed by address into VG_N_MALLOCLISTS
   singly-linked lists; VG_MALLOCLIST_NO picks the list for an
   address. */
#define VG_MALLOCLIST_NO(aa) (((UInt)(aa)) % VG_N_MALLOCLISTS)
static ShadowChunk* vg_malloclist[VG_N_MALLOCLISTS];

/* Stats: counts of client mallocs/frees and total bytes requested,
   reported by VG_(print_malloc_stats). */
static UInt vg_cmalloc_n_mallocs  = 0;
static UInt vg_cmalloc_n_frees    = 0;
static UInt vg_cmalloc_bs_mallocd = 0;

/* Free-search counters; only printed from the hand-enabled (if (0))
   branch of VG_(print_malloc_stats). */
static UInt vg_mlist_frees = 0;
static UInt vg_mlist_tries = 0;
| 53 | |
| 54 | /*------------------------------------------------------------*/ |
| 55 | /*--- Fns ---*/ |
| 56 | /*------------------------------------------------------------*/ |
| 57 | |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 58 | static __inline__ |
| 59 | Bool needs_shadow_chunks ( void ) |
| 60 | { |
| 61 | return VG_(needs).core_errors || |
| 62 | VG_(needs).alternative_free || |
| 63 | VG_(needs).sizeof_shadow_block > 0 || |
| 64 | VG_(track_events).bad_free || |
| 65 | VG_(track_events).mismatched_free || |
| 66 | VG_(track_events).copy_mem_heap || |
| 67 | VG_(track_events).die_mem_heap; |
| 68 | } |
| 69 | |
#ifdef DEBUG_CLIENTMALLOC
/* Debug only: total number of shadow chunks across all hash lists. */
static
Int count_malloclists ( void )
{
   Int          total = 0;
   UInt         list;
   ShadowChunk* node;

   for (list = 0; list < VG_N_MALLOCLISTS; list++) {
      for (node = vg_malloclist[list]; node != NULL; node = node->next) {
         total++;
      }
   }
   return total;
}
#endif
| 84 | |
| 85 | /*------------------------------------------------------------*/ |
| 86 | /*--- Shadow chunks, etc ---*/ |
| 87 | /*------------------------------------------------------------*/ |
| 88 | |
| 89 | /* Allocate a user-chunk of size bytes. Also allocate its shadow |
| 90 | block, make the shadow block point at the user block. Put the |
| 91 | shadow chunk on the appropriate list, and set all memory |
| 92 | protections correctly. */ |
| 93 | static void addShadowChunk ( ThreadState* tst, |
| 94 | Addr p, UInt size, VgAllocKind kind ) |
| 95 | { |
| 96 | ShadowChunk* sc; |
| 97 | UInt ml_no = VG_MALLOCLIST_NO(p); |
| 98 | |
| 99 | # ifdef DEBUG_CLIENTMALLOC |
| 100 | VG_(printf)("[m %d, f %d (%d)] addShadowChunk " |
| 101 | "( sz %d, addr %p, list %d )\n", |
| 102 | count_malloclists(), |
| 103 | 0/*count_freelist()*/, 0/*vg_freed_list_volume*/, |
| 104 | size, p, ml_no ); |
| 105 | # endif |
| 106 | |
| 107 | sc = VG_(arena_malloc)(VG_AR_CORE, |
| 108 | sizeof(ShadowChunk) |
| 109 | + VG_(needs).sizeof_shadow_block); |
| 110 | sc->size = size; |
| 111 | sc->allockind = kind; |
| 112 | sc->data = p; |
| 113 | /* Fill in any skin-specific shadow chunk stuff */ |
| 114 | if (VG_(needs).sizeof_shadow_block > 0) |
| 115 | SK_(complete_shadow_chunk) ( sc, tst ); |
| 116 | |
| 117 | sc->next = vg_malloclist[ml_no]; |
| 118 | vg_malloclist[ml_no] = sc; |
| 119 | } |
| 120 | |
| 121 | /* Get the sc, and return the address of the previous node's next pointer |
| 122 | which allows sc to be removed from the list later without having to look |
| 123 | it up again. */ |
| 124 | static ShadowChunk* getShadowChunk ( Addr a, /*OUT*/ShadowChunk*** next_ptr ) |
| 125 | { |
| 126 | ShadowChunk *prev, *curr; |
| 127 | Int ml_no; |
| 128 | |
| 129 | ml_no = VG_MALLOCLIST_NO(a); |
| 130 | |
| 131 | prev = NULL; |
| 132 | curr = vg_malloclist[ml_no]; |
| 133 | while (True) { |
| 134 | if (curr == NULL) |
| 135 | break; |
| 136 | if (a == curr->data) |
| 137 | break; |
| 138 | prev = curr; |
| 139 | curr = curr->next; |
| 140 | } |
| 141 | |
| 142 | if (NULL == prev) |
| 143 | *next_ptr = &vg_malloclist[ml_no]; |
| 144 | else |
| 145 | *next_ptr = &prev->next; |
| 146 | |
| 147 | return curr; |
| 148 | } |
| 149 | |
njn | 4ba5a79 | 2002-09-30 10:23:54 +0000 | [diff] [blame] | 150 | void VG_(free_ShadowChunk) ( ShadowChunk* sc ) |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 151 | { |
| 152 | VG_(arena_free) ( VG_AR_CLIENT, (void*)sc->data ); |
| 153 | VG_(arena_free) ( VG_AR_CORE, sc ); |
| 154 | } |
| 155 | |
| 156 | |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 157 | /* Allocate a suitably-sized array, copy all the malloc-d block |
| 158 | shadows into it, and return both the array and the size of it. |
| 159 | This is used by the memory-leak detector. |
| 160 | */ |
| 161 | ShadowChunk** VG_(get_malloc_shadows) ( /*OUT*/ UInt* n_shadows ) |
| 162 | { |
| 163 | UInt i, scn; |
| 164 | ShadowChunk** arr; |
| 165 | ShadowChunk* sc; |
| 166 | *n_shadows = 0; |
| 167 | for (scn = 0; scn < VG_N_MALLOCLISTS; scn++) { |
| 168 | for (sc = vg_malloclist[scn]; sc != NULL; sc = sc->next) { |
| 169 | (*n_shadows)++; |
| 170 | } |
| 171 | } |
| 172 | if (*n_shadows == 0) return NULL; |
| 173 | |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 174 | arr = VG_(malloc)( *n_shadows * sizeof(ShadowChunk*) ); |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 175 | |
| 176 | i = 0; |
| 177 | for (scn = 0; scn < VG_N_MALLOCLISTS; scn++) { |
| 178 | for (sc = vg_malloclist[scn]; sc != NULL; sc = sc->next) { |
| 179 | arr[i++] = sc; |
| 180 | } |
| 181 | } |
| 182 | vg_assert(i == *n_shadows); |
| 183 | return arr; |
| 184 | } |
| 185 | |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 186 | Bool VG_(addr_is_in_block)( Addr a, Addr start, UInt size ) |
| 187 | { |
| 188 | return (start - VG_AR_CLIENT_REDZONE_SZB <= a |
| 189 | && a < start + size + VG_AR_CLIENT_REDZONE_SZB); |
| 190 | } |
| 191 | |
| 192 | /* Return the first shadow chunk satisfying the predicate p. */ |
| 193 | ShadowChunk* VG_(any_matching_mallocd_ShadowChunks) |
| 194 | ( Bool (*p) ( ShadowChunk* )) |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 195 | { |
| 196 | UInt ml_no; |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 197 | ShadowChunk* sc; |
| 198 | |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 199 | for (ml_no = 0; ml_no < VG_N_MALLOCLISTS; ml_no++) |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 200 | for (sc = vg_malloclist[ml_no]; sc != NULL; sc = sc->next) |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 201 | if (p(sc)) |
| 202 | return sc; |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 203 | |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 204 | return NULL; |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 205 | } |
| 206 | |
| 207 | |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 208 | /*------------------------------------------------------------*/ |
| 209 | /*--- client_malloc(), etc ---*/ |
| 210 | /*------------------------------------------------------------*/ |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 211 | |
| 212 | /* Allocate memory, noticing whether or not we are doing the full |
| 213 | instrumentation thing. */ |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 214 | static __inline__ |
| 215 | void* alloc_and_new_mem ( ThreadState* tst, UInt size, UInt alignment, |
| 216 | Bool is_zeroed, VgAllocKind kind ) |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 217 | { |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 218 | Addr p; |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 219 | |
| 220 | VGP_PUSHCC(VgpCliMalloc); |
sewardj | 2e93c50 | 2002-04-12 11:12:52 +0000 | [diff] [blame] | 221 | |
| 222 | vg_cmalloc_n_mallocs ++; |
| 223 | vg_cmalloc_bs_mallocd += size; |
| 224 | |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 225 | vg_assert(alignment >= 4); |
| 226 | if (alignment == 4) |
| 227 | p = (Addr)VG_(arena_malloc)(VG_AR_CLIENT, size); |
| 228 | else |
| 229 | p = (Addr)VG_(arena_malloc_aligned)(VG_AR_CLIENT, alignment, size); |
sewardj | 2e93c50 | 2002-04-12 11:12:52 +0000 | [diff] [blame] | 230 | |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 231 | if (needs_shadow_chunks()) |
| 232 | addShadowChunk ( tst, p, size, kind ); |
| 233 | |
| 234 | VG_TRACK( ban_mem_heap, p-VG_AR_CLIENT_REDZONE_SZB, |
| 235 | VG_AR_CLIENT_REDZONE_SZB ); |
| 236 | VG_TRACK( new_mem_heap, p, size, is_zeroed ); |
| 237 | VG_TRACK( ban_mem_heap, p+size, VG_AR_CLIENT_REDZONE_SZB ); |
| 238 | |
| 239 | VGP_POPCC(VgpCliMalloc); |
| 240 | return (void*)p; |
| 241 | } |
| 242 | |
| 243 | void* VG_(client_malloc) ( ThreadState* tst, UInt size, VgAllocKind kind ) |
| 244 | { |
| 245 | void* p = alloc_and_new_mem ( tst, size, VG_(clo_alignment), |
| 246 | /*is_zeroed*/False, kind ); |
| 247 | # ifdef DEBUG_CLIENTMALLOC |
| 248 | VG_(printf)("[m %d, f %d (%d)] client_malloc ( %d, %x ) = %p\n", |
| 249 | count_malloclists(), |
| 250 | 0/*count_freelist()*/, 0/*vg_freed_list_volume*/, |
| 251 | size, kind, p ); |
| 252 | # endif |
| 253 | return p; |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 254 | } |
| 255 | |
| 256 | |
sewardj | 8c82451 | 2002-04-14 04:16:48 +0000 | [diff] [blame] | 257 | void* VG_(client_memalign) ( ThreadState* tst, UInt align, UInt size ) |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 258 | { |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 259 | void* p = alloc_and_new_mem ( tst, size, align, |
| 260 | /*is_zeroed*/False, Vg_AllocMalloc ); |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 261 | # ifdef DEBUG_CLIENTMALLOC |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 262 | VG_(printf)("[m %d, f %d (%d)] client_memalign ( al %d, sz %d ) = %p\n", |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 263 | count_malloclists(), |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 264 | 0/*count_freelist()*/, 0/*vg_freed_list_volume*/, |
| 265 | align, size, p ); |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 266 | # endif |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 267 | return p; |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 268 | } |
| 269 | |
| 270 | |
sewardj | 8c82451 | 2002-04-14 04:16:48 +0000 | [diff] [blame] | 271 | void* VG_(client_calloc) ( ThreadState* tst, UInt nmemb, UInt size1 ) |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 272 | { |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 273 | void* p; |
| 274 | UInt size, i; |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 275 | |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 276 | size = nmemb * size1; |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 277 | |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 278 | p = alloc_and_new_mem ( tst, size, VG_(clo_alignment), |
| 279 | /*is_zeroed*/True, Vg_AllocMalloc ); |
| 280 | /* Must zero block for calloc! */ |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 281 | for (i = 0; i < size; i++) ((UChar*)p)[i] = 0; |
| 282 | |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 283 | # ifdef DEBUG_CLIENTMALLOC |
| 284 | VG_(printf)("[m %d, f %d (%d)] client_calloc ( %d, %d ) = %p\n", |
| 285 | count_malloclists(), |
| 286 | 0/*count_freelist()*/, 0/*vg_freed_list_volume*/, |
| 287 | nmemb, size1, p ); |
| 288 | # endif |
| 289 | |
| 290 | return p; |
| 291 | } |
| 292 | |
| 293 | static |
| 294 | void die_and_free_mem ( ThreadState* tst, ShadowChunk* sc, |
| 295 | ShadowChunk** prev_chunks_next_ptr ) |
| 296 | { |
| 297 | /* Note: ban redzones again -- just in case user de-banned them |
| 298 | with a client request... */ |
| 299 | VG_TRACK( ban_mem_heap, sc->data-VG_AR_CLIENT_REDZONE_SZB, |
| 300 | VG_AR_CLIENT_REDZONE_SZB ); |
| 301 | VG_TRACK( die_mem_heap, sc->data, sc->size ); |
| 302 | VG_TRACK( ban_mem_heap, sc->data+sc->size, VG_AR_CLIENT_REDZONE_SZB ); |
| 303 | |
| 304 | /* Remove sc from the malloclist using prev_chunks_next_ptr to |
| 305 | avoid repeating the hash table lookup. Can't remove until at least |
| 306 | after free and free_mismatch errors are done because they use |
| 307 | describe_addr() which looks for it in malloclist. */ |
| 308 | *prev_chunks_next_ptr = sc->next; |
| 309 | |
| 310 | if (VG_(needs).alternative_free) |
| 311 | SK_(alt_free) ( sc, tst ); |
| 312 | else |
njn | 4ba5a79 | 2002-09-30 10:23:54 +0000 | [diff] [blame] | 313 | VG_(free_ShadowChunk) ( sc ); |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 314 | } |
| 315 | |
| 316 | |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 317 | void VG_(client_free) ( ThreadState* tst, void* p, VgAllocKind kind ) |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 318 | { |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 319 | ShadowChunk* sc; |
| 320 | ShadowChunk** prev_chunks_next_ptr; |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 321 | |
| 322 | VGP_PUSHCC(VgpCliMalloc); |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 323 | |
| 324 | # ifdef DEBUG_CLIENTMALLOC |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 325 | VG_(printf)("[m %d, f %d (%d)] client_free ( %p, %x )\n", |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 326 | count_malloclists(), |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 327 | 0/*count_freelist()*/, 0/*vg_freed_list_volume*/, |
| 328 | p, kind ); |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 329 | # endif |
| 330 | |
sewardj | 2e93c50 | 2002-04-12 11:12:52 +0000 | [diff] [blame] | 331 | vg_cmalloc_n_frees ++; |
sewardj | 2e93c50 | 2002-04-12 11:12:52 +0000 | [diff] [blame] | 332 | |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 333 | if (! needs_shadow_chunks()) { |
| 334 | VG_(arena_free) ( VG_AR_CLIENT, p ); |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 335 | |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 336 | } else { |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 337 | sc = getShadowChunk ( (Addr)p, &prev_chunks_next_ptr ); |
| 338 | |
| 339 | if (sc == NULL) { |
| 340 | VG_TRACK( bad_free, tst, (Addr)p ); |
| 341 | VGP_POPCC(VgpCliMalloc); |
| 342 | return; |
| 343 | } |
| 344 | |
| 345 | /* check if its a matching free() / delete / delete [] */ |
| 346 | if (kind != sc->allockind) |
| 347 | VG_TRACK( mismatched_free, tst, (Addr)p ); |
| 348 | |
| 349 | die_and_free_mem ( tst, sc, prev_chunks_next_ptr ); |
| 350 | } |
| 351 | VGP_POPCC(VgpCliMalloc); |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 352 | } |
| 353 | |
| 354 | |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 355 | void* VG_(client_realloc) ( ThreadState* tst, void* p, UInt new_size ) |
| 356 | { |
| 357 | ShadowChunk *sc; |
| 358 | ShadowChunk **prev_chunks_next_ptr; |
| 359 | UInt i; |
| 360 | |
| 361 | VGP_PUSHCC(VgpCliMalloc); |
| 362 | |
| 363 | vg_cmalloc_n_frees ++; |
| 364 | vg_cmalloc_n_mallocs ++; |
| 365 | vg_cmalloc_bs_mallocd += new_size; |
| 366 | |
| 367 | if (! needs_shadow_chunks()) { |
| 368 | vg_assert(p != NULL && new_size != 0); |
| 369 | p = VG_(arena_realloc) ( VG_AR_CLIENT, p, VG_(clo_alignment), |
| 370 | new_size ); |
| 371 | VGP_POPCC(VgpCliMalloc); |
| 372 | return p; |
| 373 | |
| 374 | } else { |
| 375 | /* First try and find the block. */ |
| 376 | sc = getShadowChunk ( (Addr)p, &prev_chunks_next_ptr ); |
| 377 | |
| 378 | if (sc == NULL) { |
| 379 | VG_TRACK( bad_free, tst, (Addr)p ); |
| 380 | /* Perhaps we should return to the program regardless. */ |
| 381 | VGP_POPCC(VgpCliMalloc); |
| 382 | return NULL; |
| 383 | } |
| 384 | |
| 385 | /* check if its a matching free() / delete / delete [] */ |
| 386 | if (Vg_AllocMalloc != sc->allockind) { |
| 387 | /* can not realloc a range that was allocated with new or new [] */ |
| 388 | VG_TRACK( mismatched_free, tst, (Addr)p ); |
| 389 | /* but keep going anyway */ |
| 390 | } |
| 391 | |
| 392 | if (sc->size == new_size) { |
| 393 | /* size unchanged */ |
| 394 | VGP_POPCC(VgpCliMalloc); |
| 395 | return p; |
| 396 | |
| 397 | } else if (sc->size > new_size) { |
| 398 | /* new size is smaller */ |
| 399 | VG_TRACK( die_mem_heap, sc->data+new_size, sc->size-new_size ); |
| 400 | sc->size = new_size; |
| 401 | VGP_POPCC(VgpCliMalloc); |
| 402 | # ifdef DEBUG_CLIENTMALLOC |
| 403 | VG_(printf)("[m %d, f %d (%d)] client_realloc_smaller ( %p, %d ) = %p\n", |
| 404 | count_malloclists(), |
| 405 | 0/*count_freelist()*/, 0/*vg_freed_list_volume*/, |
| 406 | p, new_size, p ); |
| 407 | # endif |
| 408 | return p; |
| 409 | |
| 410 | } else { |
| 411 | /* new size is bigger */ |
| 412 | Addr p_new; |
| 413 | |
| 414 | /* Get new memory */ |
| 415 | vg_assert(VG_(clo_alignment) >= 4); |
| 416 | if (VG_(clo_alignment) == 4) |
| 417 | p_new = (Addr)VG_(arena_malloc)(VG_AR_CLIENT, new_size); |
| 418 | else |
| 419 | p_new = (Addr)VG_(arena_malloc_aligned)(VG_AR_CLIENT, |
| 420 | VG_(clo_alignment), new_size); |
| 421 | |
| 422 | /* First half kept and copied, second half new, |
| 423 | red zones as normal */ |
| 424 | VG_TRACK( ban_mem_heap, p_new-VG_AR_CLIENT_REDZONE_SZB, |
| 425 | VG_AR_CLIENT_REDZONE_SZB ); |
| 426 | VG_TRACK( copy_mem_heap, (Addr)p, p_new, sc->size ); |
| 427 | VG_TRACK( new_mem_heap, p_new+sc->size, new_size-sc->size, |
| 428 | /*inited=*/False ); |
| 429 | VG_TRACK( ban_mem_heap, p_new+new_size, VG_AR_CLIENT_REDZONE_SZB ); |
| 430 | |
| 431 | /* Copy from old to new */ |
| 432 | for (i = 0; i < sc->size; i++) |
| 433 | ((UChar*)p_new)[i] = ((UChar*)p)[i]; |
| 434 | |
| 435 | /* Free old memory */ |
| 436 | die_and_free_mem ( tst, sc, prev_chunks_next_ptr ); |
| 437 | |
| 438 | /* this has to be after die_and_free_mem, otherwise the |
| 439 | former succeeds in shorting out the new block, not the |
| 440 | old, in the case when both are on the same list. */ |
| 441 | addShadowChunk ( tst, p_new, new_size, Vg_AllocMalloc ); |
| 442 | |
| 443 | VGP_POPCC(VgpCliMalloc); |
| 444 | # ifdef DEBUG_CLIENTMALLOC |
| 445 | VG_(printf)("[m %d, f %d (%d)] client_realloc_bigger ( %p, %d ) = %p\n", |
| 446 | count_malloclists(), |
| 447 | 0/*count_freelist()*/, 0/*vg_freed_list_volume*/, |
| 448 | p, new_size, (void*)p_new ); |
| 449 | # endif |
| 450 | return (void*)p_new; |
| 451 | } |
| 452 | } |
| 453 | } |
| 454 | |
| 455 | void VG_(print_malloc_stats) ( void ) |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 456 | { |
| 457 | UInt nblocks, nbytes, ml_no; |
| 458 | ShadowChunk* sc; |
| 459 | |
njn25 | e49d8e7 | 2002-09-23 09:36:25 +0000 | [diff] [blame] | 460 | if (VG_(clo_verbosity) == 0) |
| 461 | return; |
| 462 | |
| 463 | vg_assert(needs_shadow_chunks()); |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 464 | |
| 465 | nblocks = nbytes = 0; |
| 466 | |
| 467 | for (ml_no = 0; ml_no < VG_N_MALLOCLISTS; ml_no++) { |
| 468 | for (sc = vg_malloclist[ml_no]; sc != NULL; sc = sc->next) { |
| 469 | nblocks ++; |
| 470 | nbytes += sc->size; |
| 471 | } |
| 472 | } |
| 473 | |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 474 | VG_(message)(Vg_UserMsg, |
| 475 | "malloc/free: in use at exit: %d bytes in %d blocks.", |
| 476 | nbytes, nblocks); |
| 477 | VG_(message)(Vg_UserMsg, |
sewardj | 488dc28 | 2002-12-22 19:24:22 +0000 | [diff] [blame] | 478 | "malloc/free: %d allocs, %d frees, %u bytes allocated.", |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 479 | vg_cmalloc_n_mallocs, |
| 480 | vg_cmalloc_n_frees, vg_cmalloc_bs_mallocd); |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 481 | if (0) |
| 482 | VG_(message)(Vg_DebugMsg, |
| 483 | "free search: %d tries, %d frees", |
| 484 | vg_mlist_tries, |
| 485 | vg_mlist_frees ); |
| 486 | if (VG_(clo_verbosity) > 1) |
| 487 | VG_(message)(Vg_UserMsg, ""); |
| 488 | } |
| 489 | |
sewardj | de4a1d0 | 2002-03-22 01:27:54 +0000 | [diff] [blame] | 490 | /*--------------------------------------------------------------------*/ |
| 491 | /*--- end vg_clientmalloc.c ---*/ |
| 492 | /*--------------------------------------------------------------------*/ |