Files updated, added and removed in order to turn the ERASER branch into HEAD


git-svn-id: svn://svn.valgrind.org/valgrind/trunk@1086 a5019735-40e9-0310-863c-91ae7b9d1cf9
diff --git a/coregrind/vg_execontext.c b/coregrind/vg_execontext.c
index 4da1b31..fe85fa0 100644
--- a/coregrind/vg_execontext.c
+++ b/coregrind/vg_execontext.c
@@ -26,11 +26,10 @@
    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
    02111-1307, USA.
 
-   The GNU General Public License is contained in the file LICENSE.
+   The GNU General Public License is contained in the file COPYING.
 */
 
 #include "vg_include.h"
-#include "vg_constants.h"
 
 
 /*------------------------------------------------------------*/
@@ -109,39 +108,40 @@
 
 
 /* Compare two ExeContexts, comparing all callers. */
-Bool VG_(eq_ExeContext_all) ( ExeContext* e1, ExeContext* e2 )
+Bool VG_(eq_ExeContext) ( VgRes res, ExeContext* e1, ExeContext* e2 )
 {
-   vg_ec_cmpAlls++;
-   /* Just do pointer comparison. */
-   if (e1 != e2) return False;
-   return True;
-}
+   if (e1 == NULL || e2 == NULL) 
+      return False;
+   switch (res) {
+   case Vg_LowRes:
+      /* Just compare the top two callers. */
+      vg_ec_cmp2s++;
+      if (e1->eips[0] != e2->eips[0]
+          || e1->eips[1] != e2->eips[1]) return False;
+      return True;
 
+   case Vg_MedRes:
+      /* Just compare the top four callers. */
+      vg_ec_cmp4s++;
+      if (e1->eips[0] != e2->eips[0]
+          || e1->eips[1] != e2->eips[1]) return False;
 
-/* Compare two ExeContexts, just comparing the top two callers. */
-Bool VG_(eq_ExeContext_top2) ( ExeContext* e1, ExeContext* e2 )
-{
-   vg_ec_cmp2s++;
-   if (e1->eips[0] != e2->eips[0]
-       || e1->eips[1] != e2->eips[1]) return False;
-   return True;
-}
+      if (VG_(clo_backtrace_size) < 3) return True;
+      if (e1->eips[2] != e2->eips[2]) return False;
 
+      if (VG_(clo_backtrace_size) < 4) return True;
+      if (e1->eips[3] != e2->eips[3]) return False;
+      return True;
 
-/* Compare two ExeContexts, just comparing the top four callers. */
-Bool VG_(eq_ExeContext_top4) ( ExeContext* e1, ExeContext* e2 )
-{
-   vg_ec_cmp4s++;
-   if (e1->eips[0] != e2->eips[0]
-       || e1->eips[1] != e2->eips[1]) return False;
+   case Vg_HighRes:
+      vg_ec_cmpAlls++;
+      /* Compare them all -- just do pointer comparison. */
+      if (e1 != e2) return False;
+      return True;
 
-   if (VG_(clo_backtrace_size) < 3) return True;
-   if (e1->eips[2] != e2->eips[2]) return False;
-
-   if (VG_(clo_backtrace_size) < 4) return True;
-   if (e1->eips[3] != e2->eips[3]) return False;
-
-   return True;
+   default:
+      VG_(panic)("VG_(eq_ExeContext): unrecognised VgRes");
+   }
 }
 
 
@@ -156,11 +156,12 @@
 
    In order to be thread-safe, we pass in the thread's %EIP and %EBP.
 */
-ExeContext* VG_(get_ExeContext) ( Bool skip_top_frame,
-                                  Addr eip, Addr ebp )
+ExeContext* VG_(get_ExeContext2) ( Addr eip, Addr ebp,
+                                   Addr ebp_min, Addr ebp_max_orig )
 {
    Int         i;
    Addr        eips[VG_DEEPEST_BACKTRACE];
+   Addr        ebp_max;
    Bool        same;
    UInt        hash;
    ExeContext* new_ec;
@@ -173,29 +174,53 @@
 
    /* First snaffle %EIPs from the client's stack into eips[0
       .. VG_(clo_backtrace_size)-1], putting zeroes in when the trail
-      goes cold. */
+      goes cold, which we guess to be when %ebp is not a reasonable
+      stack location.  We also assert that %ebp increases down the chain. */
 
-   for (i = 0; i < VG_(clo_backtrace_size); i++)
+   // Gives shorter stack trace for tests/badjump.c
+   // JRS 2002-aug-16: I don't think this is a big deal; looks ok for
+   // most "normal" backtraces.
+   // NJN 2002-sep-05: traces for pthreaded programs are particularly bad.
+
+   // JRS 2002-sep-17: hack, to round up ebp_max to the end of the
+   // current page, at least.  Dunno if it helps.
+   // NJN 2002-sep-17: seems to -- stack traces look like 1.0.X again
+   ebp_max = (ebp_max_orig + VKI_BYTES_PER_PAGE - 1) 
+                & ~(VKI_BYTES_PER_PAGE - 1);
+   ebp_max -= sizeof(Addr);
+
+   /* Assertion broken before main() is reached in pthreaded programs;  the
+    * offending stack traces only have one item.  --njn, 2002-aug-16 */
+   /* vg_assert(ebp_min <= ebp_max);*/
+
+   /* Checks the stack isn't ridiculously big */
+   vg_assert(ebp_min + 4000000 > ebp_max);
+
+   //   VG_(printf)("%p -> %p\n", ebp_max_orig, ebp_max);
+   eips[0] = eip;
+   //   VG_(printf)("\nSNAP: %p .. %p, EBP=%p\n", ebp_min, ebp_max, ebp  );
+   //   VG_(printf)("   : %p\n", eips[0]);
+   /* Get whatever we safely can ... */
+   for (i = 1; i < VG_(clo_backtrace_size); i++) {
+      if (!(ebp_min <= ebp && ebp <= ebp_max)) {
+         //VG_(printf)("... out of range %p\n", ebp);
+         break; /* ebp gone baaaad */
+      }
+      // NJN 2002-sep-17: monotonicity doesn't work -- gives wrong traces...
+      //     if (ebp >= ((UInt*)ebp)[0]) {
+      //   VG_(printf)("nonmonotonic\n");
+      //    break; /* ebp gone nonmonotonic */
+      // }
+      eips[i] = ((UInt*)ebp)[1];  /* ret addr */
+      ebp     = ((UInt*)ebp)[0];  /* old ebp */
+      //VG_(printf)("     %p\n", eips[i]);
+   }
+
+   /* Put zeroes in the rest. */
+   for (;  i < VG_(clo_backtrace_size); i++) {
       eips[i] = 0;
-   
-#  define GET_CALLER(lval)                                        \
-   if (ebp != 0 && VGM_(check_readable)(ebp, 8, NULL)) {          \
-      lval = ((UInt*)ebp)[1];  /* ret addr */                     \
-      ebp  = ((UInt*)ebp)[0];  /* old ebp */                      \
-   } else {                                                       \
-      lval = ebp = 0;                                             \
    }
 
-   if (skip_top_frame) {
-      for (i = 0; i < VG_(clo_backtrace_size); i++)
-         GET_CALLER(eips[i]);
-   } else {
-      eips[0] = eip;
-      for (i = 1; i < VG_(clo_backtrace_size); i++)
-         GET_CALLER(eips[i]);
-   }
-#  undef GET_CALLER
-
    /* Now figure out if we've seen this one before.  First hash it so
       as to determine the list number. */
 
@@ -228,19 +253,16 @@
 
    if (list != NULL) {
       /* Yay!  We found it.  */
-      VGP_POPCC;
+      VGP_POPCC(VgpExeContext);
       return list;
    }
 
    /* Bummer.  We have to allocate a new context record. */
    vg_ec_totstored++;
 
-   new_ec 
-      = VG_(malloc)( 
-           VG_AR_EXECTXT, 
-           sizeof(struct _ExeContextRec *) 
-              + VG_(clo_backtrace_size) * sizeof(Addr) 
-        );
+   new_ec = VG_(arena_malloc)( VG_AR_EXECTXT, 
+                               sizeof(struct _ExeContext *) 
+                               + VG_(clo_backtrace_size) * sizeof(Addr) );
 
    for (i = 0; i < VG_(clo_backtrace_size); i++)
       new_ec->eips[i] = eips[i];
@@ -248,10 +270,16 @@
    new_ec->next = vg_ec_list[hash];
    vg_ec_list[hash] = new_ec;
 
-   VGP_POPCC;
+   VGP_POPCC(VgpExeContext);
    return new_ec;
 }
 
+ExeContext* VG_(get_ExeContext) ( ThreadState *tst )
+{
+   return VG_(get_ExeContext2)( tst->m_eip, tst->m_ebp, tst->m_esp, 
+                                tst->stack_highest_word );
+}
+
 
 /*--------------------------------------------------------------------*/
 /*--- end                                          vg_execontext.c ---*/