Enhance the IR to distinguish between little- and big-endian loads and
stores, so that PPC can be handled properly.  Until now the IR has been
hardwired to assume little-endian.

As a result, IRStmt_STle is renamed to IRStmt_Store and IRExpr_LDle to
IRExpr_Load, and both now carry an explicit IREndness field (Iend_LE or
Iend_BE).
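
For front ends, the practical change is that every load and store must
now state its endianness when it is built.  A minimal sketch of the new
usage (constructor argument order is assumed here; t_addr and t_data
are hypothetical IRTemps holding an address and a value):

   /* Sketch only -- argument order assumed from the renamed
      constructors.  Build a little-endian 32-bit load from the
      address in t_addr, and a store of t_data back to that address. */
   IRExpr* ld = IRExpr_Load( Iend_LE, Ity_I32, IRExpr_Tmp(t_addr) );
   IRStmt* st = IRStmt_Store( Iend_LE, IRExpr_Tmp(t_addr),
                                       IRExpr_Tmp(t_data) );

Back ends that only support one endianness (such as amd64 below) are
expected to inspect the new .end field and bail out on the other case.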

git-svn-id: svn://svn.valgrind.org/vex/trunk@1239 8f6e269a-dfd6-0310-a8e1-e2731360e62c
diff --git a/priv/host-amd64/isel.c b/priv/host-amd64/isel.c
index 9c4ff45..bff3c21 100644
--- a/priv/host-amd64/isel.c
+++ b/priv/host-amd64/isel.c
@@ -797,9 +797,13 @@
    }
 
    /* --------- LOAD --------- */
-   case Iex_LDle: {
+   case Iex_Load: {
       HReg dst = newVRegI(env);
-      AMD64AMode* amode = iselIntExpr_AMode ( env, e->Iex.LDle.addr );
+      AMD64AMode* amode = iselIntExpr_AMode ( env, e->Iex.Load.addr );
+
+      if (e->Iex.Load.end != Iend_LE)
+         goto irreducible;
+
       if (ty == Ity_I64) {
          addInstr(env, AMD64Instr_Alu64R(Aalu_MOV,
                                          AMD64RMI_Mem(amode), dst) );
@@ -1777,8 +1781,8 @@
    }
 
    /* special case: 64-bit load from memory */
-   if (e->tag == Iex_LDle && ty == Ity_I64) {
-      AMD64AMode* am = iselIntExpr_AMode(env, e->Iex.LDle.addr);
+   if (e->tag == Iex_Load && ty == Ity_I64 && e->Iex.Load.end == Iend_LE) {
+      AMD64AMode* am = iselIntExpr_AMode(env, e->Iex.Load.addr);
       return AMD64RMI_Mem(am);
    }
 
@@ -2629,11 +2633,11 @@
       return lookupIRTemp(env, e->Iex.Tmp.tmp);
    }
 
-   if (e->tag == Iex_LDle) {
+   if (e->tag == Iex_Load && e->Iex.Load.end == Iend_LE) {
       AMD64AMode* am;
       HReg res = newVRegV(env);
-      vassert(e->Iex.LDle.ty == Ity_F32);
-      am = iselIntExpr_AMode(env, e->Iex.LDle.addr);
+      vassert(e->Iex.Load.ty == Ity_F32);
+      am = iselIntExpr_AMode(env, e->Iex.Load.addr);
       addInstr(env, AMD64Instr_SseLdSt(True/*load*/, 4, res, am));
       return res;
    }
@@ -2753,11 +2757,11 @@
       return res;
    }
 
-   if (e->tag == Iex_LDle) {
+   if (e->tag == Iex_Load && e->Iex.Load.end == Iend_LE) {
       AMD64AMode* am;
       HReg res = newVRegV(env);
-      vassert(e->Iex.LDle.ty == Ity_F64);
-      am = iselIntExpr_AMode(env, e->Iex.LDle.addr);
+      vassert(e->Iex.Load.ty == Ity_F64);
+      am = iselIntExpr_AMode(env, e->Iex.Load.addr);
       addInstr(env, AMD64Instr_SseLdSt( True/*load*/, 8, res, am ));
       return res;
    }
@@ -3071,9 +3075,9 @@
       return dst;
    }
 
-   if (e->tag == Iex_LDle) {
+   if (e->tag == Iex_Load && e->Iex.Load.end == Iend_LE) {
       HReg        dst = newVRegV(env);
-      AMD64AMode* am  = iselIntExpr_AMode(env, e->Iex.LDle.addr);
+      AMD64AMode* am  = iselIntExpr_AMode(env, e->Iex.Load.addr);
       addInstr(env, AMD64Instr_SseLdSt( True/*load*/, 16, dst, am ));
       return dst;
    }
@@ -3488,38 +3492,42 @@
    switch (stmt->tag) {
 
    /* --------- STORE --------- */
-   case Ist_STle: {
+   case Ist_Store: {
       AMD64AMode* am;
-      IRType tya = typeOfIRExpr(env->type_env, stmt->Ist.STle.addr);
-      IRType tyd = typeOfIRExpr(env->type_env, stmt->Ist.STle.data);
-      vassert(tya == Ity_I64);
-      am = iselIntExpr_AMode(env, stmt->Ist.STle.addr);
+      IRType    tya = typeOfIRExpr(env->type_env, stmt->Ist.Store.addr);
+      IRType    tyd = typeOfIRExpr(env->type_env, stmt->Ist.Store.data);
+      IREndness end = stmt->Ist.Store.end;
+
+      if (tya != Ity_I64 || end != Iend_LE) 
+         goto stmt_fail;
+
+      am = iselIntExpr_AMode(env, stmt->Ist.Store.addr);
       if (tyd == Ity_I64) {
-         AMD64RI* ri = iselIntExpr_RI(env, stmt->Ist.STle.data);
+         AMD64RI* ri = iselIntExpr_RI(env, stmt->Ist.Store.data);
          addInstr(env, AMD64Instr_Alu64M(Aalu_MOV,ri,am));
          return;
       }
       if (tyd == Ity_I8 || tyd == Ity_I16 || tyd == Ity_I32) {
-         HReg r = iselIntExpr_R(env, stmt->Ist.STle.data);
+         HReg r = iselIntExpr_R(env, stmt->Ist.Store.data);
          addInstr(env, AMD64Instr_Store(
                           toUChar(tyd==Ity_I8 ? 1 : (tyd==Ity_I16 ? 2 : 4)),
                           r,am));
          return;
       }
       if (tyd == Ity_F64) {
-         HReg r = iselDblExpr(env, stmt->Ist.STle.data);
+         HReg r = iselDblExpr(env, stmt->Ist.Store.data);
          addInstr(env, AMD64Instr_SseLdSt(False/*store*/, 8, r, am));
          return;
       }
       if (tyd == Ity_F32) {
-         HReg r = iselFltExpr(env, stmt->Ist.STle.data);
+         HReg r = iselFltExpr(env, stmt->Ist.Store.data);
          addInstr(env, AMD64Instr_SseLdSt(False/*store*/, 4, r, am));
          return;
       }
 //..       if (tyd == Ity_I64) {
 //..          HReg vHi, vLo, rA;
-//..          iselInt64Expr(&vHi, &vLo, env, stmt->Ist.STle.data);
-//..          rA = iselIntExpr_R(env, stmt->Ist.STle.addr);
+//..          iselInt64Expr(&vHi, &vLo, env, stmt->Ist.Store.data);
+//..          rA = iselIntExpr_R(env, stmt->Ist.Store.addr);
 //..          addInstr(env, X86Instr_Alu32M(
 //..                           Xalu_MOV, X86RI_Reg(vLo), X86AMode_IR(0, rA)));
 //..          addInstr(env, X86Instr_Alu32M(
@@ -3527,7 +3535,7 @@
 //..          return;
 //..       }
       if (tyd == Ity_V128) {
-         HReg r = iselVecExpr(env, stmt->Ist.STle.data);
+         HReg r = iselVecExpr(env, stmt->Ist.Store.data);
          addInstr(env, AMD64Instr_SseLdSt(False/*store*/, 16, r, am));
          return;
       }
@@ -3720,6 +3728,7 @@
 
    default: break;
    }
+  stmt_fail:
    ppIRStmt(stmt);
    vpanic("iselStmt(amd64)");
 }