// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

| 28 | #include "hydrogen-alias-analysis.h" |
| 29 | #include "hydrogen-load-elimination.h" |
| 30 | #include "hydrogen-instructions.h" |
| 31 | |
| 32 | namespace v8 { |
| 33 | namespace internal { |
| 34 | |
| 35 | static const int kMaxTrackedFields = 16; |
| 36 | static const int kMaxTrackedObjects = 5; |
| 37 | |
| 38 | // An element in the field approximation list. |
| 39 | class HFieldApproximation : public ZoneObject { |
| 40 | public: // Just a data blob. |
| 41 | HValue* object_; |
| 42 | HLoadNamedField* last_load_; |
| 43 | HValue* last_value_; |
| 44 | HFieldApproximation* next_; |
| 45 | }; |
| 46 | |
| 47 | |
| 48 | // The main datastructure used during load/store elimination. Each in-object |
| 49 | // field is tracked separately. For each field, store a list of known field |
| 50 | // values for known objects. |
| 51 | class HLoadEliminationTable BASE_EMBEDDED { |
| 52 | public: |
| 53 | HLoadEliminationTable(Zone* zone, HAliasAnalyzer* aliasing) |
| 54 | : zone_(zone), fields_(kMaxTrackedFields, zone), aliasing_(aliasing) { } |
| 55 | |
| 56 | // Process a load instruction, updating internal table state. If a previous |
| 57 | // load or store for this object and field exists, return the new value with |
| 58 | // which the load should be replaced. Otherwise, return {instr}. |
| 59 | HValue* load(HLoadNamedField* instr) { |
| 60 | int field = FieldOf(instr->access()); |
| 61 | if (field < 0) return instr; |
| 62 | |
| 63 | HValue* object = instr->object()->ActualValue(); |
| 64 | HFieldApproximation* approx = FindOrCreate(object, field); |
| 65 | |
| 66 | if (approx->last_value_ == NULL) { |
| 67 | // Load is not redundant. Fill out a new entry. |
| 68 | approx->last_load_ = instr; |
| 69 | approx->last_value_ = instr; |
| 70 | return instr; |
| 71 | } else { |
| 72 | // Eliminate the load. Reuse previously stored value or load instruction. |
| 73 | return approx->last_value_; |
| 74 | } |
| 75 | } |
| 76 | |
| 77 | // Process a store instruction, updating internal table state. If a previous |
| 78 | // store to the same object and field makes this store redundant (e.g. because |
| 79 | // the stored values are the same), return NULL indicating that this store |
| 80 | // instruction is redundant. Otherwise, return {instr}. |
| 81 | HValue* store(HStoreNamedField* instr) { |
| 82 | int field = FieldOf(instr->access()); |
| 83 | if (field < 0) return instr; |
| 84 | |
| 85 | HValue* object = instr->object()->ActualValue(); |
| 86 | HValue* value = instr->value(); |
| 87 | |
| 88 | // Kill non-equivalent may-alias entries. |
| 89 | KillFieldInternal(object, field, value); |
| 90 | if (instr->has_transition()) { |
| 91 | // A transition store alters the map of the object. |
| 92 | // TODO(titzer): remember the new map (a constant) for the object. |
| 93 | KillFieldInternal(object, FieldOf(JSObject::kMapOffset), NULL); |
| 94 | } |
| 95 | HFieldApproximation* approx = FindOrCreate(object, field); |
| 96 | |
| 97 | if (Equal(approx->last_value_, value)) { |
| 98 | // The store is redundant because the field already has this value. |
| 99 | return NULL; |
| 100 | } else { |
| 101 | // The store is not redundant. Update the entry. |
| 102 | approx->last_load_ = NULL; |
| 103 | approx->last_value_ = value; |
| 104 | return instr; |
| 105 | } |
| 106 | } |
| 107 | |
| 108 | // Kill everything in this table. |
| 109 | void Kill() { |
| 110 | fields_.Rewind(0); |
| 111 | } |
| 112 | |
| 113 | // Kill all entries matching the given offset. |
| 114 | void KillOffset(int offset) { |
| 115 | int field = FieldOf(offset); |
| 116 | if (field >= 0 && field < fields_.length()) { |
| 117 | fields_[field] = NULL; |
| 118 | } |
| 119 | } |
| 120 | |
| 121 | // Compute the field index for the given object access; -1 if not tracked. |
| 122 | int FieldOf(HObjectAccess access) { |
| 123 | // Only track kMaxTrackedFields in-object fields. |
| 124 | if (!access.IsInobject()) return -1; |
| 125 | return FieldOf(access.offset()); |
| 126 | } |
| 127 | |
| 128 | // Print this table to stdout. |
| 129 | void Print() { |
| 130 | for (int i = 0; i < fields_.length(); i++) { |
| 131 | PrintF(" field %d: ", i); |
| 132 | for (HFieldApproximation* a = fields_[i]; a != NULL; a = a->next_) { |
| 133 | PrintF("[o%d =", a->object_->id()); |
| 134 | if (a->last_load_ != NULL) PrintF(" L%d", a->last_load_->id()); |
| 135 | if (a->last_value_ != NULL) PrintF(" v%d", a->last_value_->id()); |
| 136 | PrintF("] "); |
| 137 | } |
| 138 | PrintF("\n"); |
| 139 | } |
| 140 | } |
| 141 | |
| 142 | private: |
| 143 | // Find or create an entry for the given object and field pair. |
| 144 | HFieldApproximation* FindOrCreate(HValue* object, int field) { |
| 145 | EnsureFields(field + 1); |
| 146 | |
| 147 | // Search for a field approximation for this object. |
| 148 | HFieldApproximation* approx = fields_[field]; |
| 149 | int count = 0; |
| 150 | while (approx != NULL) { |
| 151 | if (aliasing_->MustAlias(object, approx->object_)) return approx; |
| 152 | count++; |
| 153 | approx = approx->next_; |
| 154 | } |
| 155 | |
| 156 | if (count >= kMaxTrackedObjects) { |
| 157 | // Pull the last entry off the end and repurpose it for this object. |
| 158 | approx = ReuseLastApproximation(field); |
| 159 | } else { |
| 160 | // Allocate a new entry. |
| 161 | approx = new(zone_) HFieldApproximation(); |
| 162 | } |
| 163 | |
| 164 | // Insert the entry at the head of the list. |
| 165 | approx->object_ = object; |
| 166 | approx->last_load_ = NULL; |
| 167 | approx->last_value_ = NULL; |
| 168 | approx->next_ = fields_[field]; |
| 169 | fields_[field] = approx; |
| 170 | |
| 171 | return approx; |
| 172 | } |
| 173 | |
| 174 | // Kill all entries for a given field that _may_ alias the given object |
| 175 | // and do _not_ have the given value. |
| 176 | void KillFieldInternal(HValue* object, int field, HValue* value) { |
| 177 | if (field >= fields_.length()) return; // Nothing to do. |
| 178 | |
| 179 | HFieldApproximation* approx = fields_[field]; |
| 180 | HFieldApproximation* prev = NULL; |
| 181 | while (approx != NULL) { |
| 182 | if (aliasing_->MayAlias(object, approx->object_)) { |
| 183 | if (!Equal(approx->last_value_, value)) { |
| 184 | // Kill an aliasing entry that doesn't agree on the value. |
| 185 | if (prev != NULL) { |
| 186 | prev->next_ = approx->next_; |
| 187 | } else { |
| 188 | fields_[field] = approx->next_; |
| 189 | } |
| 190 | approx = approx->next_; |
| 191 | continue; |
| 192 | } |
| 193 | } |
| 194 | prev = approx; |
| 195 | approx = approx->next_; |
| 196 | } |
| 197 | } |
| 198 | |
| 199 | bool Equal(HValue* a, HValue* b) { |
| 200 | if (a == b) return true; |
| 201 | if (a != NULL && b != NULL) return a->Equals(b); |
| 202 | return false; |
| 203 | } |
| 204 | |
| 205 | // Remove the last approximation for a field so that it can be reused. |
| 206 | // We reuse the last entry because it was the first inserted and is thus |
| 207 | // farthest away from the current instruction. |
| 208 | HFieldApproximation* ReuseLastApproximation(int field) { |
| 209 | HFieldApproximation* approx = fields_[field]; |
| 210 | ASSERT(approx != NULL); |
| 211 | |
| 212 | HFieldApproximation* prev = NULL; |
| 213 | while (approx->next_ != NULL) { |
| 214 | prev = approx; |
| 215 | approx = approx->next_; |
| 216 | } |
| 217 | if (prev != NULL) prev->next_ = NULL; |
| 218 | return approx; |
| 219 | } |
| 220 | |
| 221 | // Ensure internal storage for the given number of fields. |
| 222 | void EnsureFields(int num_fields) { |
| 223 | while (fields_.length() < num_fields) fields_.Add(NULL, zone_); |
| 224 | } |
| 225 | |
| 226 | // Compute the field index for the given in-object offset. |
| 227 | int FieldOf(int offset) { |
| 228 | if (offset >= kMaxTrackedFields * kPointerSize) return -1; |
| 229 | ASSERT((offset % kPointerSize) == 0); // Assume aligned accesses. |
| 230 | return offset / kPointerSize; |
| 231 | } |
| 232 | |
| 233 | Zone* zone_; |
| 234 | ZoneList<HFieldApproximation*> fields_; |
| 235 | HAliasAnalyzer* aliasing_; |
| 236 | }; |
| 237 | |
| 238 | |
| 239 | void HLoadEliminationPhase::Run() { |
| 240 | for (int i = 0; i < graph()->blocks()->length(); i++) { |
| 241 | HBasicBlock* block = graph()->blocks()->at(i); |
| 242 | EliminateLoads(block); |
| 243 | } |
| 244 | } |
| 245 | |
| 246 | |
| 247 | // For code de-uglification. |
| 248 | #define TRACE(x) if (FLAG_trace_load_elimination) PrintF x |
| 249 | |
| 250 | |
| 251 | // Eliminate loads and stores local to a block. |
| 252 | void HLoadEliminationPhase::EliminateLoads(HBasicBlock* block) { |
| 253 | HAliasAnalyzer aliasing; |
| 254 | HLoadEliminationTable table(zone(), &aliasing); |
| 255 | |
| 256 | TRACE(("-- load-elim B%d -------------------------------------------------\n", |
| 257 | block->block_id())); |
| 258 | |
| 259 | for (HInstructionIterator it(block); !it.Done(); it.Advance()) { |
| 260 | bool changed = false; |
| 261 | HInstruction* instr = it.Current(); |
| 262 | |
| 263 | switch (instr->opcode()) { |
| 264 | case HValue::kLoadNamedField: { |
| 265 | HLoadNamedField* load = HLoadNamedField::cast(instr); |
| 266 | TRACE((" process L%d field %d (o%d)\n", |
| 267 | instr->id(), |
| 268 | table.FieldOf(load->access()), |
| 269 | load->object()->ActualValue()->id())); |
| 270 | HValue* result = table.load(load); |
| 271 | if (result != instr) { |
| 272 | // The load can be replaced with a previous load or a value. |
| 273 | TRACE((" replace L%d -> v%d\n", instr->id(), result->id())); |
| 274 | instr->DeleteAndReplaceWith(result); |
| 275 | } |
| 276 | changed = true; |
| 277 | break; |
| 278 | } |
| 279 | case HValue::kStoreNamedField: { |
| 280 | HStoreNamedField* store = HStoreNamedField::cast(instr); |
| 281 | TRACE((" process S%d field %d (o%d) = v%d\n", |
| 282 | instr->id(), |
| 283 | table.FieldOf(store->access()), |
| 284 | store->object()->ActualValue()->id(), |
| 285 | store->value()->id())); |
| 286 | HValue* result = table.store(store); |
| 287 | if (result == NULL) { |
| 288 | // The store is redundant. Remove it. |
| 289 | TRACE((" remove S%d\n", instr->id())); |
| 290 | instr->DeleteAndReplaceWith(NULL); |
| 291 | } |
| 292 | changed = true; |
| 293 | break; |
| 294 | } |
| 295 | default: { |
| 296 | if (instr->CheckGVNFlag(kChangesInobjectFields)) { |
| 297 | TRACE((" kill-all i%d\n", instr->id())); |
| 298 | table.Kill(); |
| 299 | continue; |
| 300 | } |
| 301 | if (instr->CheckGVNFlag(kChangesMaps)) { |
| 302 | TRACE((" kill-maps i%d\n", instr->id())); |
| 303 | table.KillOffset(JSObject::kMapOffset); |
| 304 | } |
| 305 | if (instr->CheckGVNFlag(kChangesElementsKind)) { |
| 306 | TRACE((" kill-elements-kind i%d\n", instr->id())); |
| 307 | table.KillOffset(JSObject::kMapOffset); |
| 308 | table.KillOffset(JSObject::kElementsOffset); |
| 309 | } |
| 310 | if (instr->CheckGVNFlag(kChangesElementsPointer)) { |
| 311 | TRACE((" kill-elements i%d\n", instr->id())); |
| 312 | table.KillOffset(JSObject::kElementsOffset); |
| 313 | } |
| 314 | } |
| 315 | // Improvements possible: |
| 316 | // - learn from HCheckMaps for field 0 |
| 317 | // - remove unobservable stores (write-after-write) |
| 318 | } |
| 319 | |
| 320 | if (changed && FLAG_trace_load_elimination) { |
| 321 | table.Print(); |
| 322 | } |
| 323 | } |
| 324 | } |
| 325 | |
| 326 | |
| 327 | } } // namespace v8::internal |