blob: cbdd689fe8b38e53a66092838b1b57ace8749559 [file] [log] [blame]
J. Duke81537792007-12-01 00:00:00 +00001/*
Tobias Hartmann70a55ea2014-08-05 09:58:52 +02002 * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
J. Duke81537792007-12-01 00:00:00 +00003 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
Erik Trimbleba7c1732010-05-27 19:08:38 -070019 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
J. Duke81537792007-12-01 00:00:00 +000022 *
23 */
24
Stefan Karlsson8006fe82010-11-23 13:22:55 -080025#ifndef SHARE_VM_OPTO_MEMNODE_HPP
26#define SHARE_VM_OPTO_MEMNODE_HPP
27
28#include "opto/multnode.hpp"
29#include "opto/node.hpp"
30#include "opto/opcodes.hpp"
31#include "opto/type.hpp"
32
J. Duke81537792007-12-01 00:00:00 +000033// Portions of code courtesy of Clifford Click
34
35class MultiNode;
36class PhaseCCP;
37class PhaseTransform;
38
//------------------------------MemNode----------------------------------------
// Load or Store, possibly throwing a NULL pointer exception.
// Common base class for all memory-accessing ideal-graph nodes; holds the
// input-edge layout shared by loads and stores and (in ASSERT builds) a
// cross-check copy of the address type.
class MemNode : public Node {
protected:
#ifdef ASSERT
  const TypePtr* _adr_type;     // What kind of memory is being addressed?
#endif
  virtual uint size_of() const; // Size is bigger (ASSERT only)
public:
  // Well-known input edge indices for every MemNode.
  enum { Control,               // When is it safe to do this load?
         Memory,                // Chunk of memory is being loaded from
         Address,               // Actually address, derived from base
         ValueIn,               // Value to store
         OopStore               // Preceding oop store, only in StoreCM
  };
  // Required memory-ordering semantics of this access.
  typedef enum { unordered = 0,
                 acquire,       // Load has to acquire or be succeeded by MemBarAcquire.
                 release        // Store has to release or be preceded by MemBarRelease.
  } MemOrd;
protected:
  // Constructors for 3-, 4- and 5-input nodes; in ASSERT builds they record
  // the address type and immediately cross-check it via adr_type().
  MemNode( Node *c0, Node *c1, Node *c2, const TypePtr* at )
    : Node(c0,c1,c2   ) {
    init_class_id(Class_Mem);
    debug_only(_adr_type=at; adr_type();)
  }
  MemNode( Node *c0, Node *c1, Node *c2, const TypePtr* at, Node *c3 )
    : Node(c0,c1,c2,c3) {
    init_class_id(Class_Mem);
    debug_only(_adr_type=at; adr_type();)
  }
  MemNode( Node *c0, Node *c1, Node *c2, const TypePtr* at, Node *c3, Node *c4)
    : Node(c0,c1,c2,c3,c4) {
    init_class_id(Class_Mem);
    debug_only(_adr_type=at; adr_type();)
  }

public:
  // Helpers for the optimizer.  Documented in memnode.cpp.
  static bool detect_ptr_independence(Node* p1, AllocateNode* a1,
                                      Node* p2, AllocateNode* a2,
                                      PhaseTransform* phase);
  static bool adr_phi_is_loop_invariant(Node* adr_phi, Node* cast);

  static Node *optimize_simple_memory_chain(Node *mchain, const TypeOopPtr *t_oop, Node *load, PhaseGVN *phase);
  static Node *optimize_memory_chain(Node *mchain, const TypePtr *t_adr, Node *load, PhaseGVN *phase);
  // This one should probably be a phase-specific function:
  static bool all_controls_dominate(Node* dom, Node* sub);

  // Find any cast-away of null-ness and keep its control.
  static  Node *Ideal_common_DU_postCCP( PhaseCCP *ccp, Node* n, Node* adr );
  virtual Node *Ideal_DU_postCCP( PhaseCCP *ccp );

  virtual const class TypePtr *adr_type() const;  // returns bottom_type of address

  // Shared code for Ideal methods:
  Node *Ideal_common(PhaseGVN *phase, bool can_reshape);  // Return -1 for short-circuit NULL.

  // Helper function for adr_type() implementations.
  static const TypePtr* calculate_adr_type(const Type* t, const TypePtr* cross_check = NULL);

  // Raw access function, to allow copying of adr_type efficiently in
  // product builds and retain the debug info for debug builds.
  const TypePtr *raw_adr_type() const {
#ifdef ASSERT
    return _adr_type;
#else
    return 0;
#endif
  }

  // Map a load or store opcode to its corresponding store opcode.
  // (Return -1 if unknown.)
  virtual int store_Opcode() const { return -1; }

  // What is the type of the value in memory?  (T_VOID mean "unspecified".)
  virtual BasicType memory_type() const = 0;
  // Size in bytes of the value in memory; ASSERT builds also allow T_VOID/T_ADDRESS.
  virtual int memory_size() const {
#ifdef ASSERT
    return type2aelembytes(memory_type(), true);
#else
    return type2aelembytes(memory_type());
#endif
  }

  // Search through memory states which precede this node (load or store).
  // Look for an exact match for the address, with no intervening
  // aliased stores.
  Node* find_previous_store(PhaseTransform* phase);

  // Can this node (load or store) accurately see a stored value in
  // the given memory state?  (The state may or may not be in(Memory).)
  Node* can_see_stored_value(Node* st, PhaseTransform* phase) const;

#ifndef PRODUCT
  static void dump_adr_type(const Node* mem, const TypePtr* adr_type, outputStream *st);
  virtual void dump_spec(outputStream *st) const;
#endif
};
137
//------------------------------LoadNode---------------------------------------
// Load value; requires Memory and Address.
// Base class of all load nodes; carries the loaded value's type and the
// memory-ordering constraint (unordered vs. acquire).
class LoadNode : public MemNode {
private:
  // On platforms with weak memory ordering (e.g., PPC, Ia64) we distinguish
  // loads that can be reordered, and such requiring acquire semantics to
  // adhere to the Java specification.  The required behaviour is stored in
  // this field.
  const MemOrd _mo;

protected:
  virtual uint cmp(const Node &n) const;
  virtual uint size_of() const; // Size is bigger
  // Should LoadNode::Ideal() attempt to remove control edges?
  virtual bool can_remove_control() const;
  const Type* const _type;      // What kind of value is loaded?
public:

  LoadNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const Type *rt, MemOrd mo)
    : MemNode(c,mem,adr,at), _type(rt), _mo(mo) {
    init_class_id(Class_Load);
  }
  // Ordering queries; _mo for a load is only ever unordered or acquire.
  inline bool is_unordered() const { return !is_acquire(); }
  inline bool is_acquire() const {
    assert(_mo == unordered || _mo == acquire, "unexpected");
    return _mo == acquire;
  }

  // Polymorphic factory method:
  static Node* make(PhaseGVN& gvn, Node *c, Node *mem, Node *adr,
                    const TypePtr* at, const Type *rt, BasicType bt, MemOrd mo);

  virtual uint hash() const;    // Check the type

  // Handle algebraic identities here.  If we have an identity, return the Node
  // we are equivalent to.  We look for Load of a Store.
  virtual Node *Identity( PhaseTransform *phase );

  // If the load is from Field memory and the pointer is non-null, it might be possible to
  // zero out the control input.
  // If the offset is constant and the base is an object allocation,
  // try to hook me up to the exact initializing store.
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);

  // Split instance field load through Phi.
  Node* split_through_phi(PhaseGVN *phase);

  // Recover original value from boxed values
  Node *eliminate_autobox(PhaseGVN *phase);

  // Compute a new Type for this node.  Basically we just do the pre-check,
  // then call the virtual add() to set the type.
  virtual const Type *Value( PhaseTransform *phase ) const;

  // Common methods for LoadKlass and LoadNKlass nodes.
  const Type *klass_value_common( PhaseTransform *phase ) const;
  Node *klass_identity_common( PhaseTransform *phase );

  virtual uint ideal_reg() const;
  virtual const Type *bottom_type() const;
  // Following method is copied from TypeNode:
  void set_type(const Type* t) {
    assert(t != NULL, "sanity");
    debug_only(uint check_hash = (VerifyHashTableKeys && _hash_lock) ? hash() : NO_HASH);
    *(const Type**)&_type = t;  // cast away const-ness
    // If this node is in the hash table, make sure it doesn't need a rehash.
    assert(check_hash == NO_HASH || check_hash == hash(), "type change must preserve hash code");
  }
  const Type* type() const { assert(_type != NULL, "sanity"); return _type; };

  // Do not match memory edge
  virtual uint match_edge(uint idx) const;

  // Map a load opcode to its corresponding store opcode.
  virtual int store_Opcode() const = 0;

  // Check if the load's memory input is a Phi node with the same control.
  bool is_instance_field_load_with_local_phi(Node* ctrl);

#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
#ifdef ASSERT
  // Helper function to allow a raw load without control edge for some cases
  static bool is_immutable_value(Node* adr);
#endif
protected:
  const Type* load_array_final_field(const TypeKlassPtr *tkls,
                                     ciKlass* klass) const;
  // depends_only_on_test is almost always true, and needs to be almost always
  // true to enable key hoisting & commoning optimizations.  However, for the
  // special case of RawPtr loads from TLS top & end, and other loads performed by
  // GC barriers, the control edge carries the dependence preventing hoisting past
  // a Safepoint instead of the memory edge.  (An unfortunate consequence of having
  // Safepoints not set Raw Memory; itself an unfortunate consequence of having Nodes
  // which produce results (new raw memory state) inside of loops preventing all
  // manner of other optimizations).  Basically, it's ugly but so is the alternative.
  // See comment in macro.cpp, around line 125 expand_allocate_common().
  virtual bool depends_only_on_test() const { return adr_type() != TypeRawPtr::BOTTOM; }

};
239
//------------------------------LoadBNode--------------------------------------
// Load a byte (8 bits signed) from memory.
class LoadBNode : public LoadNode {
public:
  LoadBNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeInt *ti, MemOrd mo)
    : LoadNode(c, mem, adr, at, ti, mo) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegI; }
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type *Value(PhaseTransform *phase) const;
  virtual int store_Opcode() const { return Op_StoreB; }
  virtual BasicType memory_type() const { return T_BYTE; }
};
253
//------------------------------LoadUBNode-------------------------------------
// Load an unsigned byte (8 bits unsigned) from memory.
class LoadUBNode : public LoadNode {
public:
  LoadUBNode(Node* c, Node* mem, Node* adr, const TypePtr* at, const TypeInt* ti, MemOrd mo)
    : LoadNode(c, mem, adr, at, ti, mo) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegI; }
  virtual Node* Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type *Value(PhaseTransform *phase) const;
  virtual int store_Opcode() const { return Op_StoreB; }
  virtual BasicType memory_type() const { return T_BYTE; }
};
267
//------------------------------LoadUSNode-------------------------------------
// Load an unsigned short/char (16 bits unsigned) from memory.
class LoadUSNode : public LoadNode {
public:
  LoadUSNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeInt *ti, MemOrd mo)
    : LoadNode(c, mem, adr, at, ti, mo) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegI; }
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type *Value(PhaseTransform *phase) const;
  virtual int store_Opcode() const { return Op_StoreC; }
  virtual BasicType memory_type() const { return T_CHAR; }
};
281
//------------------------------LoadSNode--------------------------------------
// Load a short (16 bits signed) from memory.
class LoadSNode : public LoadNode {
public:
  LoadSNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeInt *ti, MemOrd mo)
    : LoadNode(c, mem, adr, at, ti, mo) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegI; }
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type *Value(PhaseTransform *phase) const;
  virtual int store_Opcode() const { return Op_StoreC; }
  virtual BasicType memory_type() const { return T_SHORT; }
};
295
//------------------------------LoadINode--------------------------------------
// Load an integer (32 bits) from memory.
class LoadINode : public LoadNode {
public:
  LoadINode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeInt *ti, MemOrd mo)
    : LoadNode(c, mem, adr, at, ti, mo) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegI; }
  virtual int store_Opcode() const { return Op_StoreI; }
  virtual BasicType memory_type() const { return T_INT; }
};
307
//------------------------------LoadRangeNode----------------------------------
// Load an array length from the array.
// Always an unordered int load from the RANGE slice; the default type
// TypeInt::POS reflects that an array length is non-negative.
class LoadRangeNode : public LoadINode {
public:
  LoadRangeNode(Node *c, Node *mem, Node *adr, const TypeInt *ti = TypeInt::POS)
    : LoadINode(c, mem, adr, TypeAryPtr::RANGE, ti, MemNode::unordered) {}
  virtual int Opcode() const;
  virtual const Type *Value( PhaseTransform *phase ) const;
  virtual Node *Identity( PhaseTransform *phase );
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
};
319
//------------------------------LoadLNode--------------------------------------
// Load a long (64 bits) from memory.
// _require_atomic_access participates in hash/cmp so that GVN never merges
// an atomic long load with a non-atomic one.
class LoadLNode : public LoadNode {
  virtual uint hash() const { return LoadNode::hash() + _require_atomic_access; }
  virtual uint cmp( const Node &n ) const {
    return _require_atomic_access == ((LoadLNode&)n)._require_atomic_access
      && LoadNode::cmp(n);
  }
  virtual uint size_of() const { return sizeof(*this); }
  const bool _require_atomic_access;  // is piecewise load forbidden?

public:
  LoadLNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const TypeLong *tl,
            MemOrd mo, bool require_atomic_access = false)
    : LoadNode(c, mem, adr, at, tl, mo), _require_atomic_access(require_atomic_access) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegL; }
  virtual int store_Opcode() const { return Op_StoreL; }
  virtual BasicType memory_type() const { return T_LONG; }
  bool require_atomic_access() const { return _require_atomic_access; }
  // Factory for a load that must be performed as a single indivisible access.
  static LoadLNode* make_atomic(Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type,
                                const Type* rt, MemOrd mo);
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const {
    LoadNode::dump_spec(st);
    if (_require_atomic_access)  st->print(" Atomic!");
  }
#endif
};
349
//------------------------------LoadL_unalignedNode----------------------------
// Load a long from unaligned memory.
class LoadL_unalignedNode : public LoadLNode {
public:
  LoadL_unalignedNode(Node *c, Node *mem, Node *adr, const TypePtr* at, MemOrd mo)
    : LoadLNode(c, mem, adr, at, TypeLong::LONG, mo) {}
  virtual int Opcode() const;
};
358
//------------------------------LoadFNode--------------------------------------
// Load a float (32 bits) from memory.
class LoadFNode : public LoadNode {
public:
  LoadFNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const Type *t, MemOrd mo)
    : LoadNode(c, mem, adr, at, t, mo) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegF; }
  virtual int store_Opcode() const { return Op_StoreF; }
  virtual BasicType memory_type() const { return T_FLOAT; }
};
370
//------------------------------LoadDNode--------------------------------------
// Load a double (64 bits) from memory.
// Like LoadLNode, folds _require_atomic_access into hash/cmp so GVN keeps
// atomic and non-atomic double loads distinct.
class LoadDNode : public LoadNode {
  virtual uint hash() const { return LoadNode::hash() + _require_atomic_access; }
  virtual uint cmp( const Node &n ) const {
    return _require_atomic_access == ((LoadDNode&)n)._require_atomic_access
      && LoadNode::cmp(n);
  }
  virtual uint size_of() const { return sizeof(*this); }
  const bool _require_atomic_access;  // is piecewise load forbidden?

public:
  LoadDNode(Node *c, Node *mem, Node *adr, const TypePtr* at, const Type *t,
            MemOrd mo, bool require_atomic_access = false)
    : LoadNode(c, mem, adr, at, t, mo), _require_atomic_access(require_atomic_access) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegD; }
  virtual int store_Opcode() const { return Op_StoreD; }
  virtual BasicType memory_type() const { return T_DOUBLE; }
  bool require_atomic_access() const { return _require_atomic_access; }
  // Factory for a load that must be performed as a single indivisible access.
  static LoadDNode* make_atomic(Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type,
                                const Type* rt, MemOrd mo);
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const {
    LoadNode::dump_spec(st);
    if (_require_atomic_access)  st->print(" Atomic!");
  }
#endif
};
400
//------------------------------LoadD_unalignedNode----------------------------
// Load a double from unaligned memory.
class LoadD_unalignedNode : public LoadDNode {
public:
  LoadD_unalignedNode(Node *c, Node *mem, Node *adr, const TypePtr* at, MemOrd mo)
    : LoadDNode(c, mem, adr, at, Type::DOUBLE, mo) {}
  virtual int Opcode() const;
};
409
//------------------------------LoadPNode--------------------------------------
// Load a pointer from memory (either object or array).
class LoadPNode : public LoadNode {
public:
  LoadPNode(Node *c, Node *mem, Node *adr, const TypePtr *at, const TypePtr* t, MemOrd mo)
    : LoadNode(c, mem, adr, at, t, mo) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegP; }
  virtual int store_Opcode() const { return Op_StoreP; }
  virtual BasicType memory_type() const { return T_ADDRESS; }
};
421

//------------------------------LoadNNode--------------------------------------
// Load a narrow (compressed) oop from memory (either object or array).
class LoadNNode : public LoadNode {
public:
  LoadNNode(Node *c, Node *mem, Node *adr, const TypePtr *at, const Type* t, MemOrd mo)
    : LoadNode(c, mem, adr, at, t, mo) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegN; }
  virtual int store_Opcode() const { return Op_StoreN; }
  virtual BasicType memory_type() const { return T_NARROWOOP; }
};
434
//------------------------------LoadKlassNode----------------------------------
// Load a Klass from an object.
class LoadKlassNode : public LoadPNode {
protected:
  // In most cases, LoadKlassNode does not have the control input set. If the control
  // input is set, it must not be removed (by LoadNode::Ideal()).
  virtual bool can_remove_control() const;
public:
  LoadKlassNode(Node *c, Node *mem, Node *adr, const TypePtr *at, const TypeKlassPtr *tk, MemOrd mo)
    : LoadPNode(c, mem, adr, at, tk, mo) {}
  virtual int Opcode() const;
  virtual const Type *Value( PhaseTransform *phase ) const;
  virtual Node *Identity( PhaseTransform *phase );
  // A klass is immutable once installed, so this load may be hoisted freely.
  virtual bool depends_only_on_test() const { return true; }

  // Polymorphic factory method:
  static Node* make(PhaseGVN& gvn, Node* ctl, Node* mem, Node* adr, const TypePtr* at,
                    const TypeKlassPtr* tk = TypeKlassPtr::OBJECT);
};
454
//------------------------------LoadNKlassNode---------------------------------
// Load a narrow (compressed) Klass from an object.
class LoadNKlassNode : public LoadNNode {
public:
  LoadNKlassNode(Node *c, Node *mem, Node *adr, const TypePtr *at, const TypeNarrowKlass *tk, MemOrd mo)
    : LoadNNode(c, mem, adr, at, tk, mo) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return Op_RegN; }
  virtual int store_Opcode() const { return Op_StoreNKlass; }
  virtual BasicType memory_type() const { return T_NARROWKLASS; }

  virtual const Type *Value( PhaseTransform *phase ) const;
  virtual Node *Identity( PhaseTransform *phase );
  // A klass is immutable once installed, so this load may be hoisted freely.
  virtual bool depends_only_on_test() const { return true; }
};
470
471
//------------------------------StoreNode--------------------------------------
// Store value; requires Store, Address and Value.
// Base class of all store nodes; carries the memory-ordering constraint
// (unordered vs. release).
class StoreNode : public MemNode {
private:
  // On platforms with weak memory ordering (e.g., PPC, Ia64) we distinguish
  // stores that can be reordered, and such requiring release semantics to
  // adhere to the Java specification.  The required behaviour is stored in
  // this field.
  const MemOrd _mo;
  // Needed for proper cloning.
  virtual uint size_of() const { return sizeof(*this); }
protected:
  virtual uint cmp( const Node &n ) const;
  // A store is never hoisted above its test; it must stay under its control.
  virtual bool depends_only_on_test() const { return false; }

  Node *Ideal_masked_input       (PhaseGVN *phase, uint mask);
  Node *Ideal_sign_extended_input(PhaseGVN *phase, int  num_bits);

public:
  // We must ensure that stores of object references will be visible
  // only after the object's initialization. So the callers of this
  // procedure must indicate that the store requires `release'
  // semantics, if the stored value is an object reference that might
  // point to a new object and may become externally visible.
  StoreNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
    : MemNode(c, mem, adr, at, val), _mo(mo) {
    init_class_id(Class_Store);
  }
  StoreNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, Node *oop_store, MemOrd mo)
    : MemNode(c, mem, adr, at, val, oop_store), _mo(mo) {
    init_class_id(Class_Store);
  }

  // Ordering queries; _mo for a store is only ever unordered or release.
  inline bool is_unordered() const { return !is_release(); }
  inline bool is_release() const {
    assert((_mo == unordered || _mo == release), "unexpected");
    return _mo == release;
  }

  // Conservatively release stores of object references in order to
  // ensure visibility of object initialization.
  static inline MemOrd release_if_reference(const BasicType t) {
    const MemOrd mo = (t == T_ARRAY ||
                       t == T_ADDRESS || // Might be the address of an object reference (`boxing').
                       t == T_OBJECT) ? release : unordered;
    return mo;
  }

  // Polymorphic factory method
  //
  // We must ensure that stores of object references will be visible
  // only after the object's initialization. So the callers of this
  // procedure must indicate that the store requires `release'
  // semantics, if the stored value is an object reference that might
  // point to a new object and may become externally visible.
  static StoreNode* make(PhaseGVN& gvn, Node *c, Node *mem, Node *adr,
                         const TypePtr* at, Node *val, BasicType bt, MemOrd mo);

  virtual uint hash() const;    // Check the type

  // If the store is to Field memory and the pointer is non-null, we can
  // zero out the control input.
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);

  // Compute a new Type for this node.  Basically we just do the pre-check,
  // then call the virtual add() to set the type.
  virtual const Type *Value( PhaseTransform *phase ) const;

  // Check for identity function on memory (Load then Store at same address)
  virtual Node *Identity( PhaseTransform *phase );

  // Do not match memory edge
  virtual uint match_edge(uint idx) const;

  virtual const Type *bottom_type() const;  // returns Type::MEMORY

  // Map a store opcode to its corresponding own opcode, trivially.
  virtual int store_Opcode() const { return Opcode(); }

  // have all possible loads of the value stored been optimized away?
  bool value_never_loaded(PhaseTransform *phase) const;
};
554
//------------------------------StoreBNode-------------------------------------
// Store byte to memory.
class StoreBNode : public StoreNode {
public:
  StoreBNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
    : StoreNode(c, mem, adr, at, val, mo) {}
  virtual int Opcode() const;
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual BasicType memory_type() const { return T_BYTE; }
};
565
//------------------------------StoreCNode-------------------------------------
// Store char/short to memory.
class StoreCNode : public StoreNode {
public:
  StoreCNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
    : StoreNode(c, mem, adr, at, val, mo) {}
  virtual int Opcode() const;
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual BasicType memory_type() const { return T_CHAR; }
};
576
//------------------------------StoreINode-------------------------------------
// Store int to memory.
class StoreINode : public StoreNode {
public:
  StoreINode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
    : StoreNode(c, mem, adr, at, val, mo) {}
  virtual int Opcode() const;
  virtual BasicType memory_type() const { return T_INT; }
};
586
//------------------------------StoreLNode-------------------------------------
// Store long to memory.
// _require_atomic_access participates in hash/cmp so that GVN never merges
// an atomic long store with a non-atomic one.
class StoreLNode : public StoreNode {
  virtual uint hash() const { return StoreNode::hash() + _require_atomic_access; }
  virtual uint cmp( const Node &n ) const {
    return _require_atomic_access == ((StoreLNode&)n)._require_atomic_access
      && StoreNode::cmp(n);
  }
  virtual uint size_of() const { return sizeof(*this); }
  const bool _require_atomic_access;  // is piecewise store forbidden?

public:
  StoreLNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo, bool require_atomic_access = false)
    : StoreNode(c, mem, adr, at, val, mo), _require_atomic_access(require_atomic_access) {}
  virtual int Opcode() const;
  virtual BasicType memory_type() const { return T_LONG; }
  bool require_atomic_access() const { return _require_atomic_access; }
  // Factory for a store that must be performed as a single indivisible access.
  static StoreLNode* make_atomic(Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type, Node* val, MemOrd mo);
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const {
    StoreNode::dump_spec(st);
    if (_require_atomic_access)  st->print(" Atomic!");
  }
#endif
};
612
//------------------------------StoreFNode-------------------------------------
// Store float to memory
class StoreFNode : public StoreNode {
public:
  // mo gives the required memory ordering of the store (MemNode::MemOrd).
  StoreFNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
    : StoreNode(c, mem, adr, at, val, mo) {}
  virtual int Opcode() const;
  virtual BasicType memory_type() const { return T_FLOAT; }
};
622
//------------------------------StoreDNode-------------------------------------
// Store double to memory
class StoreDNode : public StoreNode {
  // As with StoreLNode, the atomicity flag is part of the GVN identity so
  // atomic and non-atomic double stores are kept distinct.
  virtual uint hash() const { return StoreNode::hash() + _require_atomic_access; }
  virtual uint cmp( const Node &n ) const {
    return _require_atomic_access == ((StoreDNode&)n)._require_atomic_access
      && StoreNode::cmp(n);
  }
  virtual uint size_of() const { return sizeof(*this); }
  const bool _require_atomic_access;  // is piecewise store forbidden?
public:
  StoreDNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val,
             MemOrd mo, bool require_atomic_access = false)
    : StoreNode(c, mem, adr, at, val, mo), _require_atomic_access(require_atomic_access) {}
  virtual int Opcode() const;
  virtual BasicType memory_type() const { return T_DOUBLE; }
  bool require_atomic_access() const { return _require_atomic_access; }
  // Factory for a store that must not be split into piecewise accesses.
  static StoreDNode* make_atomic(Node* ctl, Node* mem, Node* adr, const TypePtr* adr_type, Node* val, MemOrd mo);
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const {
    StoreNode::dump_spec(st);
    if (_require_atomic_access)  st->print(" Atomic!");
  }
#endif

};
649
//------------------------------StorePNode-------------------------------------
// Store pointer to memory
class StorePNode : public StoreNode {
public:
  // mo gives the required memory ordering of the store (MemNode::MemOrd).
  StorePNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
    : StoreNode(c, mem, adr, at, val, mo) {}
  virtual int Opcode() const;
  virtual BasicType memory_type() const { return T_ADDRESS; }
};
659
//------------------------------StoreNNode-------------------------------------
// Store narrow (compressed) oop to memory
class StoreNNode : public StoreNode {
public:
  StoreNNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
    : StoreNode(c, mem, adr, at, val, mo) {}
  virtual int Opcode() const;
  virtual BasicType memory_type() const { return T_NARROWOOP; }
};
669
//------------------------------StoreNKlassNode--------------------------------------
// Store narrow (compressed) klass pointer to memory
class StoreNKlassNode : public StoreNNode {
public:
  StoreNKlassNode(Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, MemOrd mo)
    : StoreNNode(c, mem, adr, at, val, mo) {}
  virtual int Opcode() const;
  virtual BasicType memory_type() const { return T_NARROWKLASS; }
};
679
//------------------------------StoreCMNode-----------------------------------
// Store card-mark byte to memory for CM
// The last StoreCM before a SafePoint must be preserved and occur after its "oop" store
// Preceding equivalent StoreCMs may be eliminated.
class StoreCMNode : public StoreNode {
 private:
  // The oop alias index is part of the GVN identity, so card marks for
  // different oop slices never commone.
  virtual uint hash() const { return StoreNode::hash() + _oop_alias_idx; }
  virtual uint cmp( const Node &n ) const {
    return _oop_alias_idx == ((StoreCMNode&)n)._oop_alias_idx
      && StoreNode::cmp(n);
  }
  virtual uint size_of() const { return sizeof(*this); }
  int _oop_alias_idx;   // The alias_idx of OopStore

public:
  // 'oop_store' is the store being card-marked; it becomes an extra input,
  // and the card-mark store itself is emitted with release ordering.
  StoreCMNode( Node *c, Node *mem, Node *adr, const TypePtr* at, Node *val, Node *oop_store, int oop_alias_idx ) :
    StoreNode(c, mem, adr, at, val, oop_store, MemNode::release),
    _oop_alias_idx(oop_alias_idx) {
    assert(_oop_alias_idx >= Compile::AliasIdxRaw ||
           _oop_alias_idx == Compile::AliasIdxBot && Compile::current()->AliasLevel() == 0,
           "bad oop alias idx");
  }
  virtual int Opcode() const;
  virtual Node *Identity( PhaseTransform *phase );
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type *Value( PhaseTransform *phase ) const;
  virtual BasicType memory_type() const { return T_VOID; } // unspecific
  int oop_alias_idx() const { return _oop_alias_idx; }
};
709
//------------------------------LoadPLockedNode---------------------------------
// Load-locked a pointer from memory (either object or array).
// On Sparc & Intel this is implemented as a normal pointer load.
// On PowerPC and friends it's a real load-locked.
class LoadPLockedNode : public LoadPNode {
public:
  LoadPLockedNode(Node *c, Node *mem, Node *adr, MemOrd mo)
    : LoadPNode(c, mem, adr, TypeRawPtr::BOTTOM, TypeRawPtr::BOTTOM, mo) {}
  virtual int Opcode() const;
  // The matching store for this load-locked is the conditional pointer store.
  virtual int store_Opcode() const { return Op_StorePConditional; }
  virtual bool depends_only_on_test() const { return true; }
};
722
//------------------------------SCMemProjNode---------------------------------------
// This class defines a projection of the memory state of a store conditional node.
// These nodes return a value, but also update memory.
class SCMemProjNode : public ProjNode {
public:
  enum {SCMEMPROJCON = (uint)-2};  // distinguished projection constant
  SCMemProjNode( Node *src) : ProjNode( src, SCMEMPROJCON) { }
  virtual int Opcode() const;
  virtual bool      is_CFG() const  { return false; }
  virtual const Type *bottom_type() const {return Type::MEMORY;}
  // Address type is taken from the memory input of the store-conditional.
  virtual const TypePtr *adr_type() const { return in(0)->in(MemNode::Memory)->adr_type();}
  virtual uint ideal_reg() const { return 0;} // memory projections don't have a register
  virtual const Type *Value( PhaseTransform *phase ) const;
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const {};
#endif
};
740
//------------------------------LoadStoreNode---------------------------
// Base class for atomic read-modify-write nodes (CAS, get-and-add/set).
// Note: is_Mem() method returns 'true' for this class.
class LoadStoreNode : public Node {
private:
  const Type* const _type;      // What kind of value is loaded?
  const TypePtr* _adr_type;     // What kind of memory is being addressed?
  virtual uint size_of() const; // Size is bigger
public:
  LoadStoreNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* rt, uint required );
  virtual bool depends_only_on_test() const { return false; }
  // Only the address and value-in edges participate in matching.
  virtual uint match_edge(uint idx) const { return idx == MemNode::Address || idx == MemNode::ValueIn; }

  virtual const Type *bottom_type() const { return _type; }
  virtual uint ideal_reg() const;
  virtual const class TypePtr *adr_type() const { return _adr_type; }  // returns bottom_type of address

  // True if the loaded (old) value has no uses.
  bool result_not_used() const;
};
759
// LoadStore variant that takes an extra "expected" input and succeeds or
// fails conditionally (store-conditional / compare-and-swap family).
class LoadStoreConditionalNode : public LoadStoreNode {
public:
  enum {
    ExpectedIn = MemNode::ValueIn+1 // One more input than MemNode
  };
  LoadStoreConditionalNode(Node *c, Node *mem, Node *adr, Node *val, Node *ex);
};
767
//------------------------------StorePConditionalNode---------------------------
// Conditionally store pointer to memory, if no change since prior
// load-locked.  Sets flags for success or failure of the store.
class StorePConditionalNode : public LoadStoreConditionalNode {
public:
  StorePConditionalNode( Node *c, Node *mem, Node *adr, Node *val, Node *ll ) : LoadStoreConditionalNode(c, mem, adr, val, ll) { }
  virtual int Opcode() const;
  // Produces flags
  virtual uint ideal_reg() const { return Op_RegFlags; }
};
778
//------------------------------StoreIConditionalNode---------------------------
// Conditionally store int to memory, if no change since prior
// load-locked.  Sets flags for success or failure of the store.
class StoreIConditionalNode : public LoadStoreConditionalNode {
public:
  StoreIConditionalNode( Node *c, Node *mem, Node *adr, Node *val, Node *ii ) : LoadStoreConditionalNode(c, mem, adr, val, ii) { }
  virtual int Opcode() const;
  // Produces flags
  virtual uint ideal_reg() const { return Op_RegFlags; }
};
789
//------------------------------StoreLConditionalNode---------------------------
// Conditionally store long to memory, if no change since prior
// load-locked.  Sets flags for success or failure of the store.
class StoreLConditionalNode : public LoadStoreConditionalNode {
public:
  StoreLConditionalNode( Node *c, Node *mem, Node *adr, Node *val, Node *ll ) : LoadStoreConditionalNode(c, mem, adr, val, ll) { }
  virtual int Opcode() const;
  // Produces flags
  virtual uint ideal_reg() const { return Op_RegFlags; }
};
800
801
//------------------------------CompareAndSwapLNode---------------------------
// Atomically compare-and-swap a long; 'ex' is the expected old value.
class CompareAndSwapLNode : public LoadStoreConditionalNode {
public:
  CompareAndSwapLNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex) : LoadStoreConditionalNode(c, mem, adr, val, ex) { }
  virtual int Opcode() const;
};
808
809
//------------------------------CompareAndSwapINode---------------------------
// Atomically compare-and-swap an int; 'ex' is the expected old value.
class CompareAndSwapINode : public LoadStoreConditionalNode {
public:
  CompareAndSwapINode( Node *c, Node *mem, Node *adr, Node *val, Node *ex) : LoadStoreConditionalNode(c, mem, adr, val, ex) { }
  virtual int Opcode() const;
};
816
817
//------------------------------CompareAndSwapPNode---------------------------
// Atomically compare-and-swap a pointer; 'ex' is the expected old value.
class CompareAndSwapPNode : public LoadStoreConditionalNode {
public:
  CompareAndSwapPNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex) : LoadStoreConditionalNode(c, mem, adr, val, ex) { }
  virtual int Opcode() const;
};
824
//------------------------------CompareAndSwapNNode---------------------------
// Atomically compare-and-swap a narrow oop; 'ex' is the expected old value.
class CompareAndSwapNNode : public LoadStoreConditionalNode {
public:
  CompareAndSwapNNode( Node *c, Node *mem, Node *adr, Node *val, Node *ex) : LoadStoreConditionalNode(c, mem, adr, val, ex) { }
  virtual int Opcode() const;
};
831
//------------------------------GetAndAddINode---------------------------
// Atomically add 'val' to an int in memory and return the old value.
class GetAndAddINode : public LoadStoreNode {
public:
  GetAndAddINode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at ) : LoadStoreNode(c, mem, adr, val, at, TypeInt::INT, 4) { }
  virtual int Opcode() const;
};
838
//------------------------------GetAndAddLNode---------------------------
// Atomically add 'val' to a long in memory and return the old value.
class GetAndAddLNode : public LoadStoreNode {
public:
  GetAndAddLNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at ) : LoadStoreNode(c, mem, adr, val, at, TypeLong::LONG, 4) { }
  virtual int Opcode() const;
};
845
846
//------------------------------GetAndSetINode---------------------------
// Atomically exchange an int in memory with 'val' and return the old value.
class GetAndSetINode : public LoadStoreNode {
public:
  GetAndSetINode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at ) : LoadStoreNode(c, mem, adr, val, at, TypeInt::INT, 4) { }
  virtual int Opcode() const;
};
853
//------------------------------GetAndSetLNode---------------------------
// Atomically exchange a long in memory with 'val' and return the old value.
// (Header comment previously misnamed this class GetAndSetINode.)
class GetAndSetLNode : public LoadStoreNode {
public:
  GetAndSetLNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at ) : LoadStoreNode(c, mem, adr, val, at, TypeLong::LONG, 4) { }
  virtual int Opcode() const;
};
860
//------------------------------GetAndSetPNode---------------------------
// Atomically exchange a pointer in memory with 'val' and return the old value.
class GetAndSetPNode : public LoadStoreNode {
public:
  GetAndSetPNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* t ) : LoadStoreNode(c, mem, adr, val, at, t, 4) { }
  virtual int Opcode() const;
};
867
//------------------------------GetAndSetNNode---------------------------
// Atomically exchange a narrow oop in memory with 'val' and return the old value.
class GetAndSetNNode : public LoadStoreNode {
public:
  GetAndSetNNode( Node *c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* t ) : LoadStoreNode(c, mem, adr, val, at, t, 4) { }
  virtual int Opcode() const;
};
874
//------------------------------ClearArray-------------------------------------
// Zero a contiguous region of an object or array ('word_cnt' words at 'base').
class ClearArrayNode: public Node {
public:
  ClearArrayNode( Node *ctrl, Node *arymem, Node *word_cnt, Node *base )
    : Node(ctrl,arymem,word_cnt,base) {
    init_class_id(Class_ClearArray);
  }
  virtual int         Opcode() const;
  virtual const Type *bottom_type() const { return Type::MEMORY; }
  // ClearArray modifies array elements, and so affects only the
  // array memory addressed by the bottom_type of its base address.
  virtual const class TypePtr *adr_type() const;
  virtual Node *Identity( PhaseTransform *phase );
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual uint match_edge(uint idx) const;

  // Clear the given area of an object or array.
  // The start offset must always be aligned mod BytesPerInt.
  // The end offset must always be aligned mod BytesPerLong.
  // Return the new memory.
  static Node* clear_memory(Node* control, Node* mem, Node* dest,
                            intptr_t start_offset,
                            intptr_t end_offset,
                            PhaseGVN* phase);
  static Node* clear_memory(Node* control, Node* mem, Node* dest,
                            intptr_t start_offset,
                            Node* end_offset,
                            PhaseGVN* phase);
  static Node* clear_memory(Node* control, Node* mem, Node* dest,
                            Node* start_offset,
                            Node* end_offset,
                            PhaseGVN* phase);
  // Return allocation input memory edge if it is different instance
  // or itself if it is the one we are looking for.
  static bool step_through(Node** np, uint instance_id, PhaseTransform* phase);
};
911
//------------------------------MemBar-----------------------------------------
// There are different flavors of Memory Barriers to match the Java Memory
// Model.  Monitor-enter and volatile-load act as Acquires: no following ref
// can be moved to before them.  We insert a MemBar-Acquire after a FastLock or
// volatile-load.  Monitor-exit and volatile-store act as Release: no
// preceding ref can be moved to after them.  We insert a MemBar-Release
// before a FastUnlock or volatile-store.  All volatiles need to be
// serialized, so we follow all volatile-stores with a MemBar-Volatile to
// separate it from any following volatile-load.
class MemBarNode: public MultiNode {
  virtual uint hash() const ;                  // { return NO_HASH; }
  virtual uint cmp( const Node &n ) const ;    // Always fail, except on self

  virtual uint size_of() const { return sizeof(*this); }
  // Memory type this node is serializing.  Usually either rawptr or bottom.
  const TypePtr* _adr_type;

public:
  enum {
    Precedent = TypeFunc::Parms  // optional edge to force precedence
  };
  MemBarNode(Compile* C, int alias_idx, Node* precedent);
  virtual int Opcode() const = 0;
  virtual const class TypePtr *adr_type() const { return _adr_type; }
  virtual const Type *Value( PhaseTransform *phase ) const;
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual uint match_edge(uint idx) const { return 0; }
  virtual const Type *bottom_type() const { return TypeTuple::MEMBAR; }
  virtual Node *match( const ProjNode *proj, const Matcher *m );
  // Factory method.  Builds a wide or narrow membar.
  // Optional 'precedent' becomes an extra edge if not null.
  static MemBarNode* make(Compile* C, int opcode,
                          int alias_idx = Compile::AliasIdxBot,
                          Node* precedent = NULL);
};
947
// "Acquire" - no following ref can move before (but earlier refs can
// follow, like an early Load stalled in cache).  Requires multi-cpu
// visibility.  Inserted after a volatile load.
class MemBarAcquireNode: public MemBarNode {
public:
  MemBarAcquireNode(Compile* C, int alias_idx, Node* precedent)
    : MemBarNode(C, alias_idx, precedent) {}
  virtual int Opcode() const;
};
957
// "Acquire" - no following ref can move before (but earlier refs can
// follow, like an early Load stalled in cache).  Requires multi-cpu
// visibility.  Inserted independent of any load, as required
// for intrinsic sun.misc.Unsafe.loadFence().
class LoadFenceNode: public MemBarNode {
public:
  LoadFenceNode(Compile* C, int alias_idx, Node* precedent)
    : MemBarNode(C, alias_idx, precedent) {}
  virtual int Opcode() const;
};
968
// "Release" - no earlier ref can move after (but later refs can move
// up, like a speculative pipelined cache-hitting Load).  Requires
// multi-cpu visibility.  Inserted before a volatile store.
class MemBarReleaseNode: public MemBarNode {
public:
  MemBarReleaseNode(Compile* C, int alias_idx, Node* precedent)
    : MemBarNode(C, alias_idx, precedent) {}
  virtual int Opcode() const;
};
978
// "Release" - no earlier ref can move after (but later refs can move
// up, like a speculative pipelined cache-hitting Load).  Requires
// multi-cpu visibility.  Inserted independent of any store, as required
// for intrinsic sun.misc.Unsafe.storeFence().
class StoreFenceNode: public MemBarNode {
public:
  StoreFenceNode(Compile* C, int alias_idx, Node* precedent)
    : MemBarNode(C, alias_idx, precedent) {}
  virtual int Opcode() const;
};
989
// "Acquire" - no following ref can move before (but earlier refs can
// follow, like an early Load stalled in cache).  Requires multi-cpu
// visibility.  Inserted after a FastLock.
class MemBarAcquireLockNode: public MemBarNode {
public:
  MemBarAcquireLockNode(Compile* C, int alias_idx, Node* precedent)
    : MemBarNode(C, alias_idx, precedent) {}
  virtual int Opcode() const;
};
999
// "Release" - no earlier ref can move after (but later refs can move
// up, like a speculative pipelined cache-hitting Load).  Requires
// multi-cpu visibility.  Inserted before a FastUnLock.
class MemBarReleaseLockNode: public MemBarNode {
public:
  MemBarReleaseLockNode(Compile* C, int alias_idx, Node* precedent)
    : MemBarNode(C, alias_idx, precedent) {}
  virtual int Opcode() const;
};
1009
// Store-store barrier: earlier stores may not be reordered with later ones.
class MemBarStoreStoreNode: public MemBarNode {
public:
  MemBarStoreStoreNode(Compile* C, int alias_idx, Node* precedent)
    : MemBarNode(C, alias_idx, precedent) {
    init_class_id(Class_MemBarStoreStore);
  }
  virtual int Opcode() const;
};
1018
// Ordering between a volatile store and a following volatile load.
// Requires multi-CPU visibility?
class MemBarVolatileNode: public MemBarNode {
public:
  MemBarVolatileNode(Compile* C, int alias_idx, Node* precedent)
    : MemBarNode(C, alias_idx, precedent) {}
  virtual int Opcode() const;
};
1027
// Ordering within the same CPU.  Used to order unsafe memory references
// inside the compiler when we lack alias info.  Not needed "outside" the
// compiler because the CPU does all the ordering for us.
class MemBarCPUOrderNode: public MemBarNode {
public:
  MemBarCPUOrderNode(Compile* C, int alias_idx, Node* precedent)
    : MemBarNode(C, alias_idx, precedent) {}
  virtual int Opcode() const;
  virtual uint ideal_reg() const { return 0; } // not matched in the AD file
};
1038
// Isolation of object setup after an AllocateNode and before next safepoint.
// (See comment in memnode.cpp near InitializeNode::InitializeNode for semantics.)
class InitializeNode: public MemBarNode {
  friend class AllocateNode;

  // Completion state; WithArraycopy is OR-ed in as a flag bit.
  enum {
    Incomplete    = 0,
    Complete      = 1,
    WithArraycopy = 2
  };
  int _is_complete;

  bool _does_not_escape;  // set when the allocated object never escapes

public:
  enum {
    Control    = TypeFunc::Control,
    Memory     = TypeFunc::Memory,     // MergeMem for states affected by this op
    RawAddress = TypeFunc::Parms+0,    // the newly-allocated raw address
    RawStores  = TypeFunc::Parms+1     // zero or more stores (or TOP)
  };

  InitializeNode(Compile* C, int adr_type, Node* rawoop);
  virtual int Opcode() const;
  virtual uint size_of() const { return sizeof(*this); }
  virtual uint ideal_reg() const { return 0; } // not matched in the AD file
  virtual const RegMask &in_RegMask(uint) const;  // mask for RawAddress

  // Manage incoming memory edges via a MergeMem on in(Memory):
  Node* memory(uint alias_idx);

  // The raw memory edge coming directly from the Allocation.
  // The contents of this memory are *always* all-zero-bits.
  Node* zero_memory() { return memory(Compile::AliasIdxRaw); }

  // Return the corresponding allocation for this initialization (or null if none).
  // (Note: Both InitializeNode::allocation and AllocateNode::initialization
  // are defined in graphKit.cpp, which sets up the bidirectional relation.)
  AllocateNode* allocation();

  // Anything other than zeroing in this init?
  bool is_non_zero();

  // An InitializeNode must be completed before macro expansion is done.
  // Completion requires that the AllocateNode must be followed by
  // initialization of the new memory to zero, then to any initializers.
  bool is_complete() { return _is_complete != Incomplete; }
  bool is_complete_with_arraycopy() { return (_is_complete & WithArraycopy) != 0; }

  // Mark complete.  (Must not yet be complete.)
  void set_complete(PhaseGVN* phase);
  void set_complete_with_arraycopy() { _is_complete = Complete | WithArraycopy; }

  bool does_not_escape() { return _does_not_escape; }
  void set_does_not_escape() { _does_not_escape = true; }

#ifdef ASSERT
  // ensure all non-degenerate stores are ordered and non-overlapping
  bool stores_are_sane(PhaseTransform* phase);
#endif //ASSERT

  // See if this store can be captured; return offset where it initializes.
  // Return 0 if the store cannot be moved (any sort of problem).
  intptr_t can_capture_store(StoreNode* st, PhaseTransform* phase, bool can_reshape);

  // Capture another store; reformat it to write my internal raw memory.
  // Return the captured copy, else NULL if there is some sort of problem.
  Node* capture_store(StoreNode* st, intptr_t start, PhaseTransform* phase, bool can_reshape);

  // Find captured store which corresponds to the range [start..start+size).
  // Return my own memory projection (meaning the initial zero bits)
  // if there is no such store.  Return NULL if there is a problem.
  Node* find_captured_store(intptr_t start, int size_in_bytes, PhaseTransform* phase);

  // Called when the associated AllocateNode is expanded into CFG.
  Node* complete_stores(Node* rawctl, Node* rawmem, Node* rawptr,
                        intptr_t header_size, Node* size_in_bytes,
                        PhaseGVN* phase);

 private:
  void remove_extra_zeroes();

  // Find out where a captured store should be placed (or already is placed).
  int captured_store_insertion_point(intptr_t start, int size_in_bytes,
                                     PhaseTransform* phase);

  static intptr_t get_store_offset(Node* st, PhaseTransform* phase);

  Node* make_raw_address(intptr_t offset, PhaseTransform* phase);

  bool detect_init_independence(Node* n, int& count);

  void coalesce_subword_stores(intptr_t header_size, Node* size_in_bytes,
                               PhaseGVN* phase);

  intptr_t find_next_fullword_store(uint i, PhaseGVN* phase);
};
1136
//------------------------------MergeMem---------------------------------------
// (See comment in memnode.cpp near MergeMemNode::MergeMemNode for semantics.)
class MergeMemNode: public Node {
  virtual uint hash() const ;                  // { return NO_HASH; }
  virtual uint cmp( const Node &n ) const ;    // Always fail, except on self
  friend class MergeMemStream;
  MergeMemNode(Node* def);  // clients use MergeMemNode::make

public:
  // If the input is a whole memory state, clone it with all its slices intact.
  // Otherwise, make a new memory state with just that base memory input.
  // In either case, the result is a newly created MergeMem.
  static MergeMemNode* make(Node* base_memory);

  virtual int Opcode() const;
  virtual Node *Identity( PhaseTransform *phase );
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual uint ideal_reg() const { return NotAMachineReg; }
  virtual uint match_edge(uint idx) const { return 0; }
  virtual const RegMask &out_RegMask() const;
  virtual const Type *bottom_type() const { return Type::MEMORY; }
  virtual const TypePtr *adr_type() const { return TypePtr::BOTTOM; }
  // sparse accessors
  // Fetch the previously stored "set_memory_at", or else the base memory.
  // (Caller should clone it if it is a phi-nest.)
  Node* memory_at(uint alias_idx) const;
  // set the memory, regardless of its previous value
  void set_memory_at(uint alias_idx, Node* n);
  // the "base" is the memory that provides the non-finite support
  Node* base_memory() const { return in(Compile::AliasIdxBot); }
  // warning: setting the base can implicitly set any of the other slices too
  void set_base_memory(Node* def);
  // sentinel value which denotes a copy of the base memory:
  Node* empty_memory() const { return in(Compile::AliasIdxTop); }
  static Node* make_empty_memory(); // where the sentinel comes from
  bool is_empty_memory(Node* n) const { assert((n == empty_memory()) == n->is_top(), "sanity"); return n->is_top(); }
  // hook for the iterator, to perform any necessary setup
  void iteration_setup(const MergeMemNode* other = NULL);
  // push sentinels until I am at least as long as the other (semantic no-op)
  void grow_to_match(const MergeMemNode* other);
  bool verify_sparse() const PRODUCT_RETURN0;
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};
1182
//------------------------------MergeMemStream---------------------------------
// Iterator over the memory slices of a MergeMemNode, or over two MergeMemNodes
// in parallel.  Iteration starts just before the base memory slice
// (Compile::AliasIdxBot) and then visits each narrow memory input in alias
// index order.  All reads and updates of the merge should go through this
// stream (or memory_at), so that flaws in sparseness are never observed.
class MergeMemStream : public StackObj {
 private:
  MergeMemNode* _mm;        // the merge being traversed (updated via set_memory)
  const MergeMemNode* _mm2; // optional second guy, contributes non-empty iterations
  Node* _mm_base; // loop-invariant base memory of _mm
  int _idx;       // current alias index; Compile::AliasIdxBot when at base memory
  int _cnt;       // iteration limit: _mm->req()
  Node* _mem;     // current slice of _mm (top denotes an empty slice; see is_empty)
  Node* _mem2;    // current slice of _mm2, when iterating in parallel
  int _cnt2;      // iteration limit for _mm2: _mm2->req()

  // Shared constructor body: record both merges and position the stream
  // one step before the base memory slice.
  void init(MergeMemNode* mm, const MergeMemNode* mm2 = NULL) {
    // subsume_node will break sparseness at times, whenever a memory slice
    // folds down to a copy of the base ("fat") memory. In such a case,
    // the raw edge will update to base, although it should be top.
    // This iterator will recognize either top or base_memory as an
    // "empty" slice. See is_empty, is_empty2, and next below.
    //
    // The sparseness property is repaired in MergeMemNode::Ideal.
    // As long as access to a MergeMem goes through this iterator
    // or the memory_at accessor, flaws in the sparseness will
    // never be observed.
    //
    // Also, iteration_setup repairs sparseness.
    assert(mm->verify_sparse(), "please, no dups of base");
    assert(mm2==NULL || mm2->verify_sparse(), "please, no dups of base");

    _mm = mm;
    _mm_base = mm->base_memory();
    _mm2 = mm2;
    _cnt = mm->req();
    _idx = Compile::AliasIdxBot-1; // start at the base memory
    _mem = NULL;
    _mem2 = NULL;
  }

#ifdef ASSERT
  // Debug-only: recompute, from _mm itself, what the current slice should be.
  Node* check_memory() const {
    if (at_base_memory())
      return _mm->base_memory();
    else if ((uint)_idx < _mm->req() && !_mm->in(_idx)->is_top())
      return _mm->memory_at(_idx);
    else
      return _mm_base;
  }
  // Debug-only: the same recomputation for the second merge, _mm2.
  Node* check_memory2() const {
    return at_base_memory()? _mm2->base_memory(): _mm2->memory_at(_idx);
  }
#endif

  static bool match_memory(Node* mem, const MergeMemNode* mm, int idx) PRODUCT_RETURN0;
  // Verify that _mm has not been modified behind the stream's back.
  void assert_synch() const {
    assert(!_mem || _idx >= _cnt || match_memory(_mem, _mm, _idx),
           "no side-effects except through the stream");
  }

 public:

  // expected usages:
  // for (MergeMemStream mms(mem->is_MergeMem()); next_non_empty(); ) { ... }
  // for (MergeMemStream mms(mem1, mem2); next_non_empty2(); ) { ... }

  // iterate over one merge
  MergeMemStream(MergeMemNode* mm) {
    mm->iteration_setup();
    init(mm);
    debug_only(_cnt2 = 999);
  }
  // iterate in parallel over two merges
  // only iterates through non-empty elements of mm2
  MergeMemStream(MergeMemNode* mm, const MergeMemNode* mm2) {
    assert(mm2, "second argument must be a MergeMem also");
    ((MergeMemNode*)mm2)->iteration_setup(); // update hidden state
    mm->iteration_setup(mm2);
    init(mm, mm2);
    _cnt2 = mm2->req();
  }
#ifdef ASSERT
  ~MergeMemStream() {
    assert_synch();
  }
#endif

  // the merge being traversed
  MergeMemNode* all_memory() const {
    return _mm;
  }
  // the loop-invariant base ("fat") memory of _mm
  Node* base_memory() const {
    assert(_mm_base == _mm->base_memory(), "no update to base memory, please");
    return _mm_base;
  }
  // the second merge (parallel iteration only)
  const MergeMemNode* all_memory2() const {
    assert(_mm2 != NULL, "");
    return _mm2;
  }
  bool at_base_memory() const {
    return _idx == Compile::AliasIdxBot;
  }
  // alias index of the current slice (AliasIdxBot when at base memory)
  int alias_idx() const {
    assert(_mem, "must call next 1st");
    return _idx;
  }

  // address type corresponding to the current alias index
  const TypePtr* adr_type() const {
    return Compile::current()->get_adr_type(alias_idx());
  }

  const TypePtr* adr_type(Compile* C) const {
    return C->get_adr_type(alias_idx());
  }
  // true if the current slice of _mm is the empty sentinel (top)
  bool is_empty() const {
    assert(_mem, "must call next 1st");
    assert(_mem->is_top() == (_mem==_mm->empty_memory()), "correct sentinel");
    return _mem->is_top();
  }
  // true if the current slice of _mm2 is the empty sentinel (top)
  bool is_empty2() const {
    assert(_mem2, "must call next 1st");
    assert(_mem2->is_top() == (_mem2==_mm2->empty_memory()), "correct sentinel");
    return _mem2->is_top();
  }
  // the current (non-empty) slice of _mm
  Node* memory() const {
    assert(!is_empty(), "must not be empty");
    assert_synch();
    return _mem;
  }
  // get the current memory, regardless of empty or non-empty status
  Node* force_memory() const {
    assert(!is_empty() || !at_base_memory(), "");
    // Use _mm_base to defend against updates to _mem->base_memory().
    Node *mem = _mem->is_top() ? _mm_base : _mem;
    assert(mem == check_memory(), "");
    return mem;
  }
  // the current slice of _mm2
  Node* memory2() const {
    assert(_mem2 == check_memory2(), "");
    return _mem2;
  }
  // replace the current slice of _mm with mem, keeping the stream in synch
  void set_memory(Node* mem) {
    if (at_base_memory()) {
      // Note that this does not change the invariant _mm_base.
      _mm->set_base_memory(mem);
    } else {
      _mm->set_memory_at(_idx, mem);
    }
    _mem = mem;
    assert_synch();
  }

  // Recover from a side effect to the MergeMemNode.
  void set_memory() {
    _mem = _mm->in(_idx);
  }

  bool next() { return next(false); }
  bool next2() { return next(true); }

  bool next_non_empty() { return next_non_empty(false); }
  bool next_non_empty2() { return next_non_empty(true); }
  // next_non_empty2 can yield states where is_empty() is true

 private:
  // find the next item, which might be empty
  bool next(bool have_mm2) {
    assert((_mm2 != NULL) == have_mm2, "use other next");
    assert_synch();
    if (++_idx < _cnt) {
      // Note: This iterator allows _mm to be non-sparse.
      // It behaves the same whether _mem is top or base_memory.
      _mem = _mm->in(_idx);
      if (have_mm2)
        _mem2 = _mm2->in((_idx < _cnt2) ? _idx : Compile::AliasIdxTop);
      return true;
    }
    return false;
  }

  // find the next non-empty item
  bool next_non_empty(bool have_mm2) {
    while (next(have_mm2)) {
      if (!is_empty()) {
        // make sure _mem2 is filled in sensibly
        if (have_mm2 && _mem2->is_top()) _mem2 = _mm2->base_memory();
        return true;
      } else if (have_mm2 && !is_empty2()) {
        return true; // is_empty() == true
      }
    }
    return false;
  }
};
1372
1373//------------------------------Prefetch---------------------------------------
1374
1375// Non-faulting prefetch load. Prefetch for many reads.
1376class PrefetchReadNode : public Node {
1377public:
1378 PrefetchReadNode(Node *abio, Node *adr) : Node(0,abio,adr) {}
1379 virtual int Opcode() const;
1380 virtual uint ideal_reg() const { return NotAMachineReg; }
1381 virtual uint match_edge(uint idx) const { return idx==2; }
1382 virtual const Type *bottom_type() const { return Type::ABIO; }
1383};
1384
1385// Non-faulting prefetch load. Prefetch for many reads & many writes.
1386class PrefetchWriteNode : public Node {
1387public:
1388 PrefetchWriteNode(Node *abio, Node *adr) : Node(0,abio,adr) {}
1389 virtual int Opcode() const;
1390 virtual uint ideal_reg() const { return NotAMachineReg; }
1391 virtual uint match_edge(uint idx) const { return idx==2; }
Vladimir Kozlov90651b22011-08-16 16:59:46 -07001392 virtual const Type *bottom_type() const { return Type::ABIO; }
1393};
1394
1395// Allocation prefetch which may fault, TLAB size have to be adjusted.
1396class PrefetchAllocationNode : public Node {
1397public:
1398 PrefetchAllocationNode(Node *mem, Node *adr) : Node(0,mem,adr) {}
1399 virtual int Opcode() const;
1400 virtual uint ideal_reg() const { return NotAMachineReg; }
1401 virtual uint match_edge(uint idx) const { return idx==2; }
Vladimir Kozlovf6934fd2010-04-07 12:39:27 -07001402 virtual const Type *bottom_type() const { return ( AllocatePrefetchStyle == 3 ) ? Type::MEMORY : Type::ABIO; }
J. Duke81537792007-12-01 00:00:00 +00001403};
Stefan Karlsson8006fe82010-11-23 13:22:55 -08001404
1405#endif // SHARE_VM_OPTO_MEMNODE_HPP