  }
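  // No constant: sign-extend to long, then mask off the upper 32 bits to
  // yield the zero-extended (unsigned) value of the int offset.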
  Node* conv = _gvn.transform( new ConvI2LNode(offset));
  Node* mask = _gvn.transform(ConLNode::make((julong) max_juint));
  return _gvn.transform( new AndLNode(conv, mask) );
}

Node* GraphKit::ConvL2I(Node* offset) {
  // short-circuit a common case
  jlong offset_con = find_long_con(offset, (jlong)Type::OffsetBot);
  if (offset_con != (jlong)Type::OffsetBot) {
    return intcon((int) offset_con);
  }
  return _gvn.transform( new ConvL2INode(offset));
}

//-------------------------load_object_klass-----------------------------------
Node* GraphKit::load_object_klass(Node* obj) {
  // Special-case a fresh allocation to avoid building nodes:
  Node* akls = AllocateNode::Ideal_klass(obj, &_gvn);
  if (akls != nullptr) return akls;
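  // Otherwise load the klass pointer from the object's header.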
  Node* k_adr = basic_plus_adr(obj, oopDesc::klass_offset_in_bytes());
  return _gvn.transform(LoadKlassNode::make(_gvn, immutable_memory(), k_adr, TypeInstPtr::KLASS));
}

//-------------------------load_array_length-----------------------------------
Node* GraphKit::load_array_length(Node* array) {
  // Special-case a fresh allocation to avoid building nodes:
  AllocateArrayNode* alloc = AllocateArrayNode::Ideal_array_allocation(array);
  Node *alen;
  if (alloc == nullptr) {
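    // No dominating allocation found: load the length from the arrayOop's length field.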
    Node *r_adr = basic_plus_adr(array, arrayOopDesc::length_offset_in_bytes());
    alen = _gvn.transform( new LoadRangeNode(nullptr, immutable_memory(), r_adr, TypeInt::POS));
  } else {
    alen = array_ideal_length(alloc, _gvn.type(array)->is_oopptr(), false);
  }
  return alen;
}

Node* GraphKit::array_ideal_length(AllocateArrayNode* alloc,
                                   const TypeOopPtr* oop_type,
                                   bool replace_length_in_map) {
  // ... [body of array_ideal_length and intervening code elided] ...
  // put in an initialization barrier
  InitializeNode* init = insert_mem_bar_volatile(Op_Initialize, rawidx,
                                                 rawoop)->as_Initialize();
  assert(alloc->initialization() == init, "2-way macro link must work");
  assert(init ->allocation() == alloc, "2-way macro link must work");
  {
    // Extract memory strands which may participate in the new object's
    // initialization, and source them from the new InitializeNode.
    // This will allow us to observe initializations when they occur,
    // and link them properly (as a group) to the InitializeNode.
    assert(init->in(InitializeNode::Memory) == malloc, "");
    MergeMemNode* minit_in = MergeMemNode::make(malloc);
    init->set_req(InitializeNode::Memory, minit_in);
    record_for_igvn(minit_in); // fold it up later, if possible
    Node* minit_out = memory(rawidx);
    assert(minit_out->is_Proj() && minit_out->in(0) == init, "");
    // Add an edge in the MergeMem for the header fields so an access
    // to one of those has correct memory state
    set_memory(minit_out, C->get_alias_index(oop_type->add_offset(oopDesc::mark_offset_in_bytes())));
    set_memory(minit_out, C->get_alias_index(oop_type->add_offset(oopDesc::klass_offset_in_bytes())));
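    // For arrays, the elements share one alias category (offset == Type::OffsetBot);
    // hook that element slice on the InitializeNode as well.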
    if (oop_type->isa_aryptr()) {
      const TypePtr* telemref = oop_type->add_offset(Type::OffsetBot);
      int elemidx = C->get_alias_index(telemref);
      hook_memory_on_init(*this, elemidx, minit_in, minit_out);
    } else if (oop_type->isa_instptr()) {
      ciInstanceKlass* ik = oop_type->is_instptr()->instance_klass();
      for (int i = 0, len = ik->nof_nonstatic_fields(); i < len; i++) {
        ciField* field = ik->nonstatic_field_at(i);
        if (field->offset_in_bytes() >= TrackedInitializationLimit * HeapWordSize)
          continue; // do not bother to track really large numbers of fields
        // Find (or create) the alias category for this field:
        int fieldidx = C->alias_type(field)->index();
        hook_memory_on_init(*this, fieldidx, minit_in, minit_out);
      }
    }
  }

  // Cast raw oop to the real thing...
  Node* javaoop = new CheckCastPPNode(control(), rawoop, oop_type);
  javaoop = _gvn.transform(javaoop);