12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "c1/c1_Canonicalizer.hpp"
26 #include "c1/c1_CFGPrinter.hpp"
27 #include "c1/c1_Compilation.hpp"
28 #include "c1/c1_GraphBuilder.hpp"
29 #include "c1/c1_InstructionPrinter.hpp"
30 #include "ci/ciCallSite.hpp"
31 #include "ci/ciField.hpp"
32 #include "ci/ciKlass.hpp"
33 #include "ci/ciMemberName.hpp"
34 #include "ci/ciSymbols.hpp"
35 #include "ci/ciUtilities.inline.hpp"
36 #include "classfile/javaClasses.hpp"
37 #include "compiler/compilationPolicy.hpp"
38 #include "compiler/compileBroker.hpp"
39 #include "compiler/compilerEvent.hpp"
40 #include "interpreter/bytecode.hpp"
41 #include "jfr/jfrEvents.hpp"
42 #include "memory/resourceArea.hpp"
43 #include "runtime/sharedRuntime.hpp"
44 #include "utilities/checkedCast.hpp"
45 #include "utilities/macros.hpp"
46 #if INCLUDE_JFR
47 #include "jfr/jfr.hpp"
48 #endif
49
50 class BlockListBuilder {
51 private:
1030 // they are using this local. We don't handle skipping over a
1031 // ret.
1032 for (ScopeData* cur_scope_data = scope_data()->parent();
1033 cur_scope_data != nullptr && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
1034 cur_scope_data = cur_scope_data->parent()) {
1035 if (cur_scope_data->jsr_return_address_local() == index) {
1036 BAILOUT("subroutine overwrites return address from previous subroutine");
1037 }
1038 }
1039 } else if (index == scope_data()->jsr_return_address_local()) {
1040 scope_data()->set_jsr_return_address_local(-1);
1041 }
1042 }
1043
1044 state->store_local(index, x);
1045 }
1046
1047
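// Translate an array-load bytecode: pop the index and array reference,
// optionally materialize an ArrayLength node (when CSEArrayLength is set or
// the length is known to be constant) so later range check elimination can
// reuse it, and push the loaded value as a LoadIndexed instruction.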
1048 void GraphBuilder::load_indexed(BasicType type) {
1050   // In case of in-block code motion during range check elimination, the state before the access is needed
1050 ValueStack* state_before = copy_state_indexed_access();
1051 compilation()->set_has_access_indexed(true);
1052 Value index = ipop();
1053 Value array = apop();
1054 Value length = nullptr;
1055 if (CSEArrayLength ||
1056 (array->as_Constant() != nullptr) ||
1057 (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1058 (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1059 (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1060 length = append(new ArrayLength(array, state_before));
1061 }
1062 push(as_ValueType(type), append(new LoadIndexed(array, index, length, type, state_before)));
1063 }
1064
1065
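// Translate an array-store bytecode: pop value, index and array, mask stores
// into boolean arrays down to 0/1 (bastore is shared by byte[] and boolean[]),
// record the stored value in the memory buffer, and attach type profiling for
// reference stores when checkcast profiling is enabled.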
1066 void GraphBuilder::store_indexed(BasicType type) {
1067   // In case of in-block code motion during range check elimination, the state before the access is needed
1068 ValueStack* state_before = copy_state_indexed_access();
1069 compilation()->set_has_access_indexed(true);
1070 Value value = pop(as_ValueType(type));
1071 Value index = ipop();
1072 Value array = apop();
1073 Value length = nullptr;
1074 if (CSEArrayLength ||
1075 (array->as_Constant() != nullptr) ||
1076 (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1077 (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1078 (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1079 length = append(new ArrayLength(array, state_before));
1080 }
1081 ciType* array_type = array->declared_type();
1082 bool check_boolean = false;
1083 if (array_type != nullptr) {
1084 if (array_type->is_loaded() &&
1085 array_type->as_array_klass()->element_type()->basic_type() == T_BOOLEAN) {
1086 assert(type == T_BYTE, "boolean store uses bastore");
1087 Value mask = append(new Constant(new IntConstant(1)));
1088 value = append(new LogicOp(Bytecodes::_iand, value, mask));
1089 }
1090 } else if (type == T_BYTE) {
1091 check_boolean = true;
1092 }
1093 StoreIndexed* result = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
1094 append(result);
1095 _memory->store_value(value);
1096
1097 if (type == T_OBJECT && is_profiling()) {
1098 // Note that we'd collect profile data in this method if we wanted it.
1099 compilation()->set_would_profile(true);
1100
1101 if (profile_checkcasts()) {
1102 result->set_profiled_method(method());
1103 result->set_profiled_bci(bci());
1104 result->set_should_profile(true);
1105 }
1106 }
1107 }
1108
1109
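// Implement the untyped stack manipulation bytecodes (pop, pop2, dup,
// dup_x1, dup_x2, ...) with raw slot pushes and pops, mirroring the
// slot-based semantics of these bytecodes.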
1110 void GraphBuilder::stack_op(Bytecodes::Code code) {
1111 switch (code) {
1112 case Bytecodes::_pop:
1113 { state()->raw_pop();
1114 }
1115 break;
1116 case Bytecodes::_pop2:
1117 { state()->raw_pop();
1118 state()->raw_pop();
1119 }
1120 break;
1121 case Bytecodes::_dup:
1122 { Value w = state()->raw_pop();
1123 state()->raw_push(w);
1124 state()->raw_push(w);
1125 }
1126 break;
1127 case Bytecodes::_dup_x1:
1128 { Value w1 = state()->raw_pop();
1129 Value w2 = state()->raw_pop();
1130 state()->raw_push(w1);
1131 state()->raw_push(w2);
1132 state()->raw_push(w1);
1133 }
1134 break;
1135 case Bytecodes::_dup_x2:
1136 { Value w1 = state()->raw_pop();
1137 Value w2 = state()->raw_pop();
1138 Value w3 = state()->raw_pop();
1274
1275
1276 void GraphBuilder::_goto(int from_bci, int to_bci) {
1277 Goto *x = new Goto(block_at(to_bci), to_bci <= from_bci);
1278 if (is_profiling()) {
1279 compilation()->set_would_profile(true);
1280 x->set_profiled_bci(bci());
1281 if (profile_branches()) {
1282 x->set_profiled_method(method());
1283 x->set_should_profile(true);
1284 }
1285 }
1286 append(x);
1287 }
1288
1289
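// Build an If instruction for a two-way branch: the successors come from the
// bytecode stream, state_before is only kept when a backward branch (which
// implies a safepoint) or optimistic optimizations may need it, and branch
// profiling information is attached when profiling is enabled.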
1290 void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
1291 BlockBegin* tsux = block_at(stream()->get_dest());
1292 BlockBegin* fsux = block_at(stream()->next_bci());
1293 bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
1294 // In case of loop invariant code motion or predicate insertion
1295 // before the body of a loop the state is needed
1296 Instruction *i = append(new If(x, cond, false, y, tsux, fsux, (is_bb || compilation()->is_optimistic()) ? state_before : nullptr, is_bb));
1297
1298 assert(i->as_Goto() == nullptr ||
1299 (i->as_Goto()->sux_at(0) == tsux && i->as_Goto()->is_safepoint() == (tsux->bci() < stream()->cur_bci())) ||
1300 (i->as_Goto()->sux_at(0) == fsux && i->as_Goto()->is_safepoint() == (fsux->bci() < stream()->cur_bci())),
1301 "safepoint state of Goto returned by canonicalizer incorrect");
1302
1303 if (is_profiling()) {
1304 If* if_node = i->as_If();
1305 if (if_node != nullptr) {
1306 // Note that we'd collect profile data in this method if we wanted it.
1307 compilation()->set_would_profile(true);
1308 // At level 2 we need the proper bci to count backedges
1309 if_node->set_profiled_bci(bci());
1310 if (profile_branches()) {
1311 // Successors can be rotated by the canonicalizer, check for this case.
1312 if_node->set_profiled_method(method());
1313 if_node->set_should_profile(true);
1314 if (if_node->tsux() == fsux) {
1315 if_node->set_swapped(true);
1316 }
1531 }
1532
1533 if (needs_check) {
1534 // Perform the registration of finalizable objects.
1535 ValueStack* state_before = copy_state_for_exception();
1536 load_local(objectType, 0);
1537 append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init,
1538 state()->pop_arguments(1),
1539 true, state_before, true));
1540 }
1541 }
1542
1543
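// Handle a return bytecode: Object.<init> gets the finalizer registration
// check, constructors that wrote final/stable/volatile fields may need a
// releasing memory barrier (see Parse::do_exits()), and sub-int return values
// are normalized, e.g. a T_BYTE result is shifted left and then arithmetically
// right by 24 bits to sign-extend its low 8 bits.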
1544 void GraphBuilder::method_return(Value x, bool ignore_return) {
1545 if (method()->intrinsic_id() == vmIntrinsics::_Object_init) {
1546 call_register_finalizer();
1547 }
1548
1549 // The conditions for a memory barrier are described in Parse::do_exits().
1550 bool need_mem_bar = false;
1551 if (method()->name() == ciSymbols::object_initializer_name() &&
1552 (scope()->wrote_final() || scope()->wrote_stable() ||
1553 (AlwaysSafeConstructors && scope()->wrote_fields()) ||
1554 (support_IRIW_for_not_multiple_copy_atomic_cpu && scope()->wrote_volatile()))) {
1555 need_mem_bar = true;
1556 }
1557
1558 BasicType bt = method()->return_type()->basic_type();
1559 switch (bt) {
1560 case T_BYTE:
1561 {
1562 Value shift = append(new Constant(new IntConstant(24)));
1563 x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1564 x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1565 break;
1566 }
1567 case T_SHORT:
1568 {
1569 Value shift = append(new Constant(new IntConstant(16)));
1570 x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1571 x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1682 // Attach dimension info to stable arrays.
1683 if (FoldStableValues &&
1684 field->is_stable() && field_type == T_ARRAY && !field_value.is_null_or_zero()) {
1685 ciArray* array = field_value.as_object()->as_array();
1686 jint dimension = field->type()->as_array_klass()->dimension();
1687 value = new StableArrayConstant(array, dimension);
1688 }
1689
1690 switch (field_type) {
1691 case T_ARRAY:
1692 case T_OBJECT:
1693 if (field_value.as_object()->should_be_constant()) {
1694 return new Constant(value);
1695 }
1696 return nullptr; // Not a constant.
1697 default:
1698 return new Constant(value);
1699 }
1700 }
1701
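// Translate getstatic/putstatic/getfield/putfield: fold compile-time constant
// fields (initialized static finals and trusted finals of constant receivers)
// into Constant nodes, mask boolean stores to 0/1, and emit patchable
// instructions with a saved state when the holder is unloaded or
// uninitialized.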
1702 void GraphBuilder::access_field(Bytecodes::Code code) {
1703 bool will_link;
1704 ciField* field = stream()->get_field(will_link);
1705 ciInstanceKlass* holder = field->holder();
1706 BasicType field_type = field->type()->basic_type();
1707 ValueType* type = as_ValueType(field_type);
1708 // call will_link again to determine if the field is valid.
1709 const bool needs_patching = !holder->is_loaded() ||
1710 !field->will_link(method(), code) ||
1711 PatchALot;
1712
1713 ValueStack* state_before = nullptr;
1714 if (!holder->is_initialized() || needs_patching) {
1715 // save state before instruction for debug info when
1716 // deoptimization happens during patching
1717 state_before = copy_state_before();
1718 }
1719
1720 Value obj = nullptr;
1721 if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) {
1722 if (state_before != nullptr) {
1723 // build a patching constant
1724 obj = new Constant(new InstanceConstant(holder->java_mirror()), state_before);
1725 } else {
1726 obj = new Constant(new InstanceConstant(holder->java_mirror()));
1727 }
1728 }
1729
1730 if (code == Bytecodes::_putfield) {
1731 scope()->set_wrote_fields();
1732 if (field->is_volatile()) {
1733 scope()->set_wrote_volatile();
1734 }
1735 if (field->is_final()) {
1736 scope()->set_wrote_final();
1737 }
1738 if (field->is_stable()) {
1739 scope()->set_wrote_stable();
1740 }
1741 }
1742
1743 const int offset = !needs_patching ? field->offset_in_bytes() : -1;
1744 switch (code) {
1745 case Bytecodes::_getstatic: {
1746 // check for compile-time constants, i.e., initialized static final fields
1747 Value constant = nullptr;
1748 if (field->is_static_constant() && !PatchALot) {
1749 ciConstant field_value = field->constant_value();
1750 assert(!field->is_stable() || !field_value.is_null_or_zero(),
1751 "stable static w/ default value shouldn't be a constant");
1752 constant = make_constant(field_value, field);
1753 }
1754 if (constant != nullptr) {
1755 push(type, append(constant));
1756 } else {
1757 if (state_before == nullptr) {
1758 state_before = copy_state_for_exception();
1759 }
1760 push(type, append(new LoadField(append(obj), offset, field, true,
1761 state_before, needs_patching)));
1762 }
1763 break;
1764 }
1765 case Bytecodes::_putstatic: {
1766 Value val = pop(type);
1767 if (state_before == nullptr) {
1768 state_before = copy_state_for_exception();
1769 }
1770 if (field->type()->basic_type() == T_BOOLEAN) {
1771 Value mask = append(new Constant(new IntConstant(1)));
1772 val = append(new LogicOp(Bytecodes::_iand, val, mask));
1773 }
1774 append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
1775 break;
1776 }
1777 case Bytecodes::_getfield: {
1778 // Check for compile-time constants, i.e., trusted final non-static fields.
1779 Value constant = nullptr;
1780 obj = apop();
1781 ObjectType* obj_type = obj->type()->as_ObjectType();
1782 if (field->is_constant() && obj_type->is_constant() && !PatchALot) {
1783 ciObject* const_oop = obj_type->constant_value();
1784 if (!const_oop->is_null_object() && const_oop->is_loaded()) {
1785 ciConstant field_value = field->constant_value_of(const_oop);
1786 if (field_value.is_valid()) {
1787 constant = make_constant(field_value, field);
1788 // For CallSite objects add a dependency for invalidation of the optimization.
1789 if (field->is_call_site_target()) {
1790 ciCallSite* call_site = const_oop->as_call_site();
1791 if (!call_site->is_fully_initialized_constant_call_site()) {
1792 ciMethodHandle* target = field_value.as_object()->as_method_handle();
1793 dependency_recorder()->assert_call_site_target_value(call_site, target);
1794 }
1795 }
1796 }
1797 }
1798 }
1799 if (constant != nullptr) {
1800 push(type, append(constant));
1801 } else {
1802 if (state_before == nullptr) {
1803 state_before = copy_state_for_exception();
1804 }
1805 LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
1806 Value replacement = !needs_patching ? _memory->load(load) : load;
1807 if (replacement != load) {
1808           assert(replacement->is_linked() || !replacement->can_be_linked(), "should already be linked");
1809 // Writing an (integer) value to a boolean, byte, char or short field includes an implicit narrowing
1810 // conversion. Emit an explicit conversion here to get the correct field value after the write.
1811 BasicType bt = field->type()->basic_type();
1812 switch (bt) {
1813 case T_BOOLEAN:
1814 case T_BYTE:
1815 replacement = append(new Convert(Bytecodes::_i2b, replacement, as_ValueType(bt)));
1816 break;
1817 case T_CHAR:
1818 replacement = append(new Convert(Bytecodes::_i2c, replacement, as_ValueType(bt)));
1819 break;
1820 case T_SHORT:
1821 replacement = append(new Convert(Bytecodes::_i2s, replacement, as_ValueType(bt)));
1822 break;
1823 default:
1824 break;
1825 }
1826 push(type, replacement);
1827 } else {
1828 push(type, append(load));
1829 }
1830 }
1831 break;
1832 }
1833 case Bytecodes::_putfield: {
1834 Value val = pop(type);
1835 obj = apop();
1836 if (state_before == nullptr) {
1837 state_before = copy_state_for_exception();
1838 }
1839 if (field->type()->basic_type() == T_BOOLEAN) {
1840 Value mask = append(new Constant(new IntConstant(1)));
1841 val = append(new LogicOp(Bytecodes::_iand, val, mask));
1842 }
1843 StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
1844 if (!needs_patching) store = _memory->store(store);
1845 if (store != nullptr) {
1846 append(store);
1847 }
1848 break;
1849 }
1850 default:
1851 ShouldNotReachHere();
1852 break;
1853 }
1854 }
1855
1856
1857 Dependencies* GraphBuilder::dependency_recorder() const {
1858 assert(DeoptC1, "need debug information");
1859 return compilation()->dependency_recorder();
1860 }
1861
1862 // How many arguments do we want to profile?
1863 Values* GraphBuilder::args_list_for_profiling(ciMethod* target, int& start, bool may_have_receiver) {
1864 int n = 0;
1865 bool has_receiver = may_have_receiver && Bytecodes::has_receiver(method()->java_code_at_bci(bci()));
1866 start = has_receiver ? 1 : 0;
1867 if (profile_arguments()) {
1868 ciProfileData* data = method()->method_data()->bci_to_data(bci());
1869 if (data != nullptr && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
1870 n = data->is_CallTypeData() ? data->as_CallTypeData()->number_of_arguments() : data->as_VirtualCallTypeData()->number_of_arguments();
1871 }
1872 }
1873 // If we are inlining then we need to collect arguments to profile parameters for the target
1874 if (profile_parameters() && target != nullptr) {
1875 if (target->method_data() != nullptr && target->method_data()->parameters_type_data() != nullptr) {
1876 // The receiver is profiled on method entry so it's included in
1953 break;
1954 case Bytecodes::_invokehandle:
1955 code = target->is_static() ? Bytecodes::_invokestatic : Bytecodes::_invokespecial;
1956 break;
1957 default:
1958 break;
1959 }
1960 } else {
1961 if (bc_raw == Bytecodes::_invokehandle) {
1962 assert(!will_link, "should come here only for unlinked call");
1963 code = Bytecodes::_invokespecial;
1964 }
1965 }
1966
1967 if (code == Bytecodes::_invokespecial) {
1968 // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
1969 ciKlass* receiver_constraint = nullptr;
1970
1971 if (bc_raw == Bytecodes::_invokeinterface) {
1972 receiver_constraint = holder;
1973 } else if (bc_raw == Bytecodes::_invokespecial && !target->is_object_initializer() && calling_klass->is_interface()) {
1974 receiver_constraint = calling_klass;
1975 }
1976
1977 if (receiver_constraint != nullptr) {
1978 int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
1979 Value receiver = state()->stack_at(index);
1980 CheckCast* c = new CheckCast(receiver_constraint, receiver, copy_state_before());
1981 // go to uncommon_trap when checkcast fails
1982 c->set_invokespecial_receiver_check();
1983 state()->stack_at_put(index, append_split(c));
1984 }
1985 }
1986
1987   // Push the appendix argument (MethodType, CallSite, etc.), if there is one.
1988 bool patch_for_appendix = false;
1989 int patching_appendix_arg = 0;
1990 if (Bytecodes::has_optional_appendix(bc_raw) && (!will_link || PatchALot)) {
1991 Value arg = append(new Constant(new ObjectConstant(compilation()->env()->unloaded_ciinstance()), copy_state_before()));
1992 apush(arg);
1993 patch_for_appendix = true;
2209 }
2210 }
2211
2212 Invoke* result = new Invoke(code, result_type, recv, args, target, state_before);
2213 // push result
2214 append_split(result);
2215
2216 if (result_type != voidType) {
2217 push(result_type, result);
2218 }
2219 if (profile_return() && result_type->is_object_kind()) {
2220 profile_return_type(result, target);
2221 }
2222 }
2223
2224
2225 void GraphBuilder::new_instance(int klass_index) {
2226 ValueStack* state_before = copy_state_exhandling();
2227 ciKlass* klass = stream()->get_klass();
2228 assert(klass->is_instance_klass(), "must be an instance klass");
2229 NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass());
2230 _memory->new_instance(new_instance);
2231 apush(append_split(new_instance));
2232 }
2233
2234
2235 void GraphBuilder::new_type_array() {
2236 ValueStack* state_before = copy_state_exhandling();
2237 apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before, true)));
2238 }
2239
2240
2241 void GraphBuilder::new_object_array() {
2242 ciKlass* klass = stream()->get_klass();
2243 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2244 NewArray* n = new NewObjectArray(klass, ipop(), state_before);
2245 apush(append_split(n));
2246 }
2247
2248
2249 bool GraphBuilder::direct_compare(ciKlass* k) {
2250 if (k->is_loaded() && k->is_instance_klass() && !UseSlowPath) {
2251 ciInstanceKlass* ik = k->as_instance_klass();
2252 if (ik->is_final()) {
2253 return true;
2254 } else {
2287 ciKlass* klass = stream()->get_klass();
2288 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2289 InstanceOf* i = new InstanceOf(klass, apop(), state_before);
2290 ipush(append_split(i));
2291 i->set_direct_compare(direct_compare(klass));
2292
2293 if (is_profiling()) {
2294 // Note that we'd collect profile data in this method if we wanted it.
2295 compilation()->set_would_profile(true);
2296
2297 if (profile_checkcasts()) {
2298 i->set_profiled_method(method());
2299 i->set_profiled_bci(bci());
2300 i->set_should_profile(true);
2301 }
2302 }
2303 }
2304
2305
2306 void GraphBuilder::monitorenter(Value x, int bci) {
2307 // save state before locking in case of deoptimization after a NullPointerException
2308 ValueStack* state_before = copy_state_for_exception_with_bci(bci);
2309 append_with_bci(new MonitorEnter(x, state()->lock(x), state_before), bci);
2310 kill_all();
2311 }
2312
2313
2314 void GraphBuilder::monitorexit(Value x, int bci) {
2315 append_with_bci(new MonitorExit(x, state()->unlock()), bci);
2316 kill_all();
2317 }
2318
2319
2320 void GraphBuilder::new_multi_array(int dimensions) {
2321 ciKlass* klass = stream()->get_klass();
2322 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2323
2324 Values* dims = new Values(dimensions, dimensions, nullptr);
2325 // fill in all dimensions
2326 int i = dimensions;
2327 while (i-- > 0) dims->at_put(i, ipop());
2328 // create array
2329 NewArray* n = new NewMultiArray(klass, dims, state_before);
2414
2415 Instruction* GraphBuilder::append_split(StateSplit* instr) {
2416 return append_with_bci(instr, bci());
2417 }
2418
2419
2420 void GraphBuilder::null_check(Value value) {
2421 if (value->as_NewArray() != nullptr || value->as_NewInstance() != nullptr) {
2422 return;
2423 } else {
2424 Constant* con = value->as_Constant();
2425 if (con) {
2426 ObjectType* c = con->type()->as_ObjectType();
2427 if (c && c->is_loaded()) {
2428 ObjectConstant* oc = c->as_ObjectConstant();
2429 if (!oc || !oc->value()->is_null_object()) {
2430 return;
2431 }
2432 }
2433 }
2434 }
2435 append(new NullCheck(value, copy_state_for_exception()));
2436 }
2437
2438
2439
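// Collect the exception handlers that may handle an exception thrown by the
// given instruction: walk the scope chain from the innermost scope outwards
// and join every handler whose range covers the current bci, bailing out if a
// handler's entry block is the block currently being parsed.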
2440 XHandlers* GraphBuilder::handle_exception(Instruction* instruction) {
2441 if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != nullptr)) {
2442 assert(instruction->exception_state() == nullptr
2443 || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState
2444 || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->should_retain_local_variables()),
2445 "exception_state should be of exception kind");
2446 return new XHandlers();
2447 }
2448
2449 XHandlers* exception_handlers = new XHandlers();
2450 ScopeData* cur_scope_data = scope_data();
2451 ValueStack* cur_state = instruction->state_before();
2452 ValueStack* prev_state = nullptr;
2453 int scope_count = 0;
2454
2455 assert(cur_state != nullptr, "state_before must be set");
2456 do {
2457 int cur_bci = cur_state->bci();
2458 assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
2459 assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci(), "invalid bci");
2460
2461 // join with all potential exception handlers
2462 XHandlers* list = cur_scope_data->xhandlers();
2463 const int n = list->length();
2464 for (int i = 0; i < n; i++) {
2465 XHandler* h = list->handler_at(i);
2466 if (h->covers(cur_bci)) {
2467 // h is a potential exception handler => join it
2468 compilation()->set_has_exception_handlers(true);
2469
2470 BlockBegin* entry = h->entry_block();
2471 if (entry == block()) {
2472 // It's acceptable for an exception handler to cover itself
2473 // but we don't handle that in the parser currently. It's
2474           // very rare so we bail out instead of trying to handle it.
2475 BAILOUT_("exception handler covers itself", exception_handlers);
2476 }
2477 assert(entry->bci() == h->handler_bci(), "must match");
2478 assert(entry->bci() == -1 || entry == cur_scope_data->block_at(entry->bci()), "blocks must correspond");
2479
3221 // the storage for the OSR buffer is freed manually in the LIRGenerator.
3222
3223 assert(state->caller_state() == nullptr, "should be top scope");
3224 state->clear_locals();
3225 Goto* g = new Goto(target, false);
3226 append(g);
3227 _osr_entry->set_end(g);
3228 target->merge(_osr_entry->end()->state(), compilation()->has_irreducible_loops());
3229
3230 scope_data()->set_stream(nullptr);
3231 }
3232
3233
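// Build the ValueStack describing the state on method entry: a Local is
// created for the receiver (unless the method is static) and for each declared
// parameter, reference types are widened to T_OBJECT for the local types, and
// a lock slot is reserved for synchronized methods.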
3234 ValueStack* GraphBuilder::state_at_entry() {
3235 ValueStack* state = new ValueStack(scope(), nullptr);
3236
3237 // Set up locals for receiver
3238 int idx = 0;
3239 if (!method()->is_static()) {
3240 // we should always see the receiver
3241 state->store_local(idx, new Local(method()->holder(), objectType, idx, true));
3242 idx = 1;
3243 }
3244
3245 // Set up locals for incoming arguments
3246 ciSignature* sig = method()->signature();
3247 for (int i = 0; i < sig->count(); i++) {
3248 ciType* type = sig->type_at(i);
3249 BasicType basic_type = type->basic_type();
3250 // don't allow T_ARRAY to propagate into locals types
3251 if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3252 ValueType* vt = as_ValueType(basic_type);
3253 state->store_local(idx, new Local(type, vt, idx, false));
3254 idx += type->size();
3255 }
3256
3257 // lock synchronized method
3258 if (method()->is_synchronized()) {
3259 state->lock(nullptr);
3260 }
3261
3262 return state;
3263 }
3264
3265
3266 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
3267 : _scope_data(nullptr)
3268 , _compilation(compilation)
3269 , _memory(new MemoryBuffer())
3270 , _inline_bailout_msg(nullptr)
3271 , _instruction_count(0)
3272 , _osr_entry(nullptr)
3273 {
3274 int osr_bci = compilation->osr_bci();
3275
3276 // determine entry points and bci2block mapping
3277 BlockListBuilder blm(compilation, scope, osr_bci);
3278 CHECK_BAILOUT();
3279
3280 BlockList* bci2block = blm.bci2block();
3281 BlockBegin* start_block = bci2block->at(0);
3282
3283 push_root_scope(scope, bci2block, start_block);
3284
3285 // setup state for std entry
3286 _initial_state = state_at_entry();
3287 start_block->merge(_initial_state, compilation->has_irreducible_loops());
3288
3289 // End nulls still exist here
3290
3291 // complete graph
3292 _vmap = new ValueMap();
|
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "c1/c1_Canonicalizer.hpp"
26 #include "c1/c1_CFGPrinter.hpp"
27 #include "c1/c1_Compilation.hpp"
28 #include "c1/c1_GraphBuilder.hpp"
29 #include "c1/c1_InstructionPrinter.hpp"
30 #include "ci/ciCallSite.hpp"
31 #include "ci/ciField.hpp"
32 #include "ci/ciFlatArrayKlass.hpp"
33 #include "ci/ciInlineKlass.hpp"
34 #include "ci/ciKlass.hpp"
35 #include "ci/ciMemberName.hpp"
36 #include "ci/ciSymbols.hpp"
37 #include "ci/ciUtilities.inline.hpp"
38 #include "classfile/javaClasses.hpp"
39 #include "compiler/compilationPolicy.hpp"
40 #include "compiler/compileBroker.hpp"
41 #include "compiler/compilerEvent.hpp"
42 #include "interpreter/bytecode.hpp"
43 #include "jfr/jfrEvents.hpp"
44 #include "memory/resourceArea.hpp"
45 #include "runtime/sharedRuntime.hpp"
46 #include "utilities/checkedCast.hpp"
47 #include "utilities/macros.hpp"
48 #if INCLUDE_JFR
49 #include "jfr/jfr.hpp"
50 #endif
51
52 class BlockListBuilder {
53 private:
1032 // they are using this local. We don't handle skipping over a
1033 // ret.
1034 for (ScopeData* cur_scope_data = scope_data()->parent();
1035 cur_scope_data != nullptr && cur_scope_data->parsing_jsr() && cur_scope_data->scope() == scope();
1036 cur_scope_data = cur_scope_data->parent()) {
1037 if (cur_scope_data->jsr_return_address_local() == index) {
1038 BAILOUT("subroutine overwrites return address from previous subroutine");
1039 }
1040 }
1041 } else if (index == scope_data()->jsr_return_address_local()) {
1042 scope_data()->set_jsr_return_address_local(-1);
1043 }
1044 }
1045
1046 state->store_local(index, x);
1047 }
1048
1049
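// Valhalla-aware array load. If the array may be flat, the full state is saved
// and marked for re-execution so a deoptimization re-runs the access in the
// interpreter. For loaded flat arrays either a fresh buffer instance is
// allocated and initialized from the flat element (followed by a storestore
// barrier before the reference can be published), or, when the next bytecode
// is a foldable getfield, the access is delayed via DelayedLoadIndexed.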
1050 void GraphBuilder::load_indexed(BasicType type) {
1051   // In case of in-block code motion during range check elimination, the state before the access is needed
1052 ValueStack* state_before = nullptr;
1053 int array_idx = state()->stack_size() - 2;
1054 if (type == T_OBJECT && state()->stack_at(array_idx)->maybe_flat_array()) {
1055 // Save the entire state and re-execute on deopt when accessing flat arrays
1056 state_before = copy_state_before();
1057 state_before->set_should_reexecute(true);
1058 } else {
1059 state_before = copy_state_indexed_access();
1060 }
1061 compilation()->set_has_access_indexed(true);
1062 Value index = ipop();
1063 Value array = apop();
1064 Value length = nullptr;
1065 if (CSEArrayLength ||
1066 (array->as_Constant() != nullptr) ||
1067 (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1068 (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1069 (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1070 length = append(new ArrayLength(array, state_before));
1071 }
1072
1073 bool need_membar = false;
1074 LoadIndexed* load_indexed = nullptr;
1075 Instruction* result = nullptr;
1076 if (array->is_loaded_flat_array()) {
1077 // TODO 8350865 This is currently dead code
1078 ciType* array_type = array->declared_type();
1079 ciInlineKlass* elem_klass = array_type->as_flat_array_klass()->element_klass()->as_inline_klass();
1080
1081 bool can_delay_access = false;
1082 ciBytecodeStream s(method());
1083 s.force_bci(bci());
1084 s.next();
1085 if (s.cur_bc() == Bytecodes::_getfield) {
1086 bool will_link;
1087 ciField* next_field = s.get_field(will_link);
1088 bool next_needs_patching = !next_field->holder()->is_initialized() ||
1089 !next_field->will_link(method(), Bytecodes::_getfield) ||
1090 PatchALot;
1091 can_delay_access = C1UseDelayedFlattenedFieldReads && !next_needs_patching;
1092 }
1093 if (can_delay_access) {
1094       // Potentially optimizable array access; store information for a delayed decision
1095 LoadIndexed* li = new LoadIndexed(array, index, length, type, state_before);
1096 DelayedLoadIndexed* dli = new DelayedLoadIndexed(li, state_before);
1097 li->set_delayed(dli);
1098 set_pending_load_indexed(dli);
1099 return; // Nothing else to do for now
1100 } else {
1101 NewInstance* new_instance = new NewInstance(elem_klass, state_before, false, true);
1102 _memory->new_instance(new_instance);
1103 apush(append_split(new_instance));
1104 load_indexed = new LoadIndexed(array, index, length, type, state_before);
1105 load_indexed->set_vt(new_instance);
1106       // The LoadIndexed node will initialize this instance by copying from
1107 // the flat field. Ensure these stores are visible before any
1108 // subsequent store that publishes this reference.
1109 need_membar = true;
1110 }
1111 } else {
1112 load_indexed = new LoadIndexed(array, index, length, type, state_before);
1113 if (profile_array_accesses() && is_reference_type(type)) {
1114 compilation()->set_would_profile(true);
1115 load_indexed->set_should_profile(true);
1116 load_indexed->set_profiled_method(method());
1117 load_indexed->set_profiled_bci(bci());
1118 }
1119 }
1120 result = append(load_indexed);
1121 if (need_membar) {
1122 append(new MemBar(lir_membar_storestore));
1123 }
1124 assert(!load_indexed->should_profile() || load_indexed == result, "should not be optimized out");
1125 if (!array->is_loaded_flat_array()) {
1126 push(as_ValueType(type), result);
1127 }
1128 }
1129
1130
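// Valhalla-aware array store: as for loads, a potentially flat array forces
// saving the full state for re-execution on deoptimization, and reference
// stores into arrays that are not known to be flat can be profiled for the
// stored element type.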
1131 void GraphBuilder::store_indexed(BasicType type) {
1132   // In case of in-block code motion during range check elimination, the state before the access is needed
1133 ValueStack* state_before = nullptr;
1134 int array_idx = state()->stack_size() - 3;
1135 if (type == T_OBJECT && state()->stack_at(array_idx)->maybe_flat_array()) {
1136 // Save the entire state and re-execute on deopt when accessing flat arrays
1137 state_before = copy_state_before();
1138 state_before->set_should_reexecute(true);
1139 } else {
1140 state_before = copy_state_indexed_access();
1141 }
1142 compilation()->set_has_access_indexed(true);
1143 Value value = pop(as_ValueType(type));
1144 Value index = ipop();
1145 Value array = apop();
1146 Value length = nullptr;
1147 if (CSEArrayLength ||
1148 (array->as_Constant() != nullptr) ||
1149 (array->as_AccessField() && array->as_AccessField()->field()->is_constant()) ||
1150 (array->as_NewArray() && array->as_NewArray()->length() && array->as_NewArray()->length()->type()->is_constant()) ||
1151 (array->as_NewMultiArray() && array->as_NewMultiArray()->dims()->at(0)->type()->is_constant())) {
1152 length = append(new ArrayLength(array, state_before));
1153 }
1154 ciType* array_type = array->declared_type();
1155 bool check_boolean = false;
1156 if (array_type != nullptr) {
1157 if (array_type->is_loaded() &&
1158 array_type->as_array_klass()->element_type()->basic_type() == T_BOOLEAN) {
1159 assert(type == T_BYTE, "boolean store uses bastore");
1160 Value mask = append(new Constant(new IntConstant(1)));
1161 value = append(new LogicOp(Bytecodes::_iand, value, mask));
1162 }
1163 } else if (type == T_BYTE) {
1164 check_boolean = true;
1165 }
1166
1167 StoreIndexed* store_indexed = new StoreIndexed(array, index, length, type, value, state_before, check_boolean);
1168 if (profile_array_accesses() && is_reference_type(type) && !array->is_loaded_flat_array()) {
1169 compilation()->set_would_profile(true);
1170 store_indexed->set_should_profile(true);
1171 store_indexed->set_profiled_method(method());
1172 store_indexed->set_profiled_bci(bci());
1173 }
1174 Instruction* result = append(store_indexed);
1175 assert(!store_indexed->should_profile() || store_indexed == result, "should not be optimized out");
1176 _memory->store_value(value);
1177 }
1178
1179 void GraphBuilder::stack_op(Bytecodes::Code code) {
1180 switch (code) {
1181 case Bytecodes::_pop:
1182 { Value w = state()->raw_pop();
1183 }
1184 break;
1185 case Bytecodes::_pop2:
1186 { Value w1 = state()->raw_pop();
1187 Value w2 = state()->raw_pop();
1188 }
1189 break;
1190 case Bytecodes::_dup:
1191 { Value w = state()->raw_pop();
1192 state()->raw_push(w);
1193 state()->raw_push(w);
1194 }
1195 break;
1196 case Bytecodes::_dup_x1:
1197 { Value w1 = state()->raw_pop();
1198 Value w2 = state()->raw_pop();
1199 state()->raw_push(w1);
1200 state()->raw_push(w2);
1201 state()->raw_push(w1);
1202 }
1203 break;
1204 case Bytecodes::_dup_x2:
1205 { Value w1 = state()->raw_pop();
1206 Value w2 = state()->raw_pop();
1207 Value w3 = state()->raw_pop();
1343
1344
1345 void GraphBuilder::_goto(int from_bci, int to_bci) {
1346 Goto *x = new Goto(block_at(to_bci), to_bci <= from_bci);
1347 if (is_profiling()) {
1348 compilation()->set_would_profile(true);
1349 x->set_profiled_bci(bci());
1350 if (profile_branches()) {
1351 x->set_profiled_method(method());
1352 x->set_should_profile(true);
1353 }
1354 }
1355 append(x);
1356 }
1357
1358
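// Valhalla-aware branch construction. For if_acmpeq/if_acmpne a
// substitutability check (subst_check) is requested when either operand may be
// a value object (unloaded klass, Phi input, or a klass that can be an inline
// klass), since acmp on value objects is not a plain pointer comparison; the
// state before the branch is then kept for that slow path.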
1359 void GraphBuilder::if_node(Value x, If::Condition cond, Value y, ValueStack* state_before) {
1360 BlockBegin* tsux = block_at(stream()->get_dest());
1361 BlockBegin* fsux = block_at(stream()->next_bci());
1362 bool is_bb = tsux->bci() < stream()->cur_bci() || fsux->bci() < stream()->cur_bci();
1363
1364 bool subst_check = false;
1365 if (EnableValhalla && (stream()->cur_bc() == Bytecodes::_if_acmpeq || stream()->cur_bc() == Bytecodes::_if_acmpne)) {
1366 ValueType* left_vt = x->type();
1367 ValueType* right_vt = y->type();
1368 if (left_vt->is_object()) {
1369 assert(right_vt->is_object(), "must be");
1370 ciKlass* left_klass = x->as_loaded_klass_or_null();
1371 ciKlass* right_klass = y->as_loaded_klass_or_null();
1372
1373 if (left_klass == nullptr || right_klass == nullptr) {
1374         // The klass is still unloaded, or came from a Phi node. Go to the slow case.
1375 subst_check = true;
1376 } else if (left_klass->can_be_inline_klass() || right_klass->can_be_inline_klass()) {
1377         // Either operand may be a value object, but we're not sure. Go to the slow case.
1378 subst_check = true;
1379 } else {
1380 // No need to do substitutability check
1381 }
1382 }
1383 }
1384 if ((stream()->cur_bc() == Bytecodes::_if_acmpeq || stream()->cur_bc() == Bytecodes::_if_acmpne) &&
1385 is_profiling() && profile_branches()) {
1386 compilation()->set_would_profile(true);
1387 append(new ProfileACmpTypes(method(), bci(), x, y));
1388 }
1389
1390 // In case of loop invariant code motion or predicate insertion
1391 // before the body of a loop the state is needed
1392 Instruction *i = append(new If(x, cond, false, y, tsux, fsux, (is_bb || compilation()->is_optimistic() || subst_check) ? state_before : nullptr, is_bb, subst_check));
1393
1394 assert(i->as_Goto() == nullptr ||
1395 (i->as_Goto()->sux_at(0) == tsux && i->as_Goto()->is_safepoint() == (tsux->bci() < stream()->cur_bci())) ||
1396 (i->as_Goto()->sux_at(0) == fsux && i->as_Goto()->is_safepoint() == (fsux->bci() < stream()->cur_bci())),
1397 "safepoint state of Goto returned by canonicalizer incorrect");
1398
1399 if (is_profiling()) {
1400 If* if_node = i->as_If();
1401 if (if_node != nullptr) {
1402 // Note that we'd collect profile data in this method if we wanted it.
1403 compilation()->set_would_profile(true);
1404 // At level 2 we need the proper bci to count backedges
1405 if_node->set_profiled_bci(bci());
1406 if (profile_branches()) {
1407 // Successors can be rotated by the canonicalizer, check for this case.
1408 if_node->set_profiled_method(method());
1409 if_node->set_should_profile(true);
1410 if (if_node->tsux() == fsux) {
1411 if_node->set_swapped(true);
1412 }
1627 }
1628
1629 if (needs_check) {
1630 // Perform the registration of finalizable objects.
1631 ValueStack* state_before = copy_state_for_exception();
1632 load_local(objectType, 0);
1633 append_split(new Intrinsic(voidType, vmIntrinsics::_Object_init,
1634 state()->pop_arguments(1),
1635 true, state_before, true));
1636 }
1637 }
1638
1639
1640 void GraphBuilder::method_return(Value x, bool ignore_return) {
1641 if (method()->intrinsic_id() == vmIntrinsics::_Object_init) {
1642 call_register_finalizer();
1643 }
1644
1645 // The conditions for a memory barrier are described in Parse::do_exits().
1646 bool need_mem_bar = false;
1647 if (method()->is_object_constructor() &&
1648 (scope()->wrote_final() || scope()->wrote_stable() ||
1649 (AlwaysSafeConstructors && scope()->wrote_fields()) ||
1650 (support_IRIW_for_not_multiple_copy_atomic_cpu && scope()->wrote_volatile()))) {
1651 need_mem_bar = true;
1652 }
1653
1654 BasicType bt = method()->return_type()->basic_type();
1655 switch (bt) {
1656 case T_BYTE:
1657 {
1658 Value shift = append(new Constant(new IntConstant(24)));
1659 x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1660 x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1661 break;
1662 }
1663 case T_SHORT:
1664 {
1665 Value shift = append(new Constant(new IntConstant(16)));
1666 x = append(new ShiftOp(Bytecodes::_ishl, x, shift));
1667 x = append(new ShiftOp(Bytecodes::_ishr, x, shift));
1778 // Attach dimension info to stable arrays.
1779 if (FoldStableValues &&
1780 field->is_stable() && field_type == T_ARRAY && !field_value.is_null_or_zero()) {
1781 ciArray* array = field_value.as_object()->as_array();
1782 jint dimension = field->type()->as_array_klass()->dimension();
1783 value = new StableArrayConstant(array, dimension);
1784 }
1785
1786 switch (field_type) {
1787 case T_ARRAY:
1788 case T_OBJECT:
1789 if (field_value.as_object()->should_be_constant()) {
1790 return new Constant(value);
1791 }
1792 return nullptr; // Not a constant.
1793 default:
1794 return new Constant(value);
1795 }
1796 }
1797
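// Copy the payload of an inline-type value field by field from src to dest at
// the given offsets. Flat sub-fields are copied recursively, including their
// null markers, which are accessed via Unsafe because they are not real
// fields; all other fields are copied with a LoadField/StoreField pair that
// records the enclosing field.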
1798 void GraphBuilder::copy_inline_content(ciInlineKlass* vk, Value src, int src_off, Value dest, int dest_off, ValueStack* state_before, ciField* enclosing_field) {
1799 for (int i = 0; i < vk->nof_declared_nonstatic_fields(); i++) {
1800 ciField* field = vk->declared_nonstatic_field_at(i);
1801 int offset = field->offset_in_bytes() - vk->payload_offset();
1802 if (field->is_flat()) {
1803 copy_inline_content(field->type()->as_inline_klass(), src, src_off + offset, dest, dest_off + offset, state_before, enclosing_field);
1804 if (!field->is_null_free()) {
1805       // Nullable: copy the null marker using Unsafe because null markers are not real fields
1806 int null_marker_offset = field->null_marker_offset() - vk->payload_offset();
1807 Value offset = append(new Constant(new LongConstant(src_off + null_marker_offset)));
1808 Value nm = append(new UnsafeGet(T_BOOLEAN, src, offset, false));
1809 offset = append(new Constant(new LongConstant(dest_off + null_marker_offset)));
1810 append(new UnsafePut(T_BOOLEAN, dest, offset, nm, false));
1811 }
1812 } else {
1813 Value value = append(new LoadField(src, src_off + offset, field, false, state_before, false));
1814 StoreField* store = new StoreField(dest, dest_off + offset, field, value, false, state_before, false);
1815 store->set_enclosing_field(enclosing_field);
1816 append(store);
1817 }
1818 }
1819 }
1820
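// Valhalla-aware field access. On top of the usual constant folding, patching
// and boolean masking, this handles null-free and flat fields: null-free
// stores get explicit null checks, and flat reads either allocate and fill a
// buffer instance (with a storestore barrier before publication) or are
// delayed via DelayedFieldAccess/DelayedLoadIndexed when the next getfield can
// be folded into the same access.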
1821 void GraphBuilder::access_field(Bytecodes::Code code) {
1822 bool will_link;
1823 ciField* field = stream()->get_field(will_link);
1824 ciInstanceKlass* holder = field->holder();
1825 BasicType field_type = field->type()->basic_type();
1826 ValueType* type = as_ValueType(field_type);
1827
1828 // call will_link again to determine if the field is valid.
1829 const bool needs_patching = !holder->is_loaded() ||
1830 !field->will_link(method(), code) ||
1831 (!field->is_flat() && PatchALot);
1832
1833 ValueStack* state_before = nullptr;
1834 if (!holder->is_initialized() || needs_patching) {
1835 // save state before instruction for debug info when
1836 // deoptimization happens during patching
1837 state_before = copy_state_before();
1838 }
1839
1840 Value obj = nullptr;
1841 if (code == Bytecodes::_getstatic || code == Bytecodes::_putstatic) {
1842 if (state_before != nullptr) {
1843 // build a patching constant
1844 obj = new Constant(new InstanceConstant(holder->java_mirror()), state_before);
1845 } else {
1846 obj = new Constant(new InstanceConstant(holder->java_mirror()));
1847 }
1848 }
1849
1850 if (code == Bytecodes::_putfield) {
1851 scope()->set_wrote_fields();
1852 if (field->is_volatile()) {
1853 scope()->set_wrote_volatile();
1854 }
1855 if (field->is_final()) {
1856 scope()->set_wrote_final();
1857 }
1858 if (field->is_stable()) {
1859 scope()->set_wrote_stable();
1860 }
1861 }
1862
1863 int offset = !needs_patching ? field->offset_in_bytes() : -1;
1864 switch (code) {
1865 case Bytecodes::_getstatic: {
1866 // check for compile-time constants, i.e., initialized static final fields
1867 Value constant = nullptr;
1868 if (field->is_static_constant() && !PatchALot) {
1869 ciConstant field_value = field->constant_value();
1870 assert(!field->is_stable() || !field_value.is_null_or_zero(),
1871 "stable static w/ default value shouldn't be a constant");
1872 constant = make_constant(field_value, field);
1873 }
1874 if (constant != nullptr) {
1875 push(type, append(constant));
1876 } else {
1877 if (state_before == nullptr) {
1878 state_before = copy_state_for_exception();
1879 }
1880 LoadField* load_field = new LoadField(append(obj), offset, field, true,
1881 state_before, needs_patching);
1882 push(type, append(load_field));
1883 }
1884 break;
1885 }
1886 case Bytecodes::_putstatic: {
1887 Value val = pop(type);
1888 if (state_before == nullptr) {
1889 state_before = copy_state_for_exception();
1890 }
1891 if (field_type == T_BOOLEAN) {
1892 Value mask = append(new Constant(new IntConstant(1)));
1893 val = append(new LogicOp(Bytecodes::_iand, val, mask));
1894 }
1895 if (field->is_null_free()) {
1896 null_check(val);
1897 }
1898 if (field->is_null_free() && field->type()->is_loaded() && field->type()->as_inline_klass()->is_empty() && (!method()->is_class_initializer() || field->is_flat())) {
1899 // Storing to a field of an empty, null-free inline type that is already initialized. Ignore.
1900 break;
1901 }
1902 append(new StoreField(append(obj), offset, field, val, true, state_before, needs_patching));
1903 break;
1904 }
1905 case Bytecodes::_getfield: {
1906 // Check for compile-time constants, i.e., trusted final non-static fields.
1907 Value constant = nullptr;
1908 if (state_before == nullptr && field->is_flat()) {
1909 // Save the entire state and re-execute on deopt when accessing flat fields
1910 assert(Interpreter::bytecode_should_reexecute(code), "should reexecute");
1911 state_before = copy_state_before();
1912 }
1913 if (!has_pending_field_access() && !has_pending_load_indexed()) {
1914 obj = apop();
1915 ObjectType* obj_type = obj->type()->as_ObjectType();
1916 if (field->is_constant() && !field->is_flat() && obj_type->is_constant() && !PatchALot) {
1917 ciObject* const_oop = obj_type->constant_value();
1918 if (!const_oop->is_null_object() && const_oop->is_loaded()) {
1919 ciConstant field_value = field->constant_value_of(const_oop);
1920 if (field_value.is_valid()) {
1921 constant = make_constant(field_value, field);
1922 // For CallSite objects add a dependency for invalidation of the optimization.
1923 if (field->is_call_site_target()) {
1924 ciCallSite* call_site = const_oop->as_call_site();
1925 if (!call_site->is_fully_initialized_constant_call_site()) {
1926 ciMethodHandle* target = field_value.as_object()->as_method_handle();
1927 dependency_recorder()->assert_call_site_target_value(call_site, target);
1928 }
1929 }
1930 }
1931 }
1932 }
1933 }
1934 if (constant != nullptr) {
1935 push(type, append(constant));
1936 } else {
1937 if (state_before == nullptr) {
1938 state_before = copy_state_for_exception();
1939 }
1940 if (!field->is_flat()) {
1941 if (has_pending_field_access()) {
1942 assert(!needs_patching, "Can't patch delayed field access");
1943 obj = pending_field_access()->obj();
1944 offset += pending_field_access()->offset() - field->holder()->as_inline_klass()->payload_offset();
1945 field = pending_field_access()->holder()->get_field_by_offset(offset, false);
1946 assert(field != nullptr, "field not found");
1947 set_pending_field_access(nullptr);
1948 } else if (has_pending_load_indexed()) {
1949 assert(!needs_patching, "Can't patch delayed field access");
1950 pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->payload_offset());
1951 LoadIndexed* li = pending_load_indexed()->load_instr();
1952 li->set_type(type);
1953 push(type, append(li));
1954 set_pending_load_indexed(nullptr);
1955 break;
1956 }
1957 LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
1958 Value replacement = !needs_patching ? _memory->load(load) : load;
1959 if (replacement != load) {
1960             assert(replacement->is_linked() || !replacement->can_be_linked(), "should already be linked");
1961 // Writing an (integer) value to a boolean, byte, char or short field includes an implicit narrowing
1962 // conversion. Emit an explicit conversion here to get the correct field value after the write.
1963 switch (field_type) {
1964 case T_BOOLEAN:
1965 case T_BYTE:
1966 replacement = append(new Convert(Bytecodes::_i2b, replacement, type));
1967 break;
1968 case T_CHAR:
1969 replacement = append(new Convert(Bytecodes::_i2c, replacement, type));
1970 break;
1971 case T_SHORT:
1972 replacement = append(new Convert(Bytecodes::_i2s, replacement, type));
1973 break;
1974 default:
1975 break;
1976 }
1977 push(type, replacement);
1978 } else {
1979 push(type, append(load));
1980 }
1981 } else {
1982 // Flat field
1983 assert(!needs_patching, "Can't patch flat inline type field access");
1984 ciInlineKlass* inline_klass = field->type()->as_inline_klass();
1985 bool is_naturally_atomic = inline_klass->nof_declared_nonstatic_fields() <= 1;
1986 bool needs_atomic_access = !field->is_null_free() || (field->is_volatile() && !is_naturally_atomic);
1987 if (needs_atomic_access) {
1988 assert(!has_pending_field_access(), "Pending field accesses are not supported");
1989 LoadField* load = new LoadField(obj, offset, field, false, state_before, needs_patching);
1990 push(type, append(load));
1991 } else {
1992 assert(field->is_null_free(), "must be null-free");
1993 // Look at the next bytecode to check if we can delay the field access
1994 bool can_delay_access = false;
1995 ciBytecodeStream s(method());
1996 s.force_bci(bci());
1997 s.next();
1998 if (s.cur_bc() == Bytecodes::_getfield && !needs_patching) {
1999 ciField* next_field = s.get_field(will_link);
2000 bool next_needs_patching = !next_field->holder()->is_loaded() ||
2001 !next_field->will_link(method(), Bytecodes::_getfield) ||
2002 PatchALot;
2003 // We can't update the offset for atomic accesses
2004 bool next_needs_atomic_access = !next_field->is_null_free() || next_field->is_volatile();
2005 can_delay_access = C1UseDelayedFlattenedFieldReads && !next_needs_patching && !next_needs_atomic_access;
2006 }
2007 if (can_delay_access) {
2008 if (has_pending_load_indexed()) {
2009 pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->payload_offset());
2010 } else if (has_pending_field_access()) {
2011 pending_field_access()->inc_offset(offset - field->holder()->as_inline_klass()->payload_offset());
2012 } else {
2013 null_check(obj);
2014 DelayedFieldAccess* dfa = new DelayedFieldAccess(obj, field->holder(), field->offset_in_bytes(), state_before);
2015 set_pending_field_access(dfa);
2016 }
2017 } else {
2018 scope()->set_wrote_final();
2019 scope()->set_wrote_fields();
2020 bool need_membar = false;
2021 if (has_pending_load_indexed()) {
2022 assert(!needs_patching, "Can't patch delayed field access");
2023 pending_load_indexed()->update(field, offset - field->holder()->as_inline_klass()->payload_offset());
2024 NewInstance* vt = new NewInstance(inline_klass, pending_load_indexed()->state_before(), false, true);
2025 _memory->new_instance(vt);
2026 pending_load_indexed()->load_instr()->set_vt(vt);
2027 apush(append_split(vt));
2028 append(pending_load_indexed()->load_instr());
2029 set_pending_load_indexed(nullptr);
2030 need_membar = true;
2031 } else {
2032 if (has_pending_field_access()) {
2033 state_before = pending_field_access()->state_before();
2034 }
2035 NewInstance* new_instance = new NewInstance(inline_klass, state_before, false, true);
2036 _memory->new_instance(new_instance);
2037 apush(append_split(new_instance));
2038 if (has_pending_field_access()) {
2039 copy_inline_content(inline_klass, pending_field_access()->obj(),
2040 pending_field_access()->offset() + field->offset_in_bytes() - field->holder()->as_inline_klass()->payload_offset(),
2041 new_instance, inline_klass->payload_offset(), state_before);
2042 set_pending_field_access(nullptr);
2043 } else {
2044 if (field->type()->as_instance_klass()->is_initialized() && field->type()->as_inline_klass()->is_empty()) {
2045                 // Needs an explicit null check because the code below does not perform any actual load if there are no fields
2046 null_check(obj);
2047 }
2048 copy_inline_content(inline_klass, obj, field->offset_in_bytes(), new_instance, inline_klass->payload_offset(), state_before);
2049 }
2050 need_membar = true;
2051 }
2052 if (need_membar) {
2053             // If we allocated a new instance, ensure the stores to copy the
2054 // field contents are visible before any subsequent store that
2055 // publishes this reference.
2056 append(new MemBar(lir_membar_storestore));
2057 }
2058 }
2059 }
2060 }
2061 }
2062 break;
2063 }
2064 case Bytecodes::_putfield: {
2065 Value val = pop(type);
2066 obj = apop();
2067 if (state_before == nullptr) {
2068 state_before = copy_state_for_exception();
2069 }
2070 if (field_type == T_BOOLEAN) {
2071 Value mask = append(new Constant(new IntConstant(1)));
2072 val = append(new LogicOp(Bytecodes::_iand, val, mask));
2073 }
2074
2075 if (field->is_null_free() && field->type()->is_loaded() && field->type()->as_inline_klass()->is_empty() && (!method()->is_object_constructor() || field->is_flat())) {
2076 // Storing to a field of an empty, null-free inline type that is already initialized. Ignore.
2077 null_check(obj);
2078 null_check(val);
2079 } else if (!field->is_flat()) {
2080 if (field->is_null_free()) {
2081 null_check(val);
2082 }
2083 StoreField* store = new StoreField(obj, offset, field, val, false, state_before, needs_patching);
2084 if (!needs_patching) store = _memory->store(store);
2085 if (store != nullptr) {
2086 append(store);
2087 }
2088 } else {
2089 // Flat field
2090 assert(!needs_patching, "Can't patch flat inline type field access");
2091 ciInlineKlass* inline_klass = field->type()->as_inline_klass();
2092 bool is_naturally_atomic = inline_klass->nof_declared_nonstatic_fields() <= 1;
2093 bool needs_atomic_access = !field->is_null_free() || (field->is_volatile() && !is_naturally_atomic);
2094 if (needs_atomic_access) {
2095 if (field->is_null_free()) {
2096 null_check(val);
2097 }
2098 append(new StoreField(obj, offset, field, val, false, state_before, needs_patching));
2099 } else {
2100 assert(field->is_null_free(), "must be null-free");
2101 copy_inline_content(inline_klass, val, inline_klass->payload_offset(), obj, offset, state_before, field);
2102 }
2103 }
2104 break;
2105 }
2106 default:
2107 ShouldNotReachHere();
2108 break;
2109 }
2110 }
2111
2112 Dependencies* GraphBuilder::dependency_recorder() const {
2113 assert(DeoptC1, "need debug information");
2114 return compilation()->dependency_recorder();
2115 }
2116
2117 // How many arguments do we want to profile?
2118 Values* GraphBuilder::args_list_for_profiling(ciMethod* target, int& start, bool may_have_receiver) {
2119 int n = 0;
2120 bool has_receiver = may_have_receiver && Bytecodes::has_receiver(method()->java_code_at_bci(bci()));
2121 start = has_receiver ? 1 : 0;
2122 if (profile_arguments()) {
2123 ciProfileData* data = method()->method_data()->bci_to_data(bci());
2124 if (data != nullptr && (data->is_CallTypeData() || data->is_VirtualCallTypeData())) {
2125 n = data->is_CallTypeData() ? data->as_CallTypeData()->number_of_arguments() : data->as_VirtualCallTypeData()->number_of_arguments();
2126 }
2127 }
2128 // If we are inlining then we need to collect arguments to profile parameters for the target
2129 if (profile_parameters() && target != nullptr) {
2130 if (target->method_data() != nullptr && target->method_data()->parameters_type_data() != nullptr) {
2131 // The receiver is profiled on method entry so it's included in
2208 break;
2209 case Bytecodes::_invokehandle:
2210 code = target->is_static() ? Bytecodes::_invokestatic : Bytecodes::_invokespecial;
2211 break;
2212 default:
2213 break;
2214 }
2215 } else {
2216 if (bc_raw == Bytecodes::_invokehandle) {
2217 assert(!will_link, "should come here only for unlinked call");
2218 code = Bytecodes::_invokespecial;
2219 }
2220 }
2221
2222 if (code == Bytecodes::_invokespecial) {
2223 // Additional receiver subtype checks for interface calls via invokespecial or invokeinterface.
2224 ciKlass* receiver_constraint = nullptr;
2225
2226 if (bc_raw == Bytecodes::_invokeinterface) {
2227 receiver_constraint = holder;
2228 } else if (bc_raw == Bytecodes::_invokespecial && !target->is_object_constructor() && calling_klass->is_interface()) {
2229 receiver_constraint = calling_klass;
2230 }
2231
2232 if (receiver_constraint != nullptr) {
2233 int index = state()->stack_size() - (target->arg_size_no_receiver() + 1);
2234 Value receiver = state()->stack_at(index);
2235 CheckCast* c = new CheckCast(receiver_constraint, receiver, copy_state_before());
2236 // go to uncommon_trap when checkcast fails
2237 c->set_invokespecial_receiver_check();
2238 state()->stack_at_put(index, append_split(c));
2239 }
2240 }
2241
2242   // Push the appendix argument (MethodType, CallSite, etc.), if there is one.
2243 bool patch_for_appendix = false;
2244 int patching_appendix_arg = 0;
2245 if (Bytecodes::has_optional_appendix(bc_raw) && (!will_link || PatchALot)) {
2246 Value arg = append(new Constant(new ObjectConstant(compilation()->env()->unloaded_ciinstance()), copy_state_before()));
2247 apush(arg);
2248 patch_for_appendix = true;
2464 }
2465 }
2466
2467 Invoke* result = new Invoke(code, result_type, recv, args, target, state_before);
2468 // push result
2469 append_split(result);
2470
2471 if (result_type != voidType) {
2472 push(result_type, result);
2473 }
2474 if (profile_return() && result_type->is_object_kind()) {
2475 profile_return_type(result, target);
2476 }
2477 }
2478
2479
2480 void GraphBuilder::new_instance(int klass_index) {
2481 ValueStack* state_before = copy_state_exhandling();
2482 ciKlass* klass = stream()->get_klass();
2483 assert(klass->is_instance_klass(), "must be an instance klass");
2484 NewInstance* new_instance = new NewInstance(klass->as_instance_klass(), state_before, stream()->is_unresolved_klass(), false);
2485 _memory->new_instance(new_instance);
2486 apush(append_split(new_instance));
2487 }
2488
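// newarray: pop the array length, take the element type from the bytecode operand, and push the new primitive array.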
2489 void GraphBuilder::new_type_array() {
2490 ValueStack* state_before = copy_state_exhandling();
2491 apush(append_split(new NewTypeArray(ipop(), (BasicType)stream()->get_index(), state_before, true)));
2492 }
2493
2494
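// anewarray: pop the length and push a new array of the given klass; unresolved klasses take the patching path (full state copy).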
2495 void GraphBuilder::new_object_array() {
2496 ciKlass* klass = stream()->get_klass();
2497 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2498 NewArray* n = new NewObjectArray(klass, ipop(), state_before);
2499 apush(append_split(n));
2500 }
2501
2502
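// Returns true if the type check against k can be done as a direct klass comparison, e.g. for a loaded final instance klass.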
2503 bool GraphBuilder::direct_compare(ciKlass* k) {
2504 if (k->is_loaded() && k->is_instance_klass() && !UseSlowPath) {
2505 ciInstanceKlass* ik = k->as_instance_klass();
2506 if (ik->is_final()) {
2507 return true;
2508 } else {
2541 ciKlass* klass = stream()->get_klass();
2542 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2543 InstanceOf* i = new InstanceOf(klass, apop(), state_before);
2544 ipush(append_split(i));
2545 i->set_direct_compare(direct_compare(klass));
2546
2547 if (is_profiling()) {
2548 // Note that we'd collect profile data in this method if we wanted it.
2549 compilation()->set_would_profile(true);
2550
2551 if (profile_checkcasts()) {
2552 i->set_profiled_method(method());
2553 i->set_profiled_bci(bci());
2554 i->set_should_profile(true);
2555 }
2556 }
2557 }
2558
2559
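// Appends a MonitorEnter for x. With Valhalla enabled, a runtime check is requested when the operand might be an inline type,
// so that locking on it throws IllegalMonitorStateException.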
2560 void GraphBuilder::monitorenter(Value x, int bci) {
2561 bool maybe_inlinetype = false;
2562 if (bci == InvocationEntryBci) {
2563 // Called by GraphBuilder::inline_sync_entry.
2564 #ifdef ASSERT
2565 ciType* obj_type = x->declared_type();
2566 assert(obj_type == nullptr || !obj_type->is_inlinetype(), "inline types cannot have synchronized methods");
2567 #endif
2568 } else {
2569 // We are compiling a monitorenter bytecode
2570 if (EnableValhalla) {
2571 ciType* obj_type = x->declared_type();
2572 if (obj_type == nullptr || obj_type->as_klass()->can_be_inline_klass()) {
2573 // If we're (possibly) locking on an inline type, check for markWord::always_locked_pattern
2574 // and throw an IllegalMonitorStateException (IMSE). (obj_type is null for Phi nodes, so be conservative.)
2575 maybe_inlinetype = true;
2576 }
2577 }
2578 }
2579
2580 // save state before locking in case of deoptimization after a NullPointerException
2581 ValueStack* state_before = copy_state_for_exception_with_bci(bci);
2582 append_with_bci(new MonitorEnter(x, state()->lock(x), state_before, maybe_inlinetype), bci);
2583 kill_all();
2584 }
2585
2586
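// Appends a MonitorExit releasing the innermost lock and conservatively kills cached values (memory state).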
2587 void GraphBuilder::monitorexit(Value x, int bci) {
2588 append_with_bci(new MonitorExit(x, state()->unlock()), bci);
2589 kill_all();
2590 }
2591
2592
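// multianewarray: pop one length per dimension off the stack and append a NewMultiArray for the resolved klass.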
2593 void GraphBuilder::new_multi_array(int dimensions) {
2594 ciKlass* klass = stream()->get_klass();
2595 ValueStack* state_before = !klass->is_loaded() || PatchALot ? copy_state_before() : copy_state_exhandling();
2596
2597 Values* dims = new Values(dimensions, dimensions, nullptr);
2598 // fill in all dimensions
2599 int i = dimensions;
2600 while (i-- > 0) dims->at_put(i, ipop());
2601 // create array
2602 NewArray* n = new NewMultiArray(klass, dims, state_before);
2687
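// Appends a state-splitting instruction at the current bci.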
2688 Instruction* GraphBuilder::append_split(StateSplit* instr) {
2689 return append_with_bci(instr, bci());
2690 }
2691
2692
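// Emits an explicit NullCheck unless the value is known to be non-null: a fresh allocation, a loaded non-null constant, or a null-free value.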
2693 void GraphBuilder::null_check(Value value) {
2694 if (value->as_NewArray() != nullptr || value->as_NewInstance() != nullptr) {
2695 return;
2696 } else {
2697 Constant* con = value->as_Constant();
2698 if (con) {
2699 ObjectType* c = con->type()->as_ObjectType();
2700 if (c && c->is_loaded()) {
2701 ObjectConstant* oc = c->as_ObjectConstant();
2702 if (!oc || !oc->value()->is_null_object()) {
2703 return;
2704 }
2705 }
2706 }
2707 if (value->is_null_free()) return;
2708 }
2709 append(new NullCheck(value, copy_state_for_exception()));
2710 }
2711
2712
2713
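// Collects the exception handlers whose ranges cover the current bci, walking outward through the inlined (caller) scopes,
// so a potentially throwing instruction can be linked to its handlers.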
2714 XHandlers* GraphBuilder::handle_exception(Instruction* instruction) {
2715 if (!has_handler() && (!instruction->needs_exception_state() || instruction->exception_state() != nullptr)) {
2716 assert(instruction->exception_state() == nullptr
2717 || instruction->exception_state()->kind() == ValueStack::EmptyExceptionState
2718 || (instruction->exception_state()->kind() == ValueStack::ExceptionState && _compilation->env()->should_retain_local_variables()),
2719 "exception_state should be of exception kind");
2720 return new XHandlers();
2721 }
2722
2723 XHandlers* exception_handlers = new XHandlers();
2724 ScopeData* cur_scope_data = scope_data();
2725 ValueStack* cur_state = instruction->state_before();
2726 ValueStack* prev_state = nullptr;
2727 int scope_count = 0;
2728
2729 assert(cur_state != nullptr, "state_before must be set");
2730 do {
2731 int cur_bci = cur_state->bci();
2732 assert(cur_scope_data->scope() == cur_state->scope(), "scopes do not match");
2733 assert(cur_bci == SynchronizationEntryBCI || cur_bci == cur_scope_data->stream()->cur_bci()
2734 || has_pending_field_access() || has_pending_load_indexed(), "invalid bci");
2735
2736
2737 // join with all potential exception handlers
2738 XHandlers* list = cur_scope_data->xhandlers();
2739 const int n = list->length();
2740 for (int i = 0; i < n; i++) {
2741 XHandler* h = list->handler_at(i);
2742 if (h->covers(cur_bci)) {
2743 // h is a potential exception handler => join it
2744 compilation()->set_has_exception_handlers(true);
2745
2746 BlockBegin* entry = h->entry_block();
2747 if (entry == block()) {
2748 // It's acceptable for an exception handler to cover itself,
2749 // but we don't handle that case in the parser currently. It's
2750 // very rare, so we bail out instead of trying to handle it.
2751 BAILOUT_("exception handler covers itself", exception_handlers);
2752 }
2753 assert(entry->bci() == h->handler_bci(), "must match");
2754 assert(entry->bci() == -1 || entry == cur_scope_data->block_at(entry->bci()), "blocks must correspond");
2755
3497 // the storage for the OSR buffer is freed manually in the LIRGenerator.
3498
3499 assert(state->caller_state() == nullptr, "should be top scope");
3500 state->clear_locals();
3501 Goto* g = new Goto(target, false);
3502 append(g);
3503 _osr_entry->set_end(g);
3504 target->merge(_osr_entry->end()->state(), compilation()->has_irreducible_loops());
3505
3506 scope_data()->set_stream(nullptr);
3507 }
3508
3509
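// Builds the ValueStack describing the state on method entry: the receiver for non-static methods, the incoming arguments,
// and a lock slot for synchronized methods.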
3510 ValueStack* GraphBuilder::state_at_entry() {
3511 ValueStack* state = new ValueStack(scope(), nullptr);
3512
3513 // Set up locals for receiver
3514 int idx = 0;
3515 if (!method()->is_static()) {
3516 // we should always see the receiver
3517 state->store_local(idx, new Local(method()->holder(), objectType, idx,
3518 /*receiver*/ true, /*null_free*/ method()->holder()->is_flat_array_klass()));
3519 idx = 1;
3520 }
3521
3522 // Set up locals for incoming arguments
3523 ciSignature* sig = method()->signature();
3524 for (int i = 0; i < sig->count(); i++) {
3525 ciType* type = sig->type_at(i);
3526 BasicType basic_type = type->basic_type();
3528 // don't allow T_ARRAY to propagate into local types
3528 if (is_reference_type(basic_type)) basic_type = T_OBJECT;
3529 ValueType* vt = as_ValueType(basic_type);
3530 state->store_local(idx, new Local(type, vt, idx, false, false));
3531 idx += type->size();
3532 }
3533
3534 // lock synchronized method
3535 if (method()->is_synchronized()) {
3536 state->lock(nullptr);
3537 }
3538
3539 return state;
3540 }
3541
3542
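// Root-scope constructor: build the bci-to-block mapping, set up the standard entry state, and prepare value numbering
// before the blocks are filled in.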
3543 GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
3544 : _scope_data(nullptr)
3545 , _compilation(compilation)
3546 , _memory(new MemoryBuffer())
3547 , _inline_bailout_msg(nullptr)
3548 , _instruction_count(0)
3549 , _osr_entry(nullptr)
3550 , _pending_field_access(nullptr)
3551 , _pending_load_indexed(nullptr)
3552 {
3553 int osr_bci = compilation->osr_bci();
3554
3555 // determine entry points and bci2block mapping
3556 BlockListBuilder blm(compilation, scope, osr_bci);
3557 CHECK_BAILOUT();
3558
3559 BlockList* bci2block = blm.bci2block();
3560 BlockBegin* start_block = bci2block->at(0);
3561
3562 push_root_scope(scope, bci2block, start_block);
3563
3564 // setup state for std entry
3565 _initial_state = state_at_entry();
3566 start_block->merge(_initial_state, compilation->has_irreducible_loops());
3567
3568 // Block ends (BlockEnd instructions) are still null at this point
3569
3570 // complete graph
3571 _vmap = new ValueMap();