19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "asm/macroAssembler.hpp"
26 #include "compiler/disassembler.hpp"
27 #include "gc/shared/collectedHeap.hpp"
28 #include "gc/shared/gc_globals.hpp"
29 #include "gc/shared/tlab_globals.hpp"
30 #include "interpreter/interpreter.hpp"
31 #include "interpreter/interpreterRuntime.hpp"
32 #include "interpreter/interp_masm.hpp"
33 #include "interpreter/templateTable.hpp"
34 #include "memory/universe.hpp"
35 #include "oops/methodCounters.hpp"
36 #include "oops/methodData.hpp"
37 #include "oops/objArrayKlass.hpp"
38 #include "oops/oop.inline.hpp"
39 #include "oops/resolvedFieldEntry.hpp"
40 #include "oops/resolvedIndyEntry.hpp"
41 #include "oops/resolvedMethodEntry.hpp"
42 #include "prims/jvmtiExport.hpp"
43 #include "prims/methodHandles.hpp"
44 #include "runtime/frame.inline.hpp"
45 #include "runtime/safepointMechanism.hpp"
46 #include "runtime/sharedRuntime.hpp"
47 #include "runtime/stubRoutines.hpp"
48 #include "runtime/synchronizer.hpp"
49 #include "utilities/macros.hpp"
50
51 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
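// The hook above is a pass-through: it records the C++ __FILE__/__LINE__ of
// each emitted instruction so disassembly output (e.g. via hsdis) can be
// cross-referenced back to this file, and otherwise behaves exactly like a
// plain "_masm->".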
52
53 // Global Register Names
54 static const Register rbcp = r13;
55 static const Register rlocals = r14;
56
57 // Address Computation: local variables
58 static inline Address iaddress(int n) {
150 static void do_oop_load(InterpreterMacroAssembler* _masm,
151 Address src,
152 Register dst,
153 DecoratorSet decorators = 0) {
154 __ load_heap_oop(dst, src, rdx, rbx, decorators);
155 }
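// Note: rdx and rbx are handed to load_heap_oop as scratch registers for the
// GC barrier, so callers of do_oop_load must treat both as clobbered.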
156
157 Address TemplateTable::at_bcp(int offset) {
158 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
159 return Address(rbcp, offset);
160 }
161
162
163 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
164 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
165 int byte_no) {
166 if (!RewriteBytecodes) return;
167 Label L_patch_done;
168
169 switch (bc) {
170 case Bytecodes::_fast_aputfield:
171 case Bytecodes::_fast_bputfield:
172 case Bytecodes::_fast_zputfield:
173 case Bytecodes::_fast_cputfield:
174 case Bytecodes::_fast_dputfield:
175 case Bytecodes::_fast_fputfield:
176 case Bytecodes::_fast_iputfield:
177 case Bytecodes::_fast_lputfield:
178 case Bytecodes::_fast_sputfield:
179 {
180 // We skip bytecode quickening for putfield instructions when
181 // the put_code written to the constant pool cache is zero.
182 // This is required so that every execution of this instruction
183 // calls out to InterpreterRuntime::resolve_get_put to do
184 // additional, required work.
185 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
186 assert(load_bc_into_bc_reg, "we use bc_reg as temp");
187 __ load_field_entry(temp_reg, bc_reg);
188 if (byte_no == f1_byte) {
189 __ load_unsigned_byte(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));
766 Address(rdx, rax,
767 Address::times_4,
768 arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
769 noreg, noreg);
770 }
771
772 void TemplateTable::daload() {
773 transition(itos, dtos);
774 // rax: index
775 // rdx: array
776 index_check(rdx, rax); // kills rbx
777 __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
778 Address(rdx, rax,
779 Address::times_8,
780 arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
781 noreg, noreg);
782 }
783
784 void TemplateTable::aaload() {
785 transition(itos, atos);
786 // rax: index
787 // rdx: array
788 index_check(rdx, rax); // kills rbx
789 do_oop_load(_masm,
790 Address(rdx, rax,
791 UseCompressedOops ? Address::times_4 : Address::times_ptr,
792 arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
793 rax,
794 IS_ARRAY);
795 }
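// (The scale above follows from the element size: with compressed oops each
// element is a 4-byte narrowOop, so the index is scaled by 4; uncompressed
// oops use the full pointer width.)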
796
797 void TemplateTable::baload() {
798 transition(itos, itos);
799 // rax: index
800 // rdx: array
801 index_check(rdx, rax); // kills rbx
802 __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
803 Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
804 noreg, noreg);
805 }
806
807 void TemplateTable::caload() {
808 transition(itos, itos);
809 // rax: index
810 // rdx: array
811 index_check(rdx, rax); // kills rbx
812 __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
813 Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
814 noreg, noreg);
1048 __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1049 Address(rdx, rbx, Address::times_4,
1050 arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1051 noreg /* ftos */, noreg, noreg, noreg);
1052 }
1053
1054 void TemplateTable::dastore() {
1055 transition(dtos, vtos);
1056 __ pop_i(rbx);
1057 // value is in xmm0 (x86_64 guarantees UseSSE >= 2)
1058 // rbx: index
1059 // rdx: array
1060 index_check(rdx, rbx); // prefer index in rbx
1061 __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1062 Address(rdx, rbx, Address::times_8,
1063 arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1064 noreg /* dtos */, noreg, noreg, noreg);
1065 }
1066
1067 void TemplateTable::aastore() {
1068 Label is_null, ok_is_subtype, done;
1069 transition(vtos, vtos);
1070 // stack: ..., array, index, value
1071 __ movptr(rax, at_tos()); // value
1072 __ movl(rcx, at_tos_p1()); // index
1073 __ movptr(rdx, at_tos_p2()); // array
1074
1075 Address element_address(rdx, rcx,
1076 UseCompressedOops? Address::times_4 : Address::times_ptr,
1077 arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1078
1079 index_check_without_pop(rdx, rcx); // kills rbx
1080 __ testptr(rax, rax);
1081 __ jcc(Assembler::zero, is_null);
1082
1083 // Move subklass into rbx
1084 __ load_klass(rbx, rax, rscratch1);
1085 // Move superklass into rax
1086 __ load_klass(rax, rdx, rscratch1);
1087 __ movptr(rax, Address(rax,
1088 ObjArrayKlass::element_klass_offset()));
1089
1090 // Generate subtype check. Blows rcx, rdi
1091 // Superklass in rax. Subklass in rbx.
1092 __ gen_subtype_check(rbx, ok_is_subtype);
1093
1094 // Come here on failure
1095 // object is at TOS
1096 __ jump(RuntimeAddress(Interpreter::_throw_ArrayStoreException_entry));
1097
1098 // Come here on success
1099 __ bind(ok_is_subtype);
1100
1101 // Get the value we will store
1102 __ movptr(rax, at_tos());
1103 __ movl(rcx, at_tos_p1()); // index
1104 // Now store using the appropriate barrier
1105 do_oop_store(_masm, element_address, rax, IS_ARRAY);
1106 __ jmp(done);
1107
1108 // Have a null in rax, rdx=array, ecx=index. Store null at ary[idx]
1109 __ bind(is_null);
1110 __ profile_null_seen(rbx);
1111
1112 // Store a null
1113 do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1114
1115 // Pop stack arguments
1116 __ bind(done);
1117 __ addptr(rsp, 3 * Interpreter::stackElementSize);
1118 }
1119
1120 void TemplateTable::bastore() {
1121 transition(itos, vtos);
1122 __ pop_i(rbx);
1123 // rax: value
1124 // rbx: index
1125 // rdx: array
1126 index_check(rdx, rbx); // prefer index in rbx
1127 // Need to check whether array is boolean or byte
1128 // since both types share the bastore bytecode.
1129 __ load_klass(rcx, rdx, rscratch1);
1130 __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1131 int diffbit = Klass::layout_helper_boolean_diffbit();
1132 __ testl(rcx, diffbit);
1133 Label L_skip;
1134 __ jccb(Assembler::zero, L_skip);
1905 __ jcc(j_not(cc), not_taken);
1906 branch(false, false);
1907 __ bind(not_taken);
1908 __ profile_not_taken_branch(rax);
1909 }
1910
1911 void TemplateTable::if_nullcmp(Condition cc) {
1912 transition(atos, vtos);
1913 // assume branch is more often taken than not (loops use backward branches)
1914 Label not_taken;
1915 __ testptr(rax, rax);
1916 __ jcc(j_not(cc), not_taken);
1917 branch(false, false);
1918 __ bind(not_taken);
1919 __ profile_not_taken_branch(rax);
1920 }
1921
1922 void TemplateTable::if_acmp(Condition cc) {
1923 transition(atos, vtos);
1924 // assume branch is more often taken than not (loops use backward branches)
1925 Label not_taken;
1926 __ pop_ptr(rdx);
1927 __ cmpoop(rdx, rax);
1928 __ jcc(j_not(cc), not_taken);
1929 branch(false, false);
1930 __ bind(not_taken);
1931 __ profile_not_taken_branch(rax);
1932 }
1933
1934 void TemplateTable::ret() {
1935 transition(vtos, vtos);
1936 locals_index(rbx);
1937 __ movslq(rbx, iaddress(rbx)); // get return bci, compute return bcp
1938 __ profile_ret(rbx, rcx);
1939 __ get_method(rax);
1940 __ movptr(rbcp, Address(rax, Method::const_offset()));
1941 __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
1942 ConstMethod::codes_offset()));
1943 __ dispatch_next(vtos, 0, true);
1944 }
1945
1946 void TemplateTable::wide_ret() {
1947 transition(vtos, vtos);
1948 locals_index_wide(rbx);
1949 __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
1950 __ profile_ret(rbx, rcx);
1951 __ get_method(rax);
2165 if (_desc->bytecode() != Bytecodes::_return_register_finalizer) {
2166 Label no_safepoint;
2167 NOT_PRODUCT(__ block_comment("Thread-local Safepoint poll"));
2168 __ testb(Address(r15_thread, JavaThread::polling_word_offset()), SafepointMechanism::poll_bit());
2169 __ jcc(Assembler::zero, no_safepoint);
2170 __ push(state);
2171 __ push_cont_fastpath();
2172 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2173 InterpreterRuntime::at_safepoint));
2174 __ pop_cont_fastpath();
2175 __ pop(state);
2176 __ bind(no_safepoint);
2177 }
2178
2179 // Narrow result if state is itos but result type is smaller.
2180 // Need to narrow in the return bytecode rather than in generate_return_entry
2181 // since compiled code callers expect the result to already be narrowed.
2182 if (state == itos) {
2183 __ narrow(rax);
2184 }
2185 __ remove_activation(state, rbcp);
2186
2187 __ jmp(rbcp);
2188 }
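// A sketch of the poll above: JavaThread::polling_word is a thread-local word
// whose SafepointMechanism::poll_bit is armed when a safepoint or handshake
// is pending, so the testb/jcc pair is the cheap fast path and
// InterpreterRuntime::at_safepoint is only entered when the bit is set.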
2189
2190 // ----------------------------------------------------------------------------
2191 // Volatile variables demand their effects be made known to all CPUs
2192 // in order. Store buffers on most chips allow reads & writes to
2193 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2194 // without some kind of memory barrier (i.e., it's not sufficient that
2195 // the interpreter does not reorder volatile references, the hardware
2196 // also must not reorder them).
2197 //
2198 // According to the new Java Memory Model (JMM):
2199 // (1) All volatiles are serialized with respect to each other. ALSO reads &
2200 // writes act as acquire & release, so:
2201 // (2) A read cannot let unrelated NON-volatile memory refs that
2202 // happen after the read float up to before the read. It's OK for
2203 // non-volatile memory refs that happen before the volatile read to
2204 // float down below it.
2205 // (3) Similarly, a volatile write cannot let unrelated NON-volatile
2527 }
2528 // rax: object pointer or null
2529 // cache: cache entry pointer
2530 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
2531 rax, cache);
2532
2533 __ load_field_entry(cache, index);
2534 __ bind(L1);
2535 }
2536 }
2537
2538 void TemplateTable::pop_and_check_object(Register r) {
2539 __ pop_ptr(r);
2540 __ null_check(r); // for field access must check obj.
2541 __ verify_oop(r);
2542 }
2543
2544 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2545 transition(vtos, vtos);
2546
2547 const Register obj = c_rarg3;
2548 const Register cache = rcx;
2549 const Register index = rdx;
2550 const Register off = rbx;
2551 const Register tos_state = rax;
2552 const Register flags = rdx;
2553 const Register bc = c_rarg3; // uses same reg as obj, so don't mix them
2554
2555 resolve_cache_and_index_for_field(byte_no, cache, index);
2556 jvmti_post_field_access(cache, index, is_static, false);
2557 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2558
2559 if (!is_static) pop_and_check_object(obj);
2560
2561 const Address field(obj, off, Address::times_1, 0*wordSize);
2562
2563 Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj;
2564
2565 // btos is TOS state 0, so a plain zero test of tos_state selects the byte case
2566 assert(btos == 0, "change code, btos != 0");
2567 __ testl(tos_state, tos_state);
2568 __ jcc(Assembler::notZero, notByte);
2569
2570 // btos
2571 __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
2572 __ push(btos);
2573 // Rewrite bytecode to be faster
2574 if (!is_static && rc == may_rewrite) {
2575 patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2576 }
2577 __ jmp(Done);
2578
2579 __ bind(notByte);
2580 __ cmpl(tos_state, ztos);
2581 __ jcc(Assembler::notEqual, notBool);
2582
2583 // ztos (same code as btos)
2584 __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
2585 __ push(ztos);
2586 // Rewrite bytecode to be faster
2587 if (!is_static && rc == may_rewrite) {
2588 // use btos rewriting, no truncating to t/f bit is needed for getfield.
2589 patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2590 }
2591 __ jmp(Done);
2592
2593 __ bind(notBool);
2594 __ cmpl(tos_state, atos);
2595 __ jcc(Assembler::notEqual, notObj);
2596 // atos
2597 do_oop_load(_masm, field, rax);
2598 __ push(atos);
2599 if (!is_static && rc == may_rewrite) {
2600 patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
2601 }
2602 __ jmp(Done);
2603
2604 __ bind(notObj);
2605 __ cmpl(tos_state, itos);
2606 __ jcc(Assembler::notEqual, notInt);
2607 // itos
2608 __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
2609 __ push(itos);
2610 // Rewrite bytecode to be faster
2611 if (!is_static && rc == may_rewrite) {
2612 patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
2613 }
2614 __ jmp(Done);
2615
2616 __ bind(notInt);
2617 __ cmpl(tos_state, ctos);
2618 __ jcc(Assembler::notEqual, notChar);
2619 // ctos
2620 __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
2621 __ push(ctos);
2622 // Rewrite bytecode to be faster
2623 if (!is_static && rc == may_rewrite) {
2624 patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
2684 #endif
2685
2686 __ bind(Done);
2687 // [jk] not needed currently
2688 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
2689 // Assembler::LoadStore));
2690 }
2691
2692 void TemplateTable::getfield(int byte_no) {
2693 getfield_or_static(byte_no, false);
2694 }
2695
2696 void TemplateTable::nofast_getfield(int byte_no) {
2697 getfield_or_static(byte_no, false, may_not_rewrite);
2698 }
2699
2700 void TemplateTable::getstatic(int byte_no) {
2701 getfield_or_static(byte_no, true);
2702 }
2703
2704
2705 // The registers cache and index are expected to be set before the call.
2706 // The function may destroy various registers, just not the cache and index registers.
2707 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
2708 // Cache is rcx and index is rdx
2709 const Register entry = c_rarg2; // ResolvedFieldEntry
2710 const Register obj = c_rarg1; // Object pointer
2711 const Register value = c_rarg3; // JValue object
2712
2713 if (JvmtiExport::can_post_field_modification()) {
2714 // Check to see if a field modification watch has been set before
2715 // we take the time to call into the VM.
2716 Label L1;
2717 assert_different_registers(cache, obj, rax);
2718 __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
2719 __ testl(rax, rax);
2720 __ jcc(Assembler::zero, L1);
2721
2722 __ mov(entry, cache);
2723
2724 if (is_static) {
2746 // cache: field entry pointer
2747 // value: jvalue object on the stack
2748 __ call_VM(noreg,
2749 CAST_FROM_FN_PTR(address,
2750 InterpreterRuntime::post_field_modification),
2751 obj, entry, value);
2752 // Reload field entry
2753 __ load_field_entry(cache, index);
2754 __ bind(L1);
2755 }
2756 }
2757
2758 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2759 transition(vtos, vtos);
2760
2761 const Register obj = rcx;
2762 const Register cache = rcx;
2763 const Register index = rdx;
2764 const Register tos_state = rdx;
2765 const Register off = rbx;
2766 const Register flags = rax;
2767
2768 resolve_cache_and_index_for_field(byte_no, cache, index);
2769 jvmti_post_field_mod(cache, index, is_static);
2770 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2771
2772 // [jk] not needed currently
2773 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
2774 // Assembler::StoreStore));
2775
2776 Label notVolatile, Done;
2777
2778 // Check for volatile store
2779 __ andl(flags, (1 << ResolvedFieldEntry::is_volatile_shift));
2780 __ testl(flags, flags);
2781 __ jcc(Assembler::zero, notVolatile);
2782
2783 putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state);
2784 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
2785 Assembler::StoreStore));
2786 __ jmp(Done);
2787 __ bind(notVolatile);
2788
2789 putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state);
2790
2791 __ bind(Done);
2792 }
2793
2794 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
2795 Register obj, Register off, Register tos_state) {
2796
2797 // field addresses
2798 const Address field(obj, off, Address::times_1, 0*wordSize);
2799
2800 Label notByte, notBool, notInt, notShort, notChar,
2801 notLong, notFloat, notObj;
2802 Label Done;
2803
2804 const Register bc = c_rarg3;
2805
2806 // Test TOS state
2807 __ testl(tos_state, tos_state);
2808 __ jcc(Assembler::notZero, notByte);
2809
2810 // btos
2811 {
2812 __ pop(btos);
2813 if (!is_static) pop_and_check_object(obj);
2814 __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
2815 if (!is_static && rc == may_rewrite) {
2816 patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
2817 }
2818 __ jmp(Done);
2819 }
2820
2821 __ bind(notByte);
2822 __ cmpl(tos_state, ztos);
2823 __ jcc(Assembler::notEqual, notBool);
2824
2825 // ztos
2826 {
2827 __ pop(ztos);
2828 if (!is_static) pop_and_check_object(obj);
2829 __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
2830 if (!is_static && rc == may_rewrite) {
2831 patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
2832 }
2833 __ jmp(Done);
2834 }
2835
2836 __ bind(notBool);
2837 __ cmpl(tos_state, atos);
2838 __ jcc(Assembler::notEqual, notObj);
2839
2840 // atos
2841 {
2842 __ pop(atos);
2843 if (!is_static) pop_and_check_object(obj);
2844 // Store into the field
2845 do_oop_store(_masm, field, rax);
2846 if (!is_static && rc == may_rewrite) {
2847 patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
2848 }
2849 __ jmp(Done);
2850 }
2851
2852 __ bind(notObj);
2853 __ cmpl(tos_state, itos);
2854 __ jcc(Assembler::notEqual, notInt);
2855
2856 // itos
2857 {
2858 __ pop(itos);
2859 if (!is_static) pop_and_check_object(obj);
2860 __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
2861 if (!is_static && rc == may_rewrite) {
2862 patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
2863 }
2864 __ jmp(Done);
2865 }
2866
2867 __ bind(notInt);
2868 __ cmpl(tos_state, ctos);
2869 __ jcc(Assembler::notEqual, notChar);
2966 }
2967
2968 void TemplateTable::jvmti_post_fast_field_mod() {
2969
2970 const Register scratch = c_rarg3;
2971
2972 if (JvmtiExport::can_post_field_modification()) {
2973 // Check to see if a field modification watch has been set before
2974 // we take the time to call into the VM.
2975 Label L2;
2976 __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
2977 __ testl(scratch, scratch);
2978 __ jcc(Assembler::zero, L2);
2979 __ pop_ptr(rbx); // copy the object pointer from tos
2980 __ verify_oop(rbx);
2981 __ push_ptr(rbx); // put the object pointer back on tos
2982 // Save tos values before call_VM() clobbers them. Since we have
2983 // to do it for every data type, we use the saved values as the
2984 // jvalue object.
2985 switch (bytecode()) { // load values into the jvalue object
2986 case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
2987 case Bytecodes::_fast_bputfield: // fall through
2988 case Bytecodes::_fast_zputfield: // fall through
2989 case Bytecodes::_fast_sputfield: // fall through
2990 case Bytecodes::_fast_cputfield: // fall through
2991 case Bytecodes::_fast_iputfield: __ push_i(rax); break;
2992 case Bytecodes::_fast_dputfield: __ push(dtos); break;
2993 case Bytecodes::_fast_fputfield: __ push(ftos); break;
2994 case Bytecodes::_fast_lputfield: __ push_l(rax); break;
2995
2996 default:
2997 ShouldNotReachHere();
2998 }
2999 __ mov(scratch, rsp); // points to jvalue on the stack
3000 // access constant pool cache entry
3001 __ load_field_entry(c_rarg2, rax);
3002 __ verify_oop(rbx);
3003 // rbx: object pointer copied above
3004 // c_rarg2: cache entry pointer
3005 // c_rarg3: jvalue object on the stack
3006 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3);
3007
3008 switch (bytecode()) { // restore tos values
3009 case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3010 case Bytecodes::_fast_bputfield: // fall through
3011 case Bytecodes::_fast_zputfield: // fall through
3012 case Bytecodes::_fast_sputfield: // fall through
3013 case Bytecodes::_fast_cputfield: // fall through
3014 case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3015 case Bytecodes::_fast_dputfield: __ pop(dtos); break;
3016 case Bytecodes::_fast_fputfield: __ pop(ftos); break;
3017 case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
3018 default: break;
3019 }
3020 __ bind(L2);
3021 }
3022 }
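// (On the jvalue trick above: each TOS value is pushed onto the expression
// stack in its natural slot format and rsp is then passed as the jvalue
// pointer, so the VM side can read the value without a per-type argument
// list.)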
3023
3024 void TemplateTable::fast_storefield(TosState state) {
3025 transition(state, vtos);
3026
3027 Register cache = rcx;
3028
3029 Label notVolatile, Done;
3030
3031 jvmti_post_fast_field_mod();
3032
3033 __ push(rax);
3034 __ load_field_entry(rcx, rax);
3035 load_resolved_field_entry(noreg, cache, rax, rbx, rdx);
3036 // RBX: field offset, RAX: TOS, RDX: flags
3037 __ andl(rdx, (1 << ResolvedFieldEntry::is_volatile_shift));
3038 __ pop(rax);
3039
3040 // Get object from stack
3041 pop_and_check_object(rcx);
3042
3043 // field address
3044 const Address field(rcx, rbx, Address::times_1);
3045
3046 // Check for volatile store
3047 __ testl(rdx, rdx);
3048 __ jcc(Assembler::zero, notVolatile);
3049
3050 fast_storefield_helper(field, rax);
3051 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3052 Assembler::StoreStore));
3053 __ jmp(Done);
3054 __ bind(notVolatile);
3055
3056 fast_storefield_helper(field, rax);
3057
3058 __ bind(Done);
3059 }
3060
3061 void TemplateTable::fast_storefield_helper(Address field, Register rax) {
3062
3063 // access field
3064 switch (bytecode()) {
3065 case Bytecodes::_fast_aputfield:
3066 do_oop_store(_masm, field, rax);
3067 break;
3068 case Bytecodes::_fast_lputfield:
3069 __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg, noreg);
3070 break;
3071 case Bytecodes::_fast_iputfield:
3072 __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3073 break;
3074 case Bytecodes::_fast_zputfield:
3075 __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3076 break;
3077 case Bytecodes::_fast_bputfield:
3078 __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3079 break;
3080 case Bytecodes::_fast_sputfield:
3081 __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg, noreg);
3082 break;
3083 case Bytecodes::_fast_cputfield:
3084 __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg, noreg);
3085 break;
3086 case Bytecodes::_fast_fputfield:
3102 // Check to see if a field access watch has been set before we
3103 // take the time to call into the VM.
3104 Label L1;
3105 __ mov32(rcx, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
3106 __ testl(rcx, rcx);
3107 __ jcc(Assembler::zero, L1);
3108 // access constant pool cache entry
3109 __ load_field_entry(c_rarg2, rcx);
3110 __ verify_oop(rax);
3111 __ push_ptr(rax); // save object pointer before call_VM() clobbers it
3112 __ mov(c_rarg1, rax);
3113 // c_rarg1: object pointer copied above
3114 // c_rarg2: cache entry pointer
3115 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2);
3116 __ pop_ptr(rax); // restore object pointer
3117 __ bind(L1);
3118 }
3119
3120 // access constant pool cache
3121 __ load_field_entry(rcx, rbx);
3122 __ load_sized_value(rbx, Address(rcx, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
3123
3124 // rax: object
3125 __ verify_oop(rax);
3126 __ null_check(rax);
3127 Address field(rax, rbx, Address::times_1);
3128
3129 // access field
3130 switch (bytecode()) {
3131 case Bytecodes::_fast_agetfield:
3132 do_oop_load(_masm, field, rax);
3133 __ verify_oop(rax);
3134 break;
3135 case Bytecodes::_fast_lgetfield:
3136 __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3137 break;
3138 case Bytecodes::_fast_igetfield:
3139 __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3140 break;
3141 case Bytecodes::_fast_bgetfield:
3142 __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3143 break;
3144 case Bytecodes::_fast_sgetfield:
3145 __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
3146 break;
3147 case Bytecodes::_fast_cgetfield:
3148 __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
3149 break;
3150 case Bytecodes::_fast_fgetfield:
3535
3536 // Note: rax_callsite is already pushed
3537
3538 // %%% should make a type profile for any invokedynamic that takes a ref argument
3539 // profile this call
3540 __ profile_call(rbcp);
3541 __ profile_arguments_type(rdx, rbx_method, rbcp, false);
3542
3543 __ verify_oop(rax_callsite);
3544
3545 __ jump_from_interpreted(rbx_method, rdx);
3546 }
3547
3548 //-----------------------------------------------------------------------------
3549 // Allocation
3550
3551 void TemplateTable::_new() {
3552 transition(vtos, atos);
3553 __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
3554 Label slow_case;
3555 Label slow_case_no_pop;
3556 Label done;
3557 Label initialize_header;
3558
3559 __ get_cpool_and_tags(rcx, rax);
3560
3561 // Make sure the class we're about to instantiate has been resolved.
3562 // This is done before loading InstanceKlass to be consistent with the order
3563 // in which the constant pool is updated (see ConstantPool::klass_at_put)
3564 const int tags_offset = Array<u1>::base_offset_in_bytes();
3565 __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
3566 __ jcc(Assembler::notEqual, slow_case_no_pop);
3567
3568 // get InstanceKlass
3569 __ load_resolved_klass_at_index(rcx, rcx, rdx);
3570 __ push(rcx); // save the klass for initializing the header
3571
3572 // make sure klass is initialized
3573 // init_state needs acquire, but x86 is TSO, and so we are already good.
3574 assert(VM_Version::supports_fast_class_init_checks(), "must support fast class initialization checks");
3575 __ clinit_barrier(rcx, r15_thread, nullptr /*L_fast_path*/, &slow_case);
3576
3577 // get instance_size in InstanceKlass (scaled to a count of bytes)
3578 __ movl(rdx, Address(rcx, Klass::layout_helper_offset()));
3579 // test to see if it is malformed in some way
3580 __ testl(rdx, Klass::_lh_instance_slow_path_bit);
3581 __ jcc(Assembler::notZero, slow_case);
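// (Klass::layout_helper packs the instance size in bytes for well-behaved
// instance klasses; _lh_instance_slow_path_bit is set for klasses that must
// not be fast-path allocated, e.g. those with finalizers, which is why a
// single testl suffices to route them to the slow case.)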
3582
3583 // Allocate the instance:
3584 // If TLAB is enabled:
3585 // Try to allocate in the TLAB.
3586 // If fails, go to the slow path.
3587 // Initialize the allocation.
3588 // Exit.
3589 //
3590 // Go to slow path.
3591
3592 if (UseTLAB) {
3593 __ tlab_allocate(r15_thread, rax, rdx, 0, rcx, rbx, slow_case);
3594 if (ZeroTLAB) {
3595 // the fields have been already cleared
3596 __ jmp(initialize_header);
3597 }
3598
3599 // The object is initialized before the header. If the object size is
3600 // zero, go directly to the header initialization.
3601 if (UseCompactObjectHeaders) {
3602 assert(is_aligned(oopDesc::base_offset_in_bytes(), BytesPerLong), "oop base offset must be 8-byte-aligned");
3603 __ decrement(rdx, oopDesc::base_offset_in_bytes());
3604 } else {
3605 __ decrement(rdx, sizeof(oopDesc));
3606 }
3607 __ jcc(Assembler::zero, initialize_header);
3608
3609 // Initialize topmost object field, divide rdx by 8, check if odd and
3610 // test if zero.
3611 __ xorl(rcx, rcx); // use zero reg to clear memory (shorter code)
3612 __ shrl(rdx, LogBytesPerLong); // divide by BytesPerLong and set carry flag if odd
3613
3614 // rdx must have been a multiple of 8
3615 #ifdef ASSERT
3616 // make sure rdx was a multiple of 8
3617 Label L;
3618 // Ignore partial flag stall after shrl() since it is debug VM
3619 __ jcc(Assembler::carryClear, L);
3620 __ stop("object size is not multiple of 2 - adjust this code");
3621 __ bind(L);
3622 // rdx must be > 0, no extra check needed here
3623 #endif
3624
3625 // initialize remaining object fields: rdx was a multiple of 8
3626 { Label loop;
3627 __ bind(loop);
3628 int header_size_bytes = oopDesc::header_size() * HeapWordSize;
3629 assert(is_aligned(header_size_bytes, BytesPerLong), "oop header size must be 8-byte-aligned");
3630 __ movptr(Address(rax, rdx, Address::times_8, header_size_bytes - 1*oopSize), rcx);
3631 __ decrement(rdx);
3632 __ jcc(Assembler::notZero, loop);
3633 }
3634
3635 // initialize object header only.
3636 __ bind(initialize_header);
3637 if (UseCompactObjectHeaders) {
3638 __ pop(rcx); // get saved klass back in the register.
3639 __ movptr(rbx, Address(rcx, Klass::prototype_header_offset()));
3640 __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()), rbx);
3641 } else {
3642 __ movptr(Address(rax, oopDesc::mark_offset_in_bytes()),
3643 (intptr_t)markWord::prototype().value()); // header
3644 __ pop(rcx); // get saved klass back in the register.
3645 __ xorl(rsi, rsi); // use zero reg to clear memory (shorter code)
3646 __ store_klass_gap(rax, rsi); // zero klass gap for compressed oops
3647 __ store_klass(rax, rcx, rscratch1); // klass
3648 }
3649
3650 if (DTraceAllocProbes) {
3651 // Trigger dtrace event for fastpath
3652 __ push(atos);
3653 __ call_VM_leaf(
3654 CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
3655 __ pop(atos);
3656 }
3657
3658 __ jmp(done);
3659 }
3660
3661 // slow case
3662 __ bind(slow_case);
3663 __ pop(rcx); // restore stack pointer to what it was when we came in.
3664 __ bind(slow_case_no_pop);
3665
3666 __ get_constant_pool(c_rarg1);
3667 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3668 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3669 __ verify_oop(rax);
3670
3671 // continue
3672 __ bind(done);
3673 }
3674
3675 void TemplateTable::newarray() {
3676 transition(itos, atos);
3677 __ load_unsigned_byte(c_rarg1, at_bcp(1));
3678 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3679 c_rarg1, rax);
3680 }
3681
3682 void TemplateTable::anewarray() {
3683 transition(itos, atos);
3684
3685 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3686 __ get_constant_pool(c_rarg1);
3687 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
3688 c_rarg1, c_rarg2, rax);
3689 }
3690
3691 void TemplateTable::arraylength() {
3692 transition(atos, itos);
3693 __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
3694 }
3695
3696 void TemplateTable::checkcast() {
3697 transition(atos, atos);
3698 Label done, is_null, ok_is_subtype, quicked, resolved;
3699 __ testptr(rax, rax); // object is in rax
3700 __ jcc(Assembler::zero, is_null);
3701
3702 // Get cpool & tags index
3703 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
3704 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
3705 // See if bytecode has already been quicked
3706 __ cmpb(Address(rdx, rbx,
3707 Address::times_1,
3708 Array<u1>::base_offset_in_bytes()),
3709 JVM_CONSTANT_Class);
3710 __ jcc(Assembler::equal, quicked);
3711 __ push(atos); // save receiver for result, and for GC
3712 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3713
3714 // vm_result_2 has metadata result
3715 __ get_vm_result_2(rax, r15_thread);
3716
3717 __ pop_ptr(rdx); // restore receiver
3718 __ jmpb(resolved);
3719
3720 // Get superklass in rax and subklass in rbx
3721 __ bind(quicked);
3722 __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
3723 __ load_resolved_klass_at_index(rax, rcx, rbx);
3724
3725 __ bind(resolved);
3726 __ load_klass(rbx, rdx, rscratch1);
3727
3728 // Generate subtype check. Blows rcx, rdi. Object in rdx.
3729 // Superklass in rax. Subklass in rbx.
3730 __ gen_subtype_check(rbx, ok_is_subtype);
3731
3732 // Come here on failure
3733 __ push_ptr(rdx);
3734 // object is at TOS
3735 __ jump(RuntimeAddress(Interpreter::_throw_ClassCastException_entry));
3736
3737 // Come here on success
3738 __ bind(ok_is_subtype);
3739 __ mov(rax, rdx); // Restore the object from rdx
3740
3741 // Collect counts on whether this check-cast sees nulls a lot or not.
3742 if (ProfileInterpreter) {
3743 __ jmp(done);
3744 __ bind(is_null);
3745 __ profile_null_seen(rcx);
3746 } else {
3747 __ bind(is_null); // same as 'done'
3748 }
3749 __ bind(done);
3750 }
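// (checkcast and instanceof share this shape: the constant-pool tag check
// decides whether the klass operand is already resolved ("quicked"); if not,
// InterpreterRuntime::quicken_io_cc resolves it and returns the klass in
// vm_result_2.)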
3751
3752 void TemplateTable::instanceof() {
3753 transition(atos, itos);
3754 Label done, is_null, ok_is_subtype, quicked, resolved;
3755 __ testptr(rax, rax);
3756 __ jcc(Assembler::zero, is_null);
3757
3758 // Get cpool & tags index
3759 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
3760 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
3761 // See if bytecode has already been quicked
3762 __ cmpb(Address(rdx, rbx,
3763 Address::times_1,
3764 Array<u1>::base_offset_in_bytes()),
3765 JVM_CONSTANT_Class);
3766 __ jcc(Assembler::equal, quicked);
3767
3768 __ push(atos); // save receiver for result, and for GC
3769 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3770 // vm_result_2 has metadata result
3771
3772 __ get_vm_result_2(rax, r15_thread);
3773
3774 __ pop_ptr(rdx); // restore receiver
3775 __ verify_oop(rdx);
3776 __ load_klass(rdx, rdx, rscratch1);
3777 __ jmpb(resolved);
3778
3779 // Get superklass in rax and subklass in rdx
3780 __ bind(quicked);
3781 __ load_klass(rdx, rax, rscratch1);
3782 __ load_resolved_klass_at_index(rax, rcx, rbx);
3783
3784 __ bind(resolved);
3785
3790 // Come here on failure
3791 __ xorl(rax, rax);
3792 __ jmpb(done);
3793 // Come here on success
3794 __ bind(ok_is_subtype);
3795 __ movl(rax, 1);
3796
3797 // Collect counts on whether this test sees nulls a lot or not.
3798 if (ProfileInterpreter) {
3799 __ jmp(done);
3800 __ bind(is_null);
3801 __ profile_null_seen(rcx);
3802 } else {
3803 __ bind(is_null); // same as 'done'
3804 }
3805 __ bind(done);
3806 // rax = 0: obj == nullptr or obj is not an instanceof the specified klass
3807 // rax = 1: obj != nullptr and obj is an instanceof the specified klass
3808 }
3809
3810
3811 //----------------------------------------------------------------------------------------------------
3812 // Breakpoints
3813 void TemplateTable::_breakpoint() {
3814 // Note: We get here even if we are single stepping..
3815 // jbug insists on setting breakpoints at every bytecode
3816 // even if we are in single step mode.
3817
3818 transition(vtos, vtos);
3819
3820 // get the unpatched byte code
3821 __ get_method(c_rarg1);
3822 __ call_VM(noreg,
3823 CAST_FROM_FN_PTR(address,
3824 InterpreterRuntime::get_original_bytecode_at),
3825 c_rarg1, rbcp);
3826 __ mov(rbx, rax); // save the original bytecode; the dispatch below expects it in rbx and the next call_VM clobbers rax
3827
3828 // post the breakpoint event
3829 __ get_method(c_rarg1);
3830 __ call_VM(noreg,
3850 // Note: monitorenter & exit are symmetric routines, which is reflected
3851 // in the assembly code structure as well
3852 //
3853 // Stack layout:
3854 //
3855 // [expressions ] <--- rsp = expression stack top
3856 // ..
3857 // [expressions ]
3858 // [monitor entry] <--- monitor block top = expression stack bot
3859 // ..
3860 // [monitor entry]
3861 // [frame data ] <--- monitor block bot
3862 // ...
3863 // [saved rbp ] <--- rbp
3864 void TemplateTable::monitorenter() {
3865 transition(atos, vtos);
3866
3867 // check for null object
3868 __ null_check(rax);
3869
3870 const Address monitor_block_top(
3871 rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
3872 const Address monitor_block_bot(
3873 rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
3874 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
3875
3876 Label allocated;
3877
3878 Register rtop = c_rarg3;
3879 Register rbot = c_rarg2;
3880 Register rmon = c_rarg1;
3881
3882 // initialize entry pointer
3883 __ xorl(rmon, rmon); // points to free slot or null
3884
3885 // find a free slot in the monitor block (result in rmon)
3886 {
3887 Label entry, loop, exit;
3888 __ movptr(rtop, monitor_block_top); // derelativize pointer
3889 __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
3942 // rmon: points to monitor entry
3943 __ bind(allocated);
3944
3945 // Increment bcp to point to the next bytecode, so exception
3946 // handling for async. exceptions works correctly.
3947 // The object has already been popped from the stack, so the
3948 // expression stack looks correct.
3949 __ increment(rbcp);
3950
3951 // store object
3952 __ movptr(Address(rmon, BasicObjectLock::obj_offset()), rax);
3953 __ lock_object(rmon);
3954
3955 // check to make sure this monitor doesn't cause stack overflow after locking
3956 __ save_bcp(); // in case of exception
3957 __ generate_stack_overflow_check(0);
3958
3959 // The bcp has already been incremented. Just need to dispatch to
3960 // next instruction.
3961 __ dispatch_next(vtos);
3962 }
3963
3964 void TemplateTable::monitorexit() {
3965 transition(atos, vtos);
3966
3967 // check for null object
3968 __ null_check(rax);
3969
3970 const Address monitor_block_top(
3971 rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
3972 const Address monitor_block_bot(
3973 rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
3974 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
3975
3976 Register rtop = c_rarg1;
3977 Register rbot = c_rarg2;
3978
3979 Label found;
3980
3981 // find matching slot
3982 {
3983 Label entry, loop;
3984 __ movptr(rtop, monitor_block_top); // derelativize pointer
3985 __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
3986 // rtop points to current entry, starting with top-most entry
3987
3988 __ lea(rbot, monitor_block_bot); // points to word before bottom
3989 // of monitor block
// ============================================================================
// Second listing: the same templateTable_x86.cpp with the Valhalla
// (inline/value type) changes applied; note the added oops/inlineKlass.hpp
// include, the _fast_vputfield quickening case, and the EnableValhalla /
// UseArrayFlattening paths below.
// ============================================================================
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "asm/macroAssembler.hpp"
26 #include "compiler/disassembler.hpp"
27 #include "gc/shared/collectedHeap.hpp"
28 #include "gc/shared/gc_globals.hpp"
29 #include "gc/shared/tlab_globals.hpp"
30 #include "interpreter/interpreter.hpp"
31 #include "interpreter/interpreterRuntime.hpp"
32 #include "interpreter/interp_masm.hpp"
33 #include "interpreter/templateTable.hpp"
34 #include "memory/universe.hpp"
35 #include "oops/methodCounters.hpp"
36 #include "oops/methodData.hpp"
37 #include "oops/objArrayKlass.hpp"
38 #include "oops/oop.inline.hpp"
39 #include "oops/inlineKlass.hpp"
40 #include "oops/resolvedFieldEntry.hpp"
41 #include "oops/resolvedIndyEntry.hpp"
42 #include "oops/resolvedMethodEntry.hpp"
43 #include "prims/jvmtiExport.hpp"
44 #include "prims/methodHandles.hpp"
45 #include "runtime/frame.inline.hpp"
46 #include "runtime/safepointMechanism.hpp"
47 #include "runtime/sharedRuntime.hpp"
48 #include "runtime/stubRoutines.hpp"
49 #include "runtime/synchronizer.hpp"
50 #include "utilities/macros.hpp"
51
52 #define __ Disassembler::hook<InterpreterMacroAssembler>(__FILE__, __LINE__, _masm)->
53
54 // Global Register Names
55 static const Register rbcp = r13;
56 static const Register rlocals = r14;
57
58 // Address Computation: local variables
59 static inline Address iaddress(int n) {
151 static void do_oop_load(InterpreterMacroAssembler* _masm,
152 Address src,
153 Register dst,
154 DecoratorSet decorators = 0) {
155 __ load_heap_oop(dst, src, rdx, rbx, decorators);
156 }
157
158 Address TemplateTable::at_bcp(int offset) {
159 assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
160 return Address(rbcp, offset);
161 }
162
163
164 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
165 Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
166 int byte_no) {
167 if (!RewriteBytecodes) return;
168 Label L_patch_done;
169
170 switch (bc) {
171 case Bytecodes::_fast_vputfield:
172 case Bytecodes::_fast_aputfield:
173 case Bytecodes::_fast_bputfield:
174 case Bytecodes::_fast_zputfield:
175 case Bytecodes::_fast_cputfield:
176 case Bytecodes::_fast_dputfield:
177 case Bytecodes::_fast_fputfield:
178 case Bytecodes::_fast_iputfield:
179 case Bytecodes::_fast_lputfield:
180 case Bytecodes::_fast_sputfield:
181 {
182 // We skip bytecode quickening for putfield instructions when
183 // the put_code written to the constant pool cache is zero.
184 // This is required so that every execution of this instruction
185 // calls out to InterpreterRuntime::resolve_get_put to do
186 // additional, required work.
187 assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
188 assert(load_bc_into_bc_reg, "we use bc_reg as temp");
189 __ load_field_entry(temp_reg, bc_reg);
190 if (byte_no == f1_byte) {
191 __ load_unsigned_byte(temp_reg, Address(temp_reg, in_bytes(ResolvedFieldEntry::get_code_offset())));
768 Address(rdx, rax,
769 Address::times_4,
770 arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
771 noreg, noreg);
772 }
773
774 void TemplateTable::daload() {
775 transition(itos, dtos);
776 // rax: index
777 // rdx: array
778 index_check(rdx, rax); // kills rbx
779 __ access_load_at(T_DOUBLE, IN_HEAP | IS_ARRAY, noreg /* dtos */,
780 Address(rdx, rax,
781 Address::times_8,
782 arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
783 noreg, noreg);
784 }
785
786 void TemplateTable::aaload() {
787 transition(itos, atos);
788 Register array = rdx;
789 Register index = rax;
790
791 index_check(array, index); // kills rbx
792 __ profile_array_type<ArrayLoadData>(rbx, array, rcx);
793 if (UseArrayFlattening) {
794 Label is_flat_array, done;
795 __ test_flat_array_oop(array, rbx, is_flat_array);
796 do_oop_load(_masm,
797 Address(array, index,
798 UseCompressedOops ? Address::times_4 : Address::times_ptr,
799 arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
800 rax,
801 IS_ARRAY);
802 __ jmp(done);
803 __ bind(is_flat_array);
804 __ movptr(rcx, array);
805 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::flat_array_load), rcx, index);
806 __ bind(done);
807 } else {
808 do_oop_load(_masm,
809 Address(array, index,
810 UseCompressedOops ? Address::times_4 : Address::times_ptr,
811 arrayOopDesc::base_offset_in_bytes(T_OBJECT)),
812 rax,
813 IS_ARRAY);
814 }
815 __ profile_element_type(rbx, rax, rcx);
816 }
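// (Sketch of the flat-array path above: with UseArrayFlattening a flat array
// stores its elements inline rather than as oops, so an element cannot be
// produced by a plain oop load; InterpreterRuntime::flat_array_load instead
// materializes a heap value object for the element and returns it in rax.)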
817
818 void TemplateTable::baload() {
819 transition(itos, itos);
820 // rax: index
821 // rdx: array
822 index_check(rdx, rax); // kills rbx
823 __ access_load_at(T_BYTE, IN_HEAP | IS_ARRAY, rax,
824 Address(rdx, rax, Address::times_1, arrayOopDesc::base_offset_in_bytes(T_BYTE)),
825 noreg, noreg);
826 }
827
828 void TemplateTable::caload() {
829 transition(itos, itos);
830 // rax: index
831 // rdx: array
832 index_check(rdx, rax); // kills rbx
833 __ access_load_at(T_CHAR, IN_HEAP | IS_ARRAY, rax,
834 Address(rdx, rax, Address::times_2, arrayOopDesc::base_offset_in_bytes(T_CHAR)),
835 noreg, noreg);
1069 __ access_store_at(T_FLOAT, IN_HEAP | IS_ARRAY,
1070 Address(rdx, rbx, Address::times_4,
1071 arrayOopDesc::base_offset_in_bytes(T_FLOAT)),
1072 noreg /* ftos */, noreg, noreg, noreg);
1073 }
1074
1075 void TemplateTable::dastore() {
1076 transition(dtos, vtos);
1077 __ pop_i(rbx);
1078 // value is in xmm0 (x86_64 guarantees UseSSE >= 2)
1079 // rbx: index
1080 // rdx: array
1081 index_check(rdx, rbx); // prefer index in rbx
1082 __ access_store_at(T_DOUBLE, IN_HEAP | IS_ARRAY,
1083 Address(rdx, rbx, Address::times_8,
1084 arrayOopDesc::base_offset_in_bytes(T_DOUBLE)),
1085 noreg /* dtos */, noreg, noreg, noreg);
1086 }
1087
1088 void TemplateTable::aastore() {
1089 Label is_null, is_flat_array, ok_is_subtype, done;
1090 transition(vtos, vtos);
1091 // stack: ..., array, index, value
1092 __ movptr(rax, at_tos()); // value
1093 __ movl(rcx, at_tos_p1()); // index
1094 __ movptr(rdx, at_tos_p2()); // array
1095
1096 Address element_address(rdx, rcx,
1097 UseCompressedOops? Address::times_4 : Address::times_ptr,
1098 arrayOopDesc::base_offset_in_bytes(T_OBJECT));
1099
1100 index_check_without_pop(rdx, rcx); // kills rbx
1101
1102 __ profile_array_type<ArrayStoreData>(rdi, rdx, rbx);
1103 __ profile_multiple_element_types(rdi, rax, rbx, rcx);
1104
1105 __ testptr(rax, rax);
1106 __ jcc(Assembler::zero, is_null);
1107
1108 // Move array class to rdi
1109 __ load_klass(rdi, rdx, rscratch1);
1110 if (UseArrayFlattening) {
1111 __ movl(rbx, Address(rdi, Klass::layout_helper_offset()));
1112 __ test_flat_array_layout(rbx, is_flat_array);
1113 }
1114
1115 // Move subklass into rbx
1116 __ load_klass(rbx, rax, rscratch1);
1117 // Move array element superklass into rax
1118 __ movptr(rax, Address(rdi,
1119 ObjArrayKlass::element_klass_offset()));
1120
1121 // Generate subtype check. Blows rcx, rdi
1122 // Superklass in rax. Subklass in rbx.
1123 // is "rbx <: rax" ? (value subclass <: array element superclass)
1124 __ gen_subtype_check(rbx, ok_is_subtype, false);
1125
1126 // Come here on failure
1127 // object is at TOS
1128 __ jump(RuntimeAddress(Interpreter::_throw_ArrayStoreException_entry));
1129
1130 // Come here on success
1131 __ bind(ok_is_subtype);
1132
1133 // Get the value we will store
1134 __ movptr(rax, at_tos());
1135 __ movl(rcx, at_tos_p1()); // index
1136 // Now store using the appropriate barrier
1137 do_oop_store(_masm, element_address, rax, IS_ARRAY);
1138 __ jmp(done);
1139
1140 // Have a null in rax, rdx=array, ecx=index. Store null at ary[idx]
1141 __ bind(is_null);
1142 if (EnableValhalla) {
1143 Label write_null_to_null_free_array, store_null;
1144
1145 // Move array class to rdi
1146 __ load_klass(rdi, rdx, rscratch1);
1147 if (UseArrayFlattening) {
1148 __ movl(rbx, Address(rdi, Klass::layout_helper_offset()));
1149 __ test_flat_array_layout(rbx, is_flat_array);
1150 }
1151
1152 // No way to store null in null-free array
1153 __ test_null_free_array_oop(rdx, rbx, write_null_to_null_free_array);
1154 __ jmp(store_null);
1155
1156 __ bind(write_null_to_null_free_array);
1157 __ jump(RuntimeAddress(Interpreter::_throw_NullPointerException_entry));
1158
1159 __ bind(store_null);
1160 }
1161 // Store a null
1162 do_oop_store(_masm, element_address, noreg, IS_ARRAY);
1163 __ jmp(done);
1164
1165 if (UseArrayFlattening) {
1166 Label is_type_ok;
1167 __ bind(is_flat_array); // Store non-null value to flat
1168
1169 __ movptr(rax, at_tos());
1170 __ movl(rcx, at_tos_p1()); // index
1171 __ movptr(rdx, at_tos_p2()); // array
1172
1173 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::flat_array_store), rax, rdx, rcx);
1174 }
1175 // Pop stack arguments
1176 __ bind(done);
1177 __ addptr(rsp, 3 * Interpreter::stackElementSize);
1178 }
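// (In the flat-array branch above the interpreter reloads all three operands
// from the expression stack and lets InterpreterRuntime::flat_array_store
// copy the value's fields into the flat element; the shared "done" epilogue
// then pops the three stack slots.)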
1179
1180 void TemplateTable::bastore() {
1181 transition(itos, vtos);
1182 __ pop_i(rbx);
1183 // rax: value
1184 // rbx: index
1185 // rdx: array
1186 index_check(rdx, rbx); // prefer index in rbx
1187 // Need to check whether array is boolean or byte
1188 // since both types share the bastore bytecode.
1189 __ load_klass(rcx, rdx, rscratch1);
1190 __ movl(rcx, Address(rcx, Klass::layout_helper_offset()));
1191 int diffbit = Klass::layout_helper_boolean_diffbit();
1192 __ testl(rcx, diffbit);
1193 Label L_skip;
1194 __ jccb(Assembler::zero, L_skip);
1965 __ jcc(j_not(cc), not_taken);
1966 branch(false, false);
1967 __ bind(not_taken);
1968 __ profile_not_taken_branch(rax);
1969 }
1970
1971 void TemplateTable::if_nullcmp(Condition cc) {
1972 transition(atos, vtos);
1973 // assume branch is more often taken than not (loops use backward branches)
1974 Label not_taken;
1975 __ testptr(rax, rax);
1976 __ jcc(j_not(cc), not_taken);
1977 branch(false, false);
1978 __ bind(not_taken);
1979 __ profile_not_taken_branch(rax);
1980 }
1981
1982 void TemplateTable::if_acmp(Condition cc) {
1983 transition(atos, vtos);
1984 // assume branch is more often taken than not (loops use backward branches)
1985 Label taken, not_taken;
1986 __ pop_ptr(rdx);
1987
1988 __ profile_acmp(rbx, rdx, rax, rcx);
1989
1990 const int is_inline_type_mask = markWord::inline_type_pattern;
1991 if (EnableValhalla) {
1992 __ cmpoop(rdx, rax);
1993 __ jcc(Assembler::equal, (cc == equal) ? taken : not_taken);
1994
1995 // might be substitutable, test if either rax or rdx is null
1996 __ testptr(rax, rax);
1997 __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);
1998 __ testptr(rdx, rdx);
1999 __ jcc(Assembler::zero, (cc == equal) ? not_taken : taken);
2000
2001 // and both are values ?
2002 __ movptr(rbx, Address(rdx, oopDesc::mark_offset_in_bytes()));
2003 __ andptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
2004 __ andptr(rbx, is_inline_type_mask);
2005 __ cmpptr(rbx, is_inline_type_mask);
2006 __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);
2007
2008 // same value klass ?
2009 __ load_metadata(rbx, rdx);
2010 __ load_metadata(rcx, rax);
2011 __ cmpptr(rbx, rcx);
2012 __ jcc(Assembler::notEqual, (cc == equal) ? not_taken : taken);
2013
2014 // Know both are the same type, let's test for substitutability...
2015 if (cc == equal) {
2016 invoke_is_substitutable(rax, rdx, taken, not_taken);
2017 } else {
2018 invoke_is_substitutable(rax, rdx, not_taken, taken);
2019 }
2020 __ stop("Not reachable");
2021 }
2022
2023 __ cmpoop(rdx, rax);
2024 __ jcc(j_not(cc), not_taken);
2025 __ bind(taken);
2026 branch(false, false);
2027 __ bind(not_taken);
2028 __ profile_not_taken_branch(rax, true);
2029 }
2030
2031 void TemplateTable::invoke_is_substitutable(Register aobj, Register bobj,
2032 Label& is_subst, Label& not_subst) {
2033 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::is_substitutable), aobj, bobj);
2034 // rax holds the int answer; jump to the matching outcome label
2035 __ testl(rax, rax);
2036 __ jcc(Assembler::zero, not_subst);
2037 __ jmp(is_subst);
2038 }
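// (Background for the acmp path above: under Valhalla two inline (value)
// objects may be "==" without being the same heap instance, so once both
// operands are known to be non-null values of the same klass the interpreter
// defers to InterpreterRuntime::is_substitutable, which compares them field
// by field and returns its answer as an int in rax.)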
2039
2040 void TemplateTable::ret() {
2041 transition(vtos, vtos);
2042 locals_index(rbx);
2043 __ movslq(rbx, iaddress(rbx)); // get return bci, compute return bcp
2044 __ profile_ret(rbx, rcx);
2045 __ get_method(rax);
2046 __ movptr(rbcp, Address(rax, Method::const_offset()));
2047 __ lea(rbcp, Address(rbcp, rbx, Address::times_1,
2048 ConstMethod::codes_offset()));
2049 __ dispatch_next(vtos, 0, true);
2050 }
2051
2052 void TemplateTable::wide_ret() {
2053 transition(vtos, vtos);
2054 locals_index_wide(rbx);
2055 __ movptr(rbx, aaddress(rbx)); // get return bci, compute return bcp
2056 __ profile_ret(rbx, rcx);
2057 __ get_method(rax);
2271 if (_desc->bytecode() != Bytecodes::_return_register_finalizer) {
2272 Label no_safepoint;
2273 NOT_PRODUCT(__ block_comment("Thread-local Safepoint poll"));
2274 __ testb(Address(r15_thread, JavaThread::polling_word_offset()), SafepointMechanism::poll_bit());
2275 __ jcc(Assembler::zero, no_safepoint);
2276 __ push(state);
2277 __ push_cont_fastpath();
2278 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
2279 InterpreterRuntime::at_safepoint));
2280 __ pop_cont_fastpath();
2281 __ pop(state);
2282 __ bind(no_safepoint);
2283 }
2284
2285 // Narrow result if state is itos but result type is smaller.
2286 // Need to narrow in the return bytecode rather than in generate_return_entry
2287 // since compiled code callers expect the result to already be narrowed.
2288 if (state == itos) {
2289 __ narrow(rax);
2290 }
2291
2292 __ remove_activation(state, rbcp, true, true, true);
2293
2294 __ jmp(rbcp);
2295 }
2296
2297 // ----------------------------------------------------------------------------
2298 // Volatile variables demand their effects be made known to all CPUs
2299 // in order. Store buffers on most chips allow reads & writes to
2300 // reorder; the JMM's ReadAfterWrite.java test fails in -Xint mode
2301 // without some kind of memory barrier (i.e., it's not sufficient that
2302 // the interpreter does not reorder volatile references, the hardware
2303 // also must not reorder them).
2304 //
2305 // According to the new Java Memory Model (JMM):
2306 // (1) All volatiles are serialized with respect to each other. ALSO reads &
2307 // writes act as acquire & release, so:
2308 // (2) A read cannot let unrelated NON-volatile memory refs that
2309 // happen after the read float up to before the read. It's OK for
2310 // non-volatile memory refs that happen before the volatile read to
2311 // float down below it.
2312 // (3) Similarly, a volatile write cannot let unrelated NON-volatile
2634 }
2635 // rax: object pointer or null
2636 // cache: cache entry pointer
2637 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access),
2638 rax, cache);
2639
2640 __ load_field_entry(cache, index);
2641 __ bind(L1);
2642 }
2643 }
2644
2645 void TemplateTable::pop_and_check_object(Register r) {
2646 __ pop_ptr(r);
2647 __ null_check(r); // for field access must check obj.
2648 __ verify_oop(r);
2649 }
2650
2651 void TemplateTable::getfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2652 transition(vtos, vtos);
2653
2654 const Register obj = r9;
2655 const Register cache = rcx;
2656 const Register index = rdx;
2657 const Register off = rbx;
2658 const Register tos_state = rax;
2659 const Register flags = rdx;
2660 const Register bc = c_rarg3; // obj is r9 here, so bc no longer aliases it
2661
2662 resolve_cache_and_index_for_field(byte_no, cache, index);
2663 jvmti_post_field_access(cache, index, is_static, false);
2664 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2665
2666 const Address field(obj, off, Address::times_1, 0*wordSize);
2667
2668 Label Done, notByte, notBool, notInt, notShort, notChar, notLong, notFloat, notObj, notInlineType;
2669
2670 // btos is TOS state 0, so a plain zero test of tos_state selects the byte case
2671 assert(btos == 0, "change code, btos != 0");
2672 __ testl(tos_state, tos_state);
2673 __ jcc(Assembler::notZero, notByte);
2674
2675 // btos
2676 if (!is_static) pop_and_check_object(obj);
2677 __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
2678 __ push(btos);
2679 // Rewrite bytecode to be faster
2680 if (!is_static && rc == may_rewrite) {
2681 patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2682 }
2683 __ jmp(Done);
2684
2685 __ bind(notByte);
2686 __ cmpl(tos_state, ztos);
2687 __ jcc(Assembler::notEqual, notBool);
2688 if (!is_static) pop_and_check_object(obj);
2689 // ztos (same code as btos)
2690 __ access_load_at(T_BOOLEAN, IN_HEAP, rax, field, noreg, noreg);
2691 __ push(ztos);
2692 // Rewrite bytecode to be faster
2693 if (!is_static && rc == may_rewrite) {
2694 // use btos rewriting, no truncating to t/f bit is needed for getfield.
2695 patch_bytecode(Bytecodes::_fast_bgetfield, bc, rbx);
2696 }
2697 __ jmp(Done);
2698
2699 __ bind(notBool);
2700 __ cmpl(tos_state, atos);
2701 __ jcc(Assembler::notEqual, notObj);
2702 // atos
2703 if (!EnableValhalla) {
2704 if (!is_static) pop_and_check_object(obj);
2705 do_oop_load(_masm, field, rax);
2706 __ push(atos);
2707 if (!is_static && rc == may_rewrite) {
2708 patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
2709 }
2710 __ jmp(Done);
2711 } else {
2712 if (is_static) {
2713 __ load_heap_oop(rax, field);
2714 Label is_null_free_inline_type, uninitialized;
2715 // An uninitialized null-free static field takes the exception path below
2716 __ test_field_is_null_free_inline_type(flags, rscratch1, is_null_free_inline_type);
2717 // field is not a null free inline type
2718 __ push(atos);
2719 __ jmp(Done);
2720 // field is a null free inline type, must not return null even if uninitialized
2721 __ bind(is_null_free_inline_type);
2722 __ testptr(rax, rax);
2723 __ jcc(Assembler::zero, uninitialized);
2724 __ push(atos);
2725 __ jmp(Done);
2726 __ bind(uninitialized);
2727 __ jump(RuntimeAddress(Interpreter::_throw_NPE_UninitializedField_entry));
2728 } else {
2729 Label is_flat, nonnull, is_null_free_inline_type, rewrite_inline, has_null_marker;
2730 __ test_field_is_null_free_inline_type(flags, rscratch1, is_null_free_inline_type);
2731 __ test_field_has_null_marker(flags, rscratch1, has_null_marker);
2732 // field is not a null free inline type
2733 pop_and_check_object(obj);
2734 __ load_heap_oop(rax, field);
2735 __ push(atos);
2736 if (rc == may_rewrite) {
2737 patch_bytecode(Bytecodes::_fast_agetfield, bc, rbx);
2738 }
2739 __ jmp(Done);
2740 __ bind(is_null_free_inline_type);
2741 __ test_field_is_flat(flags, rscratch1, is_flat);
2742 // field is not flat
2743 pop_and_check_object(obj);
2744 __ load_heap_oop(rax, field);
2745 __ testptr(rax, rax);
2746 __ jcc(Assembler::notZero, nonnull);
2747 __ jump(RuntimeAddress(Interpreter::_throw_NPE_UninitializedField_entry));
2748 __ bind(nonnull);
2749 __ verify_oop(rax);
2750 __ push(atos);
2751 __ jmp(rewrite_inline);
2752 __ bind(is_flat);
2753 pop_and_check_object(rax);
2754 __ read_flat_field(rcx, rdx, rbx, rax);
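// read_flat_field buffers the flat payload in a heap-allocated instance;
// the resulting oop is returned in rax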
2755 __ verify_oop(rax);
2756 __ push(atos);
2757 __ jmp(rewrite_inline);
2758 __ bind(has_null_marker);
2759 pop_and_check_object(rax);
2760 __ load_field_entry(rcx, rbx);
2761 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_nullable_flat_field), rax, rcx);
2762 __ get_vm_result(rax, r15_thread);
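// fetch the field value (possibly null) that the runtime call left in the
// thread-local vm_result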
2763 __ push(atos);
2764 __ bind(rewrite_inline);
2765 if (rc == may_rewrite) {
2766 patch_bytecode(Bytecodes::_fast_vgetfield, bc, rbx);
2767 }
2768 __ jmp(Done);
2769 }
2770 }
2771
2772 __ bind(notObj);
2773
2774 if (!is_static) pop_and_check_object(obj);
2775
2776 __ cmpl(tos_state, itos);
2777 __ jcc(Assembler::notEqual, notInt);
2778 // itos
2779 __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
2780 __ push(itos);
2781 // Rewrite bytecode to be faster
2782 if (!is_static && rc == may_rewrite) {
2783 patch_bytecode(Bytecodes::_fast_igetfield, bc, rbx);
2784 }
2785 __ jmp(Done);
2786
2787 __ bind(notInt);
2788 __ cmpl(tos_state, ctos);
2789 __ jcc(Assembler::notEqual, notChar);
2790 // ctos
2791 __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
2792 __ push(ctos);
2793 // Rewrite bytecode to be faster
2794 if (!is_static && rc == may_rewrite) {
2795 patch_bytecode(Bytecodes::_fast_cgetfield, bc, rbx);
2855 #endif
2856
2857 __ bind(Done);
2858 // [jk] not needed currently
2859 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadLoad |
2860 // Assembler::LoadStore));
2861 }
2862
2863 void TemplateTable::getfield(int byte_no) {
2864 getfield_or_static(byte_no, false);
2865 }
2866
2867 void TemplateTable::nofast_getfield(int byte_no) {
2868 getfield_or_static(byte_no, false, may_not_rewrite);
2869 }
2870
2871 void TemplateTable::getstatic(int byte_no) {
2872 getfield_or_static(byte_no, true);
2873 }
2874
2875 // The registers cache and index are expected to be set before the call.
2876 // The function may destroy various registers, just not the cache and index registers.
2877 void TemplateTable::jvmti_post_field_mod(Register cache, Register index, bool is_static) {
2878 // Cache is rcx and index is rdx
2879 const Register entry = c_rarg2; // ResolvedFieldEntry
2880 const Register obj = c_rarg1; // Object pointer
2881 const Register value = c_rarg3; // JValue object
2882
2883 if (JvmtiExport::can_post_field_modification()) {
2884 // Check to see if a field modification watch has been set before
2885 // we take the time to call into the VM.
2886 Label L1;
2887 assert_different_registers(cache, obj, rax);
2888 __ mov32(rax, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
2889 __ testl(rax, rax);
2890 __ jcc(Assembler::zero, L1);
2891
2892 __ mov(entry, cache);
2893
2894 if (is_static) {
2916     // entry: field entry pointer
2917 // value: jvalue object on the stack
2918 __ call_VM(noreg,
2919 CAST_FROM_FN_PTR(address,
2920 InterpreterRuntime::post_field_modification),
2921 obj, entry, value);
2922 // Reload field entry
2923 __ load_field_entry(cache, index);
2924 __ bind(L1);
2925 }
2926 }
2927
2928 void TemplateTable::putfield_or_static(int byte_no, bool is_static, RewriteControl rc) {
2929 transition(vtos, vtos);
2930
2931 const Register obj = rcx;
2932 const Register cache = rcx;
2933 const Register index = rdx;
2934 const Register tos_state = rdx;
2935 const Register off = rbx;
2936 const Register flags = r9;
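// Note: obj and cache share rcx, and index and tos_state share rdx; each
// earlier value is dead before the later one is loaded.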
2937
2938 resolve_cache_and_index_for_field(byte_no, cache, index);
2939 jvmti_post_field_mod(cache, index, is_static);
2940 load_resolved_field_entry(obj, cache, tos_state, off, flags, is_static);
2941
2942 // [jk] not needed currently
2943 // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
2944 // Assembler::StoreStore));
2945
2946 Label notVolatile, Done;
2947
2948 // Check for volatile store
2949 __ movl(rscratch1, flags);
2950 __ andl(rscratch1, (1 << ResolvedFieldEntry::is_volatile_shift));
2951 __ testl(rscratch1, rscratch1);
2952 __ jcc(Assembler::zero, notVolatile);
2953
2954 putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state, flags);
2955 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
2956 Assembler::StoreStore));
2957 __ jmp(Done);
2958 __ bind(notVolatile);
2959
2960 putfield_or_static_helper(byte_no, is_static, rc, obj, off, tos_state, flags);
2961
2962 __ bind(Done);
2963 }
2964
2965 void TemplateTable::putfield_or_static_helper(int byte_no, bool is_static, RewriteControl rc,
2966 Register obj, Register off, Register tos_state, Register flags) {
2967
2968   // field address
2969 const Address field(obj, off, Address::times_1, 0*wordSize);
2970
2971 Label notByte, notBool, notInt, notShort, notChar,
2972 notLong, notFloat, notObj, notInlineType;
2973 Label Done;
2974
2975 const Register bc = c_rarg3;
2976
2977 // Test TOS state
2978 __ testl(tos_state, tos_state);
2979 __ jcc(Assembler::notZero, notByte);
2980
2981 // btos
2982 {
2983 __ pop(btos);
2984 if (!is_static) pop_and_check_object(obj);
2985 __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
2986 if (!is_static && rc == may_rewrite) {
2987 patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
2988 }
2989 __ jmp(Done);
2990 }
2991
2992 __ bind(notByte);
2993 __ cmpl(tos_state, ztos);
2994 __ jcc(Assembler::notEqual, notBool);
2995
2996 // ztos
2997 {
2998 __ pop(ztos);
2999 if (!is_static) pop_and_check_object(obj);
3000 __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3001 if (!is_static && rc == may_rewrite) {
3002 patch_bytecode(Bytecodes::_fast_zputfield, bc, rbx, true, byte_no);
3003 }
3004 __ jmp(Done);
3005 }
3006
3007 __ bind(notBool);
3008 __ cmpl(tos_state, atos);
3009 __ jcc(Assembler::notEqual, notObj);
3010
3011 // atos
3012 {
3013 if (!EnableValhalla) {
3014 __ pop(atos);
3015 if (!is_static) pop_and_check_object(obj);
3016 // Store into the field
3017 do_oop_store(_masm, field, rax);
3018 if (!is_static && rc == may_rewrite) {
3019 patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3020 }
3021 __ jmp(Done);
3022 } else {
3023 __ pop(atos);
3024 if (is_static) {
3025 Label is_inline_type;
3026 __ test_field_is_not_null_free_inline_type(flags, rscratch1, is_inline_type);
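// a null free field must reject null stores; all other fields skip the null check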
3027 __ null_check(rax);
3028 __ bind(is_inline_type);
3029 do_oop_store(_masm, field, rax);
3030 __ jmp(Done);
3031 } else {
3032 Label is_null_free_inline_type, is_flat, has_null_marker,
3033 write_null, rewrite_not_inline, rewrite_inline;
3034 __ test_field_is_null_free_inline_type(flags, rscratch1, is_null_free_inline_type);
3035 __ test_field_has_null_marker(flags, rscratch1, has_null_marker);
3036 // Not an inline type
3037 pop_and_check_object(obj);
3038 // Store into the field
3039 do_oop_store(_masm, field, rax);
3040 __ bind(rewrite_not_inline);
3041 if (rc == may_rewrite) {
3042 patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
3043 }
3044 __ jmp(Done);
3045 // Implementation of the inline type semantic
3046 __ bind(is_null_free_inline_type);
3047 __ null_check(rax);
3048 __ test_field_is_flat(flags, rscratch1, is_flat);
3049 // field is not flat
3050 pop_and_check_object(obj);
3051 // Store into the field
3052 do_oop_store(_masm, field, rax);
3053 __ jmp(rewrite_inline);
3054 __ bind(is_flat);
3055 // field is flat
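// copy the value's payload directly into the flat field: rax will point at
// the source payload, obj at the destination field, and rbx at the
// InlineLayoutInfo consumed by flat_field_copy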
3056 __ load_unsigned_short(rdx, Address(rcx, in_bytes(ResolvedFieldEntry::field_index_offset())));
3057 __ movptr(r9, Address(rcx, in_bytes(ResolvedFieldEntry::field_holder_offset())));
3058 pop_and_check_object(obj); // obj = rcx
3059 __ load_klass(r8, rax, rscratch1);
3060 __ payload_addr(rax, rax, r8);
3061 __ addptr(obj, off);
3062 __ inline_layout_info(r9, rdx, rbx);
3063         // because we use InlineLayoutInfo, we need value access code specialized for fields (arrays will need a different API)
3064 __ flat_field_copy(IN_HEAP, rax, obj, rbx);
3065 __ jmp(rewrite_inline);
3066 __ bind(has_null_marker); // has null marker means the field is flat with a null marker
3067 pop_and_check_object(rbx);
3068 __ load_field_entry(rcx, rdx);
3069 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_nullable_flat_field), rbx, rax, rcx);
3070 __ bind(rewrite_inline);
3071 if (rc == may_rewrite) {
3072 patch_bytecode(Bytecodes::_fast_vputfield, bc, rbx, true, byte_no);
3073 }
3074 __ jmp(Done);
3075 }
3076 }
3077 }
3078
3079 __ bind(notObj);
3080 __ cmpl(tos_state, itos);
3081 __ jcc(Assembler::notEqual, notInt);
3082
3083 // itos
3084 {
3085 __ pop(itos);
3086 if (!is_static) pop_and_check_object(obj);
3087 __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3088 if (!is_static && rc == may_rewrite) {
3089 patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
3090 }
3091 __ jmp(Done);
3092 }
3093
3094 __ bind(notInt);
3095 __ cmpl(tos_state, ctos);
3096 __ jcc(Assembler::notEqual, notChar);
3193 }
3194
3195 void TemplateTable::jvmti_post_fast_field_mod() {
3196
3197 const Register scratch = c_rarg3;
3198
3199 if (JvmtiExport::can_post_field_modification()) {
3200 // Check to see if a field modification watch has been set before
3201 // we take the time to call into the VM.
3202 Label L2;
3203 __ mov32(scratch, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
3204 __ testl(scratch, scratch);
3205 __ jcc(Assembler::zero, L2);
3206 __ pop_ptr(rbx); // copy the object pointer from tos
3207 __ verify_oop(rbx);
3208 __ push_ptr(rbx); // put the object pointer back on tos
3209 // Save tos values before call_VM() clobbers them. Since we have
3210 // to do it for every data type, we use the saved values as the
3211 // jvalue object.
3212 switch (bytecode()) { // load values into the jvalue object
3213     case Bytecodes::_fast_vputfield: // fall through
3214 case Bytecodes::_fast_aputfield: __ push_ptr(rax); break;
3215 case Bytecodes::_fast_bputfield: // fall through
3216 case Bytecodes::_fast_zputfield: // fall through
3217 case Bytecodes::_fast_sputfield: // fall through
3218 case Bytecodes::_fast_cputfield: // fall through
3219 case Bytecodes::_fast_iputfield: __ push_i(rax); break;
3220 case Bytecodes::_fast_dputfield: __ push(dtos); break;
3221 case Bytecodes::_fast_fputfield: __ push(ftos); break;
3222 case Bytecodes::_fast_lputfield: __ push_l(rax); break;
3223
3224 default:
3225 ShouldNotReachHere();
3226 }
3227 __ mov(scratch, rsp); // points to jvalue on the stack
3228 // access constant pool cache entry
3229 __ load_field_entry(c_rarg2, rax);
3230 __ verify_oop(rbx);
3231 // rbx: object pointer copied above
3232 // c_rarg2: cache entry pointer
3233 // c_rarg3: jvalue object on the stack
3234 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_modification), rbx, c_rarg2, c_rarg3);
3235
3236 switch (bytecode()) { // restore tos values
3237 case Bytecodes::_fast_vputfield: // fall through
3238 case Bytecodes::_fast_aputfield: __ pop_ptr(rax); break;
3239 case Bytecodes::_fast_bputfield: // fall through
3240 case Bytecodes::_fast_zputfield: // fall through
3241 case Bytecodes::_fast_sputfield: // fall through
3242 case Bytecodes::_fast_cputfield: // fall through
3243 case Bytecodes::_fast_iputfield: __ pop_i(rax); break;
3244 case Bytecodes::_fast_dputfield: __ pop(dtos); break;
3245 case Bytecodes::_fast_fputfield: __ pop(ftos); break;
3246 case Bytecodes::_fast_lputfield: __ pop_l(rax); break;
3247 default: break;
3248 }
3249 __ bind(L2);
3250 }
3251 }
3252
3253 void TemplateTable::fast_storefield(TosState state) {
3254 transition(state, vtos);
3255
3256 Label notVolatile, Done;
3257
3258 jvmti_post_fast_field_mod();
3259
3260 __ push(rax);
3261 __ load_field_entry(rcx, rax);
3262 load_resolved_field_entry(noreg, rcx, rax, rbx, rdx);
3263 __ pop(rax);
3264   // RBX: field offset, RCX: field entry, RAX: TOS, RDX: flags
3265
3266 // Get object from stack
3267 pop_and_check_object(rcx);
3268
3269 // field address
3270 const Address field(rcx, rbx, Address::times_1);
3271
3272 // Check for volatile store
3273 __ movl(rscratch2, rdx); // saving flags for is_flat test
3274 __ andl(rscratch2, (1 << ResolvedFieldEntry::is_volatile_shift));
3275 __ testl(rscratch2, rscratch2);
3276 __ jcc(Assembler::zero, notVolatile);
3277
3278 fast_storefield_helper(field, rax, rdx);
3279 volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
3280 Assembler::StoreStore));
3281 __ jmp(Done);
3282 __ bind(notVolatile);
3283
3284 fast_storefield_helper(field, rax, rdx);
3285
3286 __ bind(Done);
3287 }
3288
3289 void TemplateTable::fast_storefield_helper(Address field, Register rax, Register flags) {
3290
3291 // DANGER: 'field' argument depends on rcx and rbx
3292
3293 // access field
3294 switch (bytecode()) {
3295 case Bytecodes::_fast_vputfield:
3296 {
3297 Label is_flat, has_null_marker, write_null, done;
3298 __ test_field_has_null_marker(flags, rscratch1, has_null_marker);
3299 // Null free field cases: flat or not flat
3300 __ null_check(rax);
3301 __ test_field_is_flat(flags, rscratch1, is_flat);
3302 // field is not flat
3303 do_oop_store(_masm, field, rax);
3304 __ jmp(done);
3305 __ bind(is_flat);
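// flat case: reload the field entry to get the layout info, then copy the
// value's payload into the field in place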
3306 __ load_field_entry(r8, r9);
3307 __ load_unsigned_short(r9, Address(r8, in_bytes(ResolvedFieldEntry::field_index_offset())));
3308 __ movptr(r8, Address(r8, in_bytes(ResolvedFieldEntry::field_holder_offset())));
3309 __ inline_layout_info(r8, r9, r8);
3310 __ load_klass(rdx, rax, rscratch1);
3311 __ payload_addr(rax, rax, rdx);
3312 __ lea(rcx, field);
3313 __ flat_field_copy(IN_HEAP, rax, rcx, r8);
3314 __ jmp(done);
3315 __ bind(has_null_marker); // has null marker means the field is flat with a null marker
3316 __ movptr(rbx, rcx);
3317 __ load_field_entry(rcx, rdx);
3318 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::write_nullable_flat_field), rbx, rax, rcx);
3319 __ bind(done);
3320 }
3321 break;
3322 case Bytecodes::_fast_aputfield:
3323 {
3324 do_oop_store(_masm, field, rax);
3325 }
3326 break;
3327 case Bytecodes::_fast_lputfield:
3328 __ access_store_at(T_LONG, IN_HEAP, field, noreg /* ltos */, noreg, noreg, noreg);
3329 break;
3330 case Bytecodes::_fast_iputfield:
3331 __ access_store_at(T_INT, IN_HEAP, field, rax, noreg, noreg, noreg);
3332 break;
3333 case Bytecodes::_fast_zputfield:
3334 __ access_store_at(T_BOOLEAN, IN_HEAP, field, rax, noreg, noreg, noreg);
3335 break;
3336 case Bytecodes::_fast_bputfield:
3337 __ access_store_at(T_BYTE, IN_HEAP, field, rax, noreg, noreg, noreg);
3338 break;
3339 case Bytecodes::_fast_sputfield:
3340 __ access_store_at(T_SHORT, IN_HEAP, field, rax, noreg, noreg, noreg);
3341 break;
3342 case Bytecodes::_fast_cputfield:
3343 __ access_store_at(T_CHAR, IN_HEAP, field, rax, noreg, noreg, noreg);
3344 break;
3345 case Bytecodes::_fast_fputfield:
3361 // Check to see if a field access watch has been set before we
3362 // take the time to call into the VM.
3363 Label L1;
3364 __ mov32(rcx, ExternalAddress((address) JvmtiExport::get_field_access_count_addr()));
3365 __ testl(rcx, rcx);
3366 __ jcc(Assembler::zero, L1);
3367 // access constant pool cache entry
3368 __ load_field_entry(c_rarg2, rcx);
3369 __ verify_oop(rax);
3370 __ push_ptr(rax); // save object pointer before call_VM() clobbers it
3371 __ mov(c_rarg1, rax);
3372 // c_rarg1: object pointer copied above
3373 // c_rarg2: cache entry pointer
3374 __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::post_field_access), c_rarg1, c_rarg2);
3375 __ pop_ptr(rax); // restore object pointer
3376 __ bind(L1);
3377 }
3378
3379 // access constant pool cache
3380 __ load_field_entry(rcx, rbx);
3381 __ load_sized_value(rdx, Address(rcx, in_bytes(ResolvedFieldEntry::field_offset_offset())), sizeof(int), true /*is_signed*/);
3382
3383 // rax: object
3384 __ verify_oop(rax);
3385 __ null_check(rax);
3386 Address field(rax, rdx, Address::times_1);
3387
3388 // access field
3389 switch (bytecode()) {
3390 case Bytecodes::_fast_vgetfield:
3391 {
3392 Label is_flat, nonnull, Done, has_null_marker;
3393 __ load_unsigned_byte(rscratch1, Address(rcx, in_bytes(ResolvedFieldEntry::flags_offset())));
3394 __ test_field_has_null_marker(rscratch1, rscratch2, has_null_marker);
3395 __ test_field_is_flat(rscratch1, rscratch2, is_flat);
3396 // field is not flat
3397 __ load_heap_oop(rax, field);
3398 __ testptr(rax, rax);
3399 __ jcc(Assembler::notZero, nonnull);
3400 __ jump(RuntimeAddress(Interpreter::_throw_NPE_UninitializedField_entry));
3401 __ bind(nonnull);
3402 __ verify_oop(rax);
3403 __ jmp(Done);
3404 __ bind(is_flat);
3405 // field is flat
3406 __ read_flat_field(rcx, rdx, rbx, rax);
3407 __ jmp(Done);
3408 __ bind(has_null_marker);
3409 // rax = instance, rcx = resolved entry
3410 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::read_nullable_flat_field), rax, rcx);
3411 __ get_vm_result(rax, r15_thread);
3412 __ bind(Done);
3413 __ verify_oop(rax);
3414 }
3415 break;
3416 case Bytecodes::_fast_agetfield:
3417 do_oop_load(_masm, field, rax);
3418 __ verify_oop(rax);
3419 break;
3420 case Bytecodes::_fast_lgetfield:
3421 __ access_load_at(T_LONG, IN_HEAP, noreg /* ltos */, field, noreg, noreg);
3422 break;
3423 case Bytecodes::_fast_igetfield:
3424 __ access_load_at(T_INT, IN_HEAP, rax, field, noreg, noreg);
3425 break;
3426 case Bytecodes::_fast_bgetfield:
3427 __ access_load_at(T_BYTE, IN_HEAP, rax, field, noreg, noreg);
3428 break;
3429 case Bytecodes::_fast_sgetfield:
3430 __ access_load_at(T_SHORT, IN_HEAP, rax, field, noreg, noreg);
3431 break;
3432 case Bytecodes::_fast_cgetfield:
3433 __ access_load_at(T_CHAR, IN_HEAP, rax, field, noreg, noreg);
3434 break;
3435 case Bytecodes::_fast_fgetfield:
3820
3821 // Note: rax_callsite is already pushed
3822
3823 // %%% should make a type profile for any invokedynamic that takes a ref argument
3824 // profile this call
3825 __ profile_call(rbcp);
3826 __ profile_arguments_type(rdx, rbx_method, rbcp, false);
3827
3828 __ verify_oop(rax_callsite);
3829
3830 __ jump_from_interpreted(rbx_method, rdx);
3831 }
3832
3833 //-----------------------------------------------------------------------------
3834 // Allocation
3835
3836 void TemplateTable::_new() {
3837 transition(vtos, atos);
3838 __ get_unsigned_2_byte_index_at_bcp(rdx, 1);
3839 Label slow_case;
3840 Label done;
3841
3842 __ get_cpool_and_tags(rcx, rax);
3843
3844 // Make sure the class we're about to instantiate has been resolved.
3845   // This is done before loading InstanceKlass to be consistent with the order
3846   // in which the constant pool is updated (see ConstantPool::klass_at_put)
3847 const int tags_offset = Array<u1>::base_offset_in_bytes();
3848 __ cmpb(Address(rax, rdx, Address::times_1, tags_offset), JVM_CONSTANT_Class);
3849 __ jcc(Assembler::notEqual, slow_case);
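// any tag other than JVM_CONSTANT_Class means the class is unresolved (or in
// error); take the slow path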
3850
3851 // get InstanceKlass
3852 __ load_resolved_klass_at_index(rcx, rcx, rdx);
3853
3854 // make sure klass is initialized
3855 // init_state needs acquire, but x86 is TSO, and so we are already good.
3856 assert(VM_Version::supports_fast_class_init_checks(), "must support fast class initialization checks");
3857 __ clinit_barrier(rcx, r15_thread, nullptr /*L_fast_path*/, &slow_case);
3858
3859 __ allocate_instance(rcx, rax, rdx, rbx, true, slow_case);
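// fast path allocation: rax now holds the new, zero-initialized instance;
// allocation failure branched to slow_case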
3860 if (DTraceAllocProbes) {
3861 // Trigger dtrace event for fastpath
3862 __ push(atos);
3863 __ call_VM_leaf(
3864 CAST_FROM_FN_PTR(address, static_cast<int (*)(oopDesc*)>(SharedRuntime::dtrace_object_alloc)), rax);
3865 __ pop(atos);
3866 }
3867 __ jmp(done);
3868
3869 // slow case
3870 __ bind(slow_case);
3871
3872 __ get_constant_pool(c_rarg1);
3873 __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3874 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::_new), c_rarg1, c_rarg2);
3875 __ verify_oop(rax);
3876
3877 // continue
3878 __ bind(done);
3879 }
3880
3881 void TemplateTable::newarray() {
3882 transition(itos, atos);
3883 __ load_unsigned_byte(c_rarg1, at_bcp(1));
3884 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::newarray),
3885 c_rarg1, rax);
3886 }
3887
3888 void TemplateTable::anewarray() {
3889 transition(itos, atos);
3890
3891   __ get_unsigned_2_byte_index_at_bcp(c_rarg2, 1);
3892 __ get_constant_pool(c_rarg1);
3893 call_VM(rax, CAST_FROM_FN_PTR(address, InterpreterRuntime::anewarray),
3894 c_rarg1, c_rarg2, rax);
3895 }
3896
3897 void TemplateTable::arraylength() {
3898 transition(atos, itos);
3899 __ movl(rax, Address(rax, arrayOopDesc::length_offset_in_bytes()));
3900 }
3901
3902 void TemplateTable::checkcast() {
3903 transition(atos, atos);
3904 Label done, is_null, ok_is_subtype, quicked, resolved;
3905 __ testptr(rax, rax); // object is in rax
3906 __ jcc(Assembler::zero, is_null);
3907
3908 // Get cpool & tags index
3909 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
3910 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
3911   // See if the bytecode has already been quickened
3912 __ movzbl(rdx, Address(rdx, rbx,
3913 Address::times_1,
3914 Array<u1>::base_offset_in_bytes()));
3915 __ cmpl(rdx, JVM_CONSTANT_Class);
3916 __ jcc(Assembler::equal, quicked);
3917 __ push(atos); // save receiver for result, and for GC
3918 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3919
3920 // vm_result_2 has metadata result
3921 __ get_vm_result_2(rax, r15_thread);
3922
3923 __ pop_ptr(rdx); // restore receiver
3924 __ jmpb(resolved);
3925
3926 // Get superklass in rax and subklass in rbx
3927 __ bind(quicked);
3928 __ mov(rdx, rax); // Save object in rdx; rax needed for subtype check
3929 __ load_resolved_klass_at_index(rax, rcx, rbx);
3930
3931 __ bind(resolved);
3932 __ load_klass(rbx, rdx, rscratch1);
3933
3934 // Generate subtype check. Blows rcx, rdi. Object in rdx.
3935 // Superklass in rax. Subklass in rbx.
3936 __ gen_subtype_check(rbx, ok_is_subtype);
3937
3938 // Come here on failure
3939 __ push_ptr(rdx);
3940 // object is at TOS
3941 __ jump(RuntimeAddress(Interpreter::_throw_ClassCastException_entry));
3942
3943 // Come here on success
3944 __ bind(ok_is_subtype);
3945   __ mov(rax, rdx); // Restore object from rdx
3946 __ jmp(done);
3947
3948 __ bind(is_null);
3949
3950 // Collect counts on whether this check-cast sees nulls a lot or not.
3951 if (ProfileInterpreter) {
3952 __ profile_null_seen(rcx);
3953 }
3954
3955 __ bind(done);
3956 }
3957
3958 void TemplateTable::instanceof() {
3959 transition(atos, itos);
3960 Label done, is_null, ok_is_subtype, quicked, resolved;
3961 __ testptr(rax, rax);
3962 __ jcc(Assembler::zero, is_null);
3963
3964 // Get cpool & tags index
3965 __ get_cpool_and_tags(rcx, rdx); // rcx=cpool, rdx=tags array
3966 __ get_unsigned_2_byte_index_at_bcp(rbx, 1); // rbx=index
3967   // See if the bytecode has already been quickened
3968 __ movzbl(rdx, Address(rdx, rbx,
3969 Address::times_1,
3970 Array<u1>::base_offset_in_bytes()));
3971 __ cmpl(rdx, JVM_CONSTANT_Class);
3972 __ jcc(Assembler::equal, quicked);
3973
3974 __ push(atos); // save receiver for result, and for GC
3975 call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::quicken_io_cc));
3976 // vm_result_2 has metadata result
3977
3978 __ get_vm_result_2(rax, r15_thread);
3979
3980 __ pop_ptr(rdx); // restore receiver
3981 __ verify_oop(rdx);
3982 __ load_klass(rdx, rdx, rscratch1);
3983 __ jmpb(resolved);
3984
3985 // Get superklass in rax and subklass in rdx
3986 __ bind(quicked);
3987 __ load_klass(rdx, rax, rscratch1);
3988 __ load_resolved_klass_at_index(rax, rcx, rbx);
3989
3990 __ bind(resolved);
3991
3992   // Generate subtype check. Blows rcx, rdi.
3993   // Superklass in rax. Subklass in rdx.
3994   __ gen_subtype_check(rdx, ok_is_subtype);
3995
3996 // Come here on failure
3997 __ xorl(rax, rax);
3998 __ jmpb(done);
3999 // Come here on success
4000 __ bind(ok_is_subtype);
4001 __ movl(rax, 1);
4002
4003 // Collect counts on whether this test sees nulls a lot or not.
4004 if (ProfileInterpreter) {
4005 __ jmp(done);
4006 __ bind(is_null);
4007 __ profile_null_seen(rcx);
4008 } else {
4009 __ bind(is_null); // same as 'done'
4010 }
4011 __ bind(done);
4012 // rax = 0: obj == nullptr or obj is not an instanceof the specified klass
4013 // rax = 1: obj != nullptr and obj is an instanceof the specified klass
4014 }
4015
4016 //----------------------------------------------------------------------------------------------------
4017 // Breakpoints
4018 void TemplateTable::_breakpoint() {
4019   // Note: We get here even if we are single stepping.
4020 // jbug insists on setting breakpoints at every bytecode
4021 // even if we are in single step mode.
4022
4023 transition(vtos, vtos);
4024
4025 // get the unpatched byte code
4026 __ get_method(c_rarg1);
4027 __ call_VM(noreg,
4028 CAST_FROM_FN_PTR(address,
4029 InterpreterRuntime::get_original_bytecode_at),
4030 c_rarg1, rbcp);
4031   __ mov(rbx, rax); // the dispatch below expects the bytecode in rbx, which is callee-saved across the VM call
4032
4033 // post the breakpoint event
4034 __ get_method(c_rarg1);
4035 __ call_VM(noreg,
4055 // Note: monitorenter & exit are symmetric routines; which is reflected
4056 // in the assembly code structure as well
4057 //
4058 // Stack layout:
4059 //
4060 // [expressions ] <--- rsp = expression stack top
4061 // ..
4062 // [expressions ]
4063 // [monitor entry] <--- monitor block top = expression stack bot
4064 // ..
4065 // [monitor entry]
4066 // [frame data ] <--- monitor block bot
4067 // ...
4068 // [saved rbp ] <--- rbp
4069 void TemplateTable::monitorenter() {
4070 transition(atos, vtos);
4071
4072 // check for null object
4073 __ null_check(rax);
4074
4075 Label is_inline_type;
4076 __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
4077 __ test_markword_is_inline_type(rbx, is_inline_type);
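// inline type instances have no identity and cannot be locked; the handler
// below throws IdentityException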
4078
4079 const Address monitor_block_top(
4080 rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4081 const Address monitor_block_bot(
4082 rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4083 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4084
4085 Label allocated;
4086
4087 Register rtop = c_rarg3;
4088 Register rbot = c_rarg2;
4089 Register rmon = c_rarg1;
4090
4091 // initialize entry pointer
4092 __ xorl(rmon, rmon); // points to free slot or null
4093
4094 // find a free slot in the monitor block (result in rmon)
4095 {
4096 Label entry, loop, exit;
4097 __ movptr(rtop, monitor_block_top); // derelativize pointer
4098 __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
4151 // rmon: points to monitor entry
4152 __ bind(allocated);
4153
4154   // Increment bcp to point to the next bytecode, so exception
4155   // handling for async. exceptions works correctly.
4156 // The object has already been popped from the stack, so the
4157 // expression stack looks correct.
4158 __ increment(rbcp);
4159
4160 // store object
4161 __ movptr(Address(rmon, BasicObjectLock::obj_offset()), rax);
4162 __ lock_object(rmon);
4163
4164 // check to make sure this monitor doesn't cause stack overflow after locking
4165 __ save_bcp(); // in case of exception
4166 __ generate_stack_overflow_check(0);
4167
4168 // The bcp has already been incremented. Just need to dispatch to
4169 // next instruction.
4170 __ dispatch_next(vtos);
4171
4172 __ bind(is_inline_type);
4173 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4174 InterpreterRuntime::throw_identity_exception), rax);
4175 __ should_not_reach_here();
4176 }
4177
4178 void TemplateTable::monitorexit() {
4179 transition(atos, vtos);
4180
4181 // check for null object
4182 __ null_check(rax);
4183
4184 const int is_inline_type_mask = markWord::inline_type_pattern;
4185 Label has_identity;
4186 __ movptr(rbx, Address(rax, oopDesc::mark_offset_in_bytes()));
4187 __ andptr(rbx, is_inline_type_mask);
4188 __ cmpl(rbx, is_inline_type_mask);
4189 __ jcc(Assembler::notEqual, has_identity);
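// a mark word matching the inline type pattern identifies a value object;
// monitorexit on it is always illegal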
4190 __ call_VM(noreg, CAST_FROM_FN_PTR(address,
4191 InterpreterRuntime::throw_illegal_monitor_state_exception));
4192 __ should_not_reach_here();
4193 __ bind(has_identity);
4194
4195 const Address monitor_block_top(
4196 rbp, frame::interpreter_frame_monitor_block_top_offset * wordSize);
4197 const Address monitor_block_bot(
4198 rbp, frame::interpreter_frame_initial_sp_offset * wordSize);
4199 const int entry_size = frame::interpreter_frame_monitor_size_in_bytes();
4200
4201 Register rtop = c_rarg1;
4202 Register rbot = c_rarg2;
4203
4204 Label found;
4205
4206 // find matching slot
4207 {
4208 Label entry, loop;
4209 __ movptr(rtop, monitor_block_top); // derelativize pointer
4210 __ lea(rtop, Address(rbp, rtop, Address::times_ptr));
4211 // rtop points to current entry, starting with top-most entry
4212
4213 __ lea(rbot, monitor_block_bot); // points to word before bottom
4214 // of monitor block