/*
 * Copyright (c) 2000, 2024, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_OPTO_CALLGENERATOR_HPP
#define SHARE_OPTO_CALLGENERATOR_HPP

#include "compiler/compileBroker.hpp"
#include "opto/callnode.hpp"
#include "opto/compile.hpp"
#include "opto/type.hpp"
#include "runtime/deoptimization.hpp"

//---------------------------CallGenerator-------------------------------------
// The subclasses of this class handle generation of ideal nodes for
// call sites and method entry points.

class CallGenerator : public ArenaObj {
 private:
  ciMethod* _method;  // The method being called.

 protected:
  CallGenerator(ciMethod* method) : _method(method) {}

  void do_late_inline_helper();

  virtual bool do_late_inline_check(Compile* C, JVMState* jvms) { ShouldNotReachHere(); return false; }
  virtual bool is_pure_call() const { ShouldNotReachHere(); return false; }

 public:
  // Accessors
  ciMethod* method() const { return _method; }

  // is_inline: At least some code implementing the method is copied here.
  virtual bool is_inline() const { return false; }
  // is_intrinsic: There's a method-specific way of generating the inline code.
  virtual bool is_intrinsic() const { return false; }
  // is_parse: Bytecodes implementing the specific method are copied here.
  virtual bool is_parse() const { return false; }
  // is_virtual: The call uses the receiver type to select or check the method.
  virtual bool is_virtual() const { return false; }
  // is_deferred: The decision whether to inline or not is deferred.
  virtual bool is_deferred() const { return false; }
  // is_predicated: Uses an explicit check (predicate).
  virtual bool is_predicated() const { return false; }
  virtual int predicates_count() const { return 0; }
  // is_trap: Does not return to the caller. (E.g., uncommon trap.)
  virtual bool is_trap() const { return false; }
  // does_virtual_dispatch: Should try inlining as normal method first.
  virtual bool does_virtual_dispatch() const { return false; }

  // is_late_inline: supports conversion of call into an inline
  virtual bool is_late_inline() const { return false; }
  // same but for method handle calls
  virtual bool is_mh_late_inline() const { return false; }
  virtual bool is_string_late_inline() const { return false; }
  virtual bool is_boxing_late_inline() const { return false; }
  virtual bool is_vector_reboxing_late_inline() const { return false; }
  virtual bool is_virtual_late_inline() const { return false; }

  // Replace the call with an inline version of the code
  virtual void do_late_inline() { ShouldNotReachHere(); }

  virtual CallNode* call_node() const { return nullptr; }
  virtual CallGenerator* with_call_node(CallNode* call) { return this; }

  virtual void set_unique_id(jlong id) { fatal("unique id only for late inlines"); };
  virtual jlong unique_id() const { fatal("unique id only for late inlines"); return 0; };

  virtual CallGenerator* inline_cg() const { ShouldNotReachHere(); return nullptr; }

  virtual void set_callee_method(ciMethod* callee) { ShouldNotReachHere(); }

  // Note: It is possible for a CG to be both inline and virtual.
  // (The hashCode intrinsic does a vtable check and an inlined fast path.)

  // Allocate CallGenerators only in Compile arena since some of them are referenced from CallNodes.
  void* operator new(size_t size) throw() {
    Compile* C = Compile::current();
    return ArenaObj::operator new(size, C->comp_arena());
  }

  // Utilities:
  const TypeFunc* tf() const;

  // The given jvms has state and arguments for a call to my method.
  // Edges after jvms->argoff() carry all (pre-popped) argument values.
  //
  // Update the map with state and return values (if any) and return it.
  // The return values (0, 1, or 2) must be pushed on the map's stack,
  // and the sp of the jvms incremented accordingly.
  //
  // The jvms is returned on success. Alternatively, a copy of the
  // given jvms, suitably updated, may be returned, in which case the
  // caller should discard the original jvms.
  //
  // The non-Parm edges of the returned map will contain updated global state,
  // and one or two edges before jvms->sp() will carry any return values.
  // Other map edges may contain locals or monitors, and should not
  // be changed in meaning.
  //
  // If the call traps, the returned map must have a control edge of top.
  // If the call can throw, the returned map must report has_exceptions().
  //
  // If the result is null, it means that this CallGenerator was unable
  // to handle the given call, and another CallGenerator should be consulted.
  virtual JVMState* generate(JVMState* jvms) = 0;
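  // Illustrative sketch only (simplified, not the exact caller code): the
  // parser typically asks a CallGenerator chosen by Compile::call_generator()
  // to expand a call site, and falls back to another generator when
  // generate() returns null, roughly like:
  //
  //   JVMState* new_jvms = cg->generate(jvms);
  //   if (new_jvms == nullptr) {
  //     // cg could not handle this call site; consult a less optimistic
  //     // CallGenerator (e.g. a plain out-of-line call) instead.
  //   }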

  // How to generate a call site that is inlined:
  static CallGenerator* for_inline(ciMethod* m, float expected_uses = -1);
  // How to generate code for an on-stack replacement handler.
  static CallGenerator* for_osr(ciMethod* m, int osr_bci);

  // How to generate vanilla out-of-line call sites:
  static CallGenerator* for_direct_call(ciMethod* m, bool separate_io_projs = false);  // static, special
  static CallGenerator* for_virtual_call(ciMethod* m, int vtable_index);               // virtual, interface

  static CallGenerator* for_method_handle_call(JVMState* jvms, ciMethod* caller, ciMethod* callee, bool allow_inline);
  static CallGenerator* for_method_handle_inline(JVMState* jvms, ciMethod* caller, ciMethod* callee, bool allow_inline, bool& input_not_const);

  // How to replace a direct call with an inline version of the code:
  static CallGenerator* for_late_inline(ciMethod* m, CallGenerator* inline_cg);
  static CallGenerator* for_mh_late_inline(ciMethod* caller, ciMethod* callee, bool input_not_const);
  static CallGenerator* for_string_late_inline(ciMethod* m, CallGenerator* inline_cg);
  static CallGenerator* for_boxing_late_inline(ciMethod* m, CallGenerator* inline_cg);
  static CallGenerator* for_vector_reboxing_late_inline(ciMethod* m, CallGenerator* inline_cg);
  static CallGenerator* for_late_inline_virtual(ciMethod* m, int vtable_index, float expected_uses);

  // How to make a call that optimistically assumes a receiver type:
  static CallGenerator* for_predicted_call(ciKlass* predicted_receiver,
                                           CallGenerator* if_missed,
                                           CallGenerator* if_hit,
                                           float hit_prob);

  static CallGenerator* for_guarded_call(ciKlass* predicted_receiver,
                                         CallGenerator* if_missed,
                                         CallGenerator* if_hit);

  // How to make a call that optimistically assumes a MethodHandle target:
  static CallGenerator* for_predicted_dynamic_call(ciMethodHandle* predicted_method_handle,
                                                   CallGenerator* if_missed,
                                                   CallGenerator* if_hit,
                                                   float hit_prob);
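  // Illustrative sketch only (assumed, simplified from how Compile::call_generator
  // composes generators): a profile-predicted receiver type gets an inline fast
  // path guarded by a type check, with a virtual call as the fallback:
  //
  //   CallGenerator* hit_cg  = CallGenerator::for_inline(exact_method);
  //   CallGenerator* miss_cg = CallGenerator::for_virtual_call(callee, vtable_index);
  //   CallGenerator* cg      = CallGenerator::for_predicted_call(predicted_receiver,
  //                                                              miss_cg, hit_cg, hit_prob);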

  // How to make a call that gives up and goes back to the interpreter:
  static CallGenerator* for_uncommon_trap(ciMethod* m,
                                          Deoptimization::DeoptReason reason,
                                          Deoptimization::DeoptAction action);

  // Registry for intrinsics:
  static CallGenerator* for_intrinsic(ciMethod* m);
  static void register_intrinsic(ciMethod* m, CallGenerator* cg);
  static CallGenerator* for_predicated_intrinsic(CallGenerator* intrinsic,
                                                 CallGenerator* cg);
  virtual Node* generate_predicate(JVMState* jvms, int predicate) { return nullptr; };

  static void print_inlining_failure(Compile* C, ciMethod* callee, JVMState* jvms, const char* msg) {
    C->inline_printer()->record(callee, jvms, InliningResult::FAILURE, msg);
    C->log_inline_failure(msg);
  }

  static bool is_inlined_method_handle_intrinsic(JVMState* jvms, ciMethod* m);
  static bool is_inlined_method_handle_intrinsic(ciMethod* caller, int bci, ciMethod* m);
  static bool is_inlined_method_handle_intrinsic(ciMethod* symbolic_info, ciMethod* m);
};


//------------------------InlineCallGenerator----------------------------------
class InlineCallGenerator : public CallGenerator {
 protected:
  InlineCallGenerator(ciMethod* method) : CallGenerator(method) {}

 public:
  virtual bool is_inline() const { return true; }
};

#endif // SHARE_OPTO_CALLGENERATOR_HPP