#endif


// A Method represents a Java method.
//
// Note that most applications load thousands of methods, so keeping the size of this
// class small has a big impact on footprint.
//
// Note that native_function and signature_handler have to be at fixed offsets
// (required by the interpreter)
//
// Method embedded field layout (after declared fields):
// [EMBEDDED native_function       (present only if native) ]
// [EMBEDDED signature_handler     (present only if native) ]
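//
// Illustrative sketch only (the pointer arithmetic below is an assumption drawn
// from the layout comment above, not a declared accessor of this class): for a
// native method the two embedded slots sit directly past the word-aligned
// declared fields, roughly
//
//   address* embedded_base         = (address*)((char*)this + Method::header_size() * wordSize);
//   address* native_function_ptr   = embedded_base + 0;
//   address* signature_handler_ptr = embedded_base + 1;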

class CheckedExceptionElement;
class LocalVariableTableElement;
class AdapterHandlerEntry;
class MethodData;
class MethodCounters;
class MethodTrainingData;
class ConstMethod;
class InlineTableSizes;
class nmethod;
class InterpreterOopMap;
class SCCEntry;

class Method : public Metadata {
 friend class VMStructs;
 friend class JVMCIVMStructs;
 friend class MethodTest;
 private:
  // If you add a new field that points to any metaspace object, you
  // must add this field to Method::metaspace_pointers_do().
  ConstMethod*         _constMethod;              // Method read-only data.
  MethodData*          _method_data;
  MethodCounters*      _method_counters;
  AdapterHandlerEntry* _adapter;
  int                  _vtable_index;             // vtable index of this method (see VtableIndexFlag)
  AccessFlags          _access_flags;             // Access flags
  MethodFlags          _flags;

  u2                   _intrinsic_id;             // vmSymbols::intrinsic_id (0 == _none)

  JFR_ONLY(DEFINE_TRACE_FLAG;)

#ifndef PRODUCT
  int64_t _compiled_invocation_count;

  Symbol* _name;
#endif
  // Entry point for calling both from and to the interpreter.
  address _i2i_entry;           // All-args-on-stack calling convention
  // Entry point for calling from compiled code, to compiled code if it exists
  // or else the interpreter.
  volatile address _from_compiled_entry;          // Cache of: _code ? _code->entry_point() : _adapter->c2i_entry()
  // The entry point for calling both from and to compiled code is
  // "_code->entry_point()". Because of tiered compilation and de-opt, this
  // field can come and go. It can transition from null to not-null at any
  // time (whenever a compile completes). It can transition from not-null to
  // null only at safepoints (because of a de-opt).
  nmethod* volatile _code;                        // Points to the corresponding piece of native code
  volatile address  _from_interpreted_entry;      // Cache of _code ? _adapter->i2c_entry() : _i2i_entry

  nmethod*  _preload_code;                        // preloaded SCCache code
  SCCEntry* _scc_entry;                           // SCCache entry for pre-loading code

  // Constructor
  Method(ConstMethod* xconst, AccessFlags access_flags, Symbol* name);
 public:

  static Method* allocate(ClassLoaderData* loader_data,
                          int byte_code_size,
                          AccessFlags access_flags,
                          InlineTableSizes* sizes,
                          ConstMethod::MethodType method_type,
                          Symbol* name,
                          TRAPS);

  // CDS and vtbl checking can create an empty Method to get vtbl pointer.
  Method(){}

  virtual bool is_method() const { return true; }

#if INCLUDE_CDS
  void remove_unshareable_info();
  void restore_unshareable_info(TRAPS);

  // ...

  // constraint classes are loaded if necessary. Note that this may
  // throw an exception if loading of the constraint classes causes
  // an IllegalAccessError (bugid 4307310) or an OutOfMemoryError.
  // If an exception is thrown, returns the bci of the
  // exception handler which caused the exception to be thrown, which
  // is needed for proper retries. See, for example,
  // InterpreterRuntime::exception_handler_for_exception.
  static int fast_exception_handler_bci_for(const methodHandle& mh, Klass* ex_klass, int throw_bci, TRAPS);
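
  // Usage sketch (an assumed caller pattern, for illustration only; the local
  // variable names are not from this file):
  //   int handler_bci = Method::fast_exception_handler_bci_for(mh, ex_klass, throw_bci, THREAD);
  //   if (HAS_PENDING_EXCEPTION) {
  //     // handler_bci identifies the handler whose constraint-class loading failed,
  //     // so the caller can retry or report from that point
  //   }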

  static bool register_native(Klass* k,
                              Symbol* name,
                              Symbol* signature,
                              address entry,
                              TRAPS);

  // method data access
  MethodData* method_data() const {
    return _method_data;
  }

  void set_method_data(MethodData* data);

  MethodTrainingData* training_data_or_null() const;
  bool init_training_data(MethodTrainingData* tdata);

  // mark an exception handler as entered (used to prune dead catch blocks in C2)
  void set_exception_handler_entered(int handler_bci);

  MethodCounters* method_counters() const {
    return _method_counters;
  }

  void clear_method_counters() {
    _method_counters = nullptr;
  }

  bool init_method_counters(MethodCounters* counters);

  inline int prev_event_count() const;
  inline void set_prev_event_count(int count);
  inline jlong prev_time() const;
  inline void set_prev_time(jlong time);
  inline float rate() const;
  inline void set_rate(float rate);

  int invocation_count() const;
  int backedge_count() const;

  bool was_executed_more_than(int n);
  bool was_never_executed() { return !was_executed_more_than(0); }

  static void build_profiling_method_data(const methodHandle& method, TRAPS);
  static bool install_training_method_data(const methodHandle& method);
  static MethodCounters* build_method_counters(Thread* current, Method* m);

  int interpreter_invocation_count() { return invocation_count(); }

#ifndef PRODUCT
  int64_t compiled_invocation_count() const { return _compiled_invocation_count; }
  void set_compiled_invocation_count(int count) { _compiled_invocation_count = (int64_t)count; }
#else
  // for PrintMethodData in a product build
  int64_t compiled_invocation_count() const { return 0; }
#endif // not PRODUCT

  // nmethod/verified compiler entry
  address verified_code_entry();
  bool check_code() const; // Not inline to avoid circular ref
  nmethod* code() const;

  // Locks NMethodState_lock if not held.
  void unlink_code(nmethod *compare);
  // Locks NMethodState_lock if not held.
  void unlink_code();

 private:
  // Either called with NMethodState_lock held or from constructor.
  void clear_code();

  void clear_method_data() {
    _method_data = nullptr;
  }

 public:
  static void set_code(const methodHandle& mh, nmethod* code);
  void set_adapter_entry(AdapterHandlerEntry* adapter) {
    _adapter = adapter;
  }
  void set_from_compiled_entry(address entry) {
    _from_compiled_entry = entry;
  }

  void set_preload_code(nmethod* code) {
    _preload_code = code;
  }
  void set_scc_entry(SCCEntry* entry) {
    _scc_entry = entry;
  }
  SCCEntry* scc_entry() const {
    return _scc_entry;
  }

  address get_i2c_entry();
  address get_c2i_entry();
  address get_c2i_unverified_entry();
  address get_c2i_no_clinit_check_entry();
  AdapterHandlerEntry* adapter() const {
    return _adapter;
  }
  // setup entry points
  void link_method(const methodHandle& method, TRAPS);
  // clear entry points. Used by sharing code during dump time
  void unlink_method() NOT_CDS_RETURN;
  void remove_unshareable_flags() NOT_CDS_RETURN;

  virtual void metaspace_pointers_do(MetaspaceClosure* iter);
  virtual MetaspaceObj::Type type() const { return MethodType; }

  // vtable index
  enum VtableIndexFlag {
    // Valid vtable indexes are non-negative (>= 0).
    // These few negative values are used as sentinels.
    // ...

  // returns true if the method is static OR if the classfile version < 51
  bool has_valid_initializer_flags() const;

  // returns true if the method name is <clinit> and the method has
  // valid static initializer flags.
  bool is_static_initializer() const;

  // returns true if the method name is <init>
  bool is_object_initializer() const;

  // returns true if the method name is wait0
  bool is_object_wait0() const;

  // compiled code support
  // NOTE: code() is inherently racy as deopt can be clearing code
  // simultaneously. Use with caution.
  bool has_compiled_code() const;
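
  // Illustrative defensive pattern (an assumption about typical use, not a
  // prescribed API): sample the nmethod pointer once and tolerate it going away.
  //   nmethod* nm = m->code();     // racy read; may be cleared by a concurrent deopt
  //   if (nm != nullptr) {
  //     // use nm, remembering that _code only becomes null at a safepoint
  //   }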

  bool needs_clinit_barrier() const;
  bool code_has_clinit_barriers() const;

  // sizing
  static int header_size() {
    return align_up((int)sizeof(Method), wordSize) / wordSize;
  }
  static int size(bool is_native);
  int size() const { return method_size(); }
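
  // Sizing sketch (an assumption drawn from the embedded-field comment at the
  // top of this class, not a quote of the implementation): size(false) is just
  // header_size(), while size(true) additionally reserves two pointer-sized
  // words for the embedded native_function and signature_handler slots.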
  void log_touched(Thread* current);
  static void print_touched_methods(outputStream* out);

  // interpreter support
  static ByteSize const_offset()          { return byte_offset_of(Method, _constMethod        ); }
  static ByteSize access_flags_offset()   { return byte_offset_of(Method, _access_flags       ); }
  static ByteSize from_compiled_offset()  { return byte_offset_of(Method, _from_compiled_entry); }
  static ByteSize code_offset()           { return byte_offset_of(Method, _code               ); }

  static ByteSize method_counters_offset() {
    return byte_offset_of(Method, _method_counters);
  }
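
  // These ByteSize offsets let generated interpreter/compiler stubs reach Method
  // fields without going through C++ accessors. A rough sketch of the address
  // arithmetic involved (the variable names are assumptions for the example):
  //   // given a Method* m, the cached compiled-code entry is at
  //   address entry = *(address*)((char*)m + in_bytes(Method::from_compiled_offset()));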
#ifndef PRODUCT