/*
 * Copyright (c) 1998, 2025, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_CODE_CODEBLOB_HPP
#define SHARE_CODE_CODEBLOB_HPP

#include "asm/codeBuffer.hpp"
#include "compiler/compilerDefinitions.hpp"
#include "compiler/oopMap.hpp"
#include "runtime/javaFrameAnchor.hpp"
#include "runtime/frame.hpp"
#include "runtime/handles.hpp"
#include "utilities/align.hpp"
#include "utilities/macros.hpp"

class ImmutableOopMap;
class ImmutableOopMapSet;
class JNIHandleBlock;
class OopMapSet;

// CodeBlob Types
// Used in the CodeCache to assign CodeBlobs to different CodeHeaps
enum class CodeBlobType {
  MethodNonProfiled   = 0,    // Execution level 1 and 4 (non-profiled) nmethods (including native nmethods)
  MethodProfiled      = 1,    // Execution level 2 and 3 (profiled) nmethods
  NonNMethod          = 2,    // Non-nmethods like Buffers, Adapters and Runtime Stubs
  All                 = 3,    // All types (No code cache segmentation)
  NumTypes            = 4     // Number of CodeBlobTypes
};

// CodeBlob - superclass for all entries in the CodeCache.
//
// Subtypes are:
//  nmethod            : JIT Compiled Java methods
//  RuntimeBlob        : Non-compiled method code; generated glue code
//    BufferBlob       : Used for non-relocatable code such as interpreter, stubroutines, etc.
//      AdapterBlob              : Used to hold C2I/I2C adapters
//      VtableBlob               : Used for holding vtable chunks
//      MethodHandlesAdapterBlob : Used to hold MethodHandles adapters
//      BufferedInlineTypeBlob   : Used for pack/unpack handlers
//    RuntimeStub                : Call to VM runtime methods
//    SingletonBlob              : Super-class for all blobs that exist in only one instance
//      DeoptimizationBlob       : Used for deoptimization
//      SafepointBlob            : Used to handle illegal instruction exceptions
//      ExceptionBlob            : Used for stack unrolling
//      UncommonTrapBlob         : Used to handle uncommon traps
//    UpcallStub                 : Used for upcalls from native code
//
//
// Layout in the CodeCache:
//   - header
//   - content space
//     - instruction space
// Outside of the CodeCache:
//   - mutable_data
//     - relocation info
//     - additional data for subclasses

enum class CodeBlobKind : u1 {
  None,
  Nmethod,
  Buffer,
  Adapter,
  Vtable,
  MHAdapter,
  BufferedInlineType,
  RuntimeStub,
  Deoptimization,
  Safepoint,
#ifdef COMPILER2
  Exception,
  UncommonTrap,
#endif
  Upcall,
  Number_Of_Kinds
};

class UpcallStub;      // for as_upcall_stub()
class RuntimeStub;     // for as_runtime_stub()
class JavaFrameAnchor; // for UpcallStub::jfa_for_frame

class CodeBlob {
  friend class VMStructs;
  friend class JVMCIVMStructs;

protected:
  // order fields from large to small to minimize padding between fields
  ImmutableOopMapSet* _oop_maps;   // OopMap for this CodeBlob
  const char*         _name;
  address             _mutable_data;

  int      _size;                  // total size of CodeBlob in bytes
  int      _relocation_size;       // size of relocation (could be bigger than 64Kb)
  int      _content_offset;        // offset to where content region begins (this includes consts, insts, stubs)
  int      _code_offset;           // offset to where instructions region begins (this includes insts, stubs)
  int      _data_offset;           // offset to where data region begins
  int      _frame_size;            // size of stack frame in words (NOT slots. On x64 these are 64bit words)
  int      _mutable_data_size;     // size of the mutable data (relocation info and additional data) kept outside the CodeCache

  S390_ONLY(int _ctable_offset;)

  uint16_t _header_size;           // size of header (depends on subclass)
  int16_t  _frame_complete_offset; // instruction offsets in [0.._frame_complete_offset) have
                                   // not finished setting up their frame. Beware of pc's in
                                   // that range. There are similar ranges on returns
                                   // which we don't detect.

  CodeBlobKind _kind;              // Kind of this code blob

  bool _caller_must_gc_arguments;

#ifndef PRODUCT
  AsmRemarks _asm_remarks;
  DbgStrings _dbg_strings;
#endif

  void print_on_impl(outputStream* st) const;
  void print_value_on_impl(outputStream* st) const;

  class Vptr {
  public:
    virtual void print_on(const CodeBlob* instance, outputStream* st) const = 0;
    virtual void print_value_on(const CodeBlob* instance, outputStream* st) const = 0;
  };

  const Vptr* vptr() const;

  CodeBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, uint16_t header_size,
           int16_t frame_complete_offset, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments,
           int mutable_data_size);

  // Simple CodeBlob used for simple BufferBlob.
  CodeBlob(const char* name, CodeBlobKind kind, int size, uint16_t header_size);

  void operator delete(void* p) { }

public:

  ~CodeBlob() {
    assert(_oop_maps == nullptr, "Not flushed");
  }

  // Returns the space needed for CodeBlob
  static unsigned int allocation_size(CodeBuffer* cb, int header_size);
  static unsigned int align_code_offset(int offset);

  // Deletion
  void purge();

  // Typing
  bool is_nmethod() const                     { return _kind == CodeBlobKind::Nmethod; }
  bool is_buffer_blob() const                 { return _kind == CodeBlobKind::Buffer; }
  bool is_runtime_stub() const                { return _kind == CodeBlobKind::RuntimeStub; }
  bool is_deoptimization_stub() const         { return _kind == CodeBlobKind::Deoptimization; }
#ifdef COMPILER2
  bool is_uncommon_trap_stub() const          { return _kind == CodeBlobKind::UncommonTrap; }
  bool is_exception_stub() const              { return _kind == CodeBlobKind::Exception; }
#else
  bool is_uncommon_trap_stub() const          { return false; }
  bool is_exception_stub() const              { return false; }
#endif
  bool is_safepoint_stub() const              { return _kind == CodeBlobKind::Safepoint; }
  bool is_adapter_blob() const                { return _kind == CodeBlobKind::Adapter; }
  bool is_vtable_blob() const                 { return _kind == CodeBlobKind::Vtable; }
  bool is_method_handles_adapter_blob() const { return _kind == CodeBlobKind::MHAdapter; }
  bool is_buffered_inline_type_blob() const   { return _kind == CodeBlobKind::BufferedInlineType; }
  bool is_upcall_stub() const                 { return _kind == CodeBlobKind::Upcall; }

  // Casting
  nmethod* as_nmethod_or_null() const         { return is_nmethod() ? (nmethod*) this : nullptr; }
  nmethod* as_nmethod() const                 { assert(is_nmethod(), "must be nmethod"); return (nmethod*) this; }
  CodeBlob* as_codeblob() const               { return (CodeBlob*) this; }
  UpcallStub* as_upcall_stub() const          { assert(is_upcall_stub(), "must be upcall stub"); return (UpcallStub*) this; }
  RuntimeStub* as_runtime_stub() const        { assert(is_runtime_stub(), "must be runtime blob"); return (RuntimeStub*) this; }

  // Boundaries
  address header_begin() const                { return (address) this; }
  address header_end() const                  { return ((address) this) + _header_size; }
  address content_begin() const               { return (address) header_begin() + _content_offset; }
  address content_end() const                 { return (address) header_begin() + _data_offset; }
  address code_begin() const                  { return (address) header_begin() + _code_offset; }
  address code_end() const                    { return (address) header_begin() + _data_offset; }
  address data_begin() const                  { return (address) header_begin() + _data_offset; }
  address data_end() const                    { return (address) header_begin() + _size; }
  address blob_end() const                    { return (address) header_begin() + _size; }
  // code_end == content_end is true for all types of blobs for now, it is also checked in the constructor

  int mutable_data_size() const               { return _mutable_data_size; }
  address mutable_data_begin() const          { return _mutable_data; }
  address mutable_data_end() const            { return _mutable_data + _mutable_data_size; }

  relocInfo* relocation_begin() const         { return (relocInfo*)_mutable_data; }
  relocInfo* relocation_end() const           { return (relocInfo*)((address)relocation_begin() + _relocation_size); }

  // Offsets
  int content_offset() const                  { return _content_offset; }
  int code_offset() const                     { return _code_offset; }

  // This field holds the beginning of the const section in the old code buffer.
  // It is needed to fix relocations of pc-relative loads when resizing
  // the constant pool or moving it.
  S390_ONLY(address ctable_begin() const { return header_begin() + _ctable_offset; })
  void set_ctable_begin(address ctable) { S390_ONLY(_ctable_offset = ctable - header_begin();) }

  // Sizes
  int size() const            { return _size; }
  int header_size() const     { return _header_size; }
  int relocation_size() const { return pointer_delta_as_int((address) relocation_end(), (address) relocation_begin()); }
  int content_size() const    { return pointer_delta_as_int(content_end(), content_begin()); }
  int code_size() const       { return pointer_delta_as_int(code_end(), code_begin()); }

  // Only used from CodeCache::free_unused_tail() after the Interpreter blob was trimmed
  void adjust_size(size_t used) {
    _size = (int)used;
    _data_offset = _size;
  }

  // Containment
  bool blob_contains(address addr) const        { return header_begin()  <= addr && addr < blob_end(); }
  bool code_contains(address addr) const        { return code_begin()    <= addr && addr < code_end(); }
  bool contains(address addr) const             { return content_begin() <= addr && addr < content_end(); }
  bool is_frame_complete_at(address addr) const { return _frame_complete_offset != CodeOffsets::frame_never_safe &&
                                                         code_contains(addr) && addr >= code_begin() + _frame_complete_offset; }
  int frame_complete_offset() const             { return _frame_complete_offset; }

  // OopMap for frame
  ImmutableOopMapSet* oop_maps() const          { return _oop_maps; }
  void set_oop_maps(OopMapSet* p);

  const ImmutableOopMap* oop_map_for_slot(int slot, address return_address) const;
  const ImmutableOopMap* oop_map_for_return_address(address return_address) const;

  // Frame support. Sizes are in word units.
  int  frame_size() const                       { return _frame_size; }
  void set_frame_size(int size)                 { _frame_size = size; }

  // Returns true if the next frame is responsible for GC'ing oops passed as arguments
  bool caller_must_gc_arguments(JavaThread* thread) const { return _caller_must_gc_arguments; }

  // Naming
  const char* name() const                      { return _name; }
  void set_name(const char* name)               { _name = name; }

  // Debugging
  void verify();
  void print() const;
  void print_on(outputStream* st) const;
  void print_value_on(outputStream* st) const;

  void dump_for_addr(address addr, outputStream* st, bool verbose) const;
  void print_code_on(outputStream* st);

  // Print to stream any comments associated with offset.
  void print_block_comment(outputStream* stream, address block_begin) const;

#ifndef PRODUCT
  AsmRemarks &asm_remarks() { return _asm_remarks; }
  DbgStrings &dbg_strings() { return _dbg_strings; }

  void use_remarks(AsmRemarks &remarks) { _asm_remarks.share(remarks); }
  void use_strings(DbgStrings &strings) { _dbg_strings.share(strings); }
#endif
};

//----------------------------------------------------------------------------------------------------
// RuntimeBlob: used for non-compiled method code (adapters, stubs, blobs)

class RuntimeBlob : public CodeBlob {
  friend class VMStructs;
public:

  // Creation
  // a) simple CodeBlob
  RuntimeBlob(const char* name, CodeBlobKind kind, int size, uint16_t header_size)
    : CodeBlob(name, kind, size, header_size)
  {}

  // b) full CodeBlob
  // frame_complete is the offset from the beginning of the instructions
  // to where the frame setup (from stackwalk viewpoint) is complete.
  RuntimeBlob(
    const char* name,
    CodeBlobKind kind,
    CodeBuffer* cb,
    int         size,
    uint16_t    header_size,
    int16_t     frame_complete,
    int         frame_size,
    OopMapSet*  oop_maps,
    bool        caller_must_gc_arguments = false
  );

  static void free(RuntimeBlob* blob);

  // Deal with Disassembler, VTune, Forte, JvmtiExport, MemoryService.
  static void trace_new_stub(RuntimeBlob* blob, const char* name1, const char* name2 = "");

  class Vptr : public CodeBlob::Vptr {
  };
};

class WhiteBox;
//----------------------------------------------------------------------------------------------------
// BufferBlob: used to hold non-relocatable machine code such as the interpreter, stubroutines, etc.

class BufferBlob: public RuntimeBlob {
  friend class VMStructs;
  friend class AdapterBlob;
  friend class VtableBlob;
  friend class MethodHandlesAdapterBlob;
  friend class BufferedInlineTypeBlob;
  friend class UpcallStub;
  friend class WhiteBox;

private:
  // Creation support
  BufferBlob(const char* name, CodeBlobKind kind, int size);
  BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, int header_size);
  BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments = false);

  void* operator new(size_t s, unsigned size) throw();

public:
  // Creation
  static BufferBlob* create(const char* name, uint buffer_size);
  static BufferBlob* create(const char* name, CodeBuffer* cb);

  static void free(BufferBlob* buf);

  void print_on_impl(outputStream* st) const;
  void print_value_on_impl(outputStream* st) const;

  class Vptr : public RuntimeBlob::Vptr {
    void print_on(const CodeBlob* instance, outputStream* st) const override {
      ((const BufferBlob*)instance)->print_on_impl(st);
    }
    void print_value_on(const CodeBlob* instance, outputStream* st) const override {
      ((const BufferBlob*)instance)->print_value_on_impl(st);
    }
  };

  static const Vptr _vpntr;
};


//----------------------------------------------------------------------------------------------------
// AdapterBlob: used to hold C2I/I2C adapters

class AdapterBlob: public BufferBlob {
private:
  AdapterBlob(int size, CodeBuffer* cb, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments = false);

public:
  // Creation
  static AdapterBlob* create(CodeBuffer* cb,
                             int frame_complete,
                             int frame_size,
                             OopMapSet* oop_maps,
                             bool caller_must_gc_arguments = false);

  bool caller_must_gc_arguments(JavaThread* thread) const { return true; }
};

//---------------------------------------------------------------------------------------------------
class VtableBlob: public BufferBlob {
private:
  VtableBlob(const char*, int);

  void* operator new(size_t s, unsigned size) throw();

public:
  // Creation
  static VtableBlob* create(const char* name, int buffer_size);
};

//----------------------------------------------------------------------------------------------------
// MethodHandlesAdapterBlob: used to hold MethodHandles adapters

class MethodHandlesAdapterBlob: public BufferBlob {
private:
  MethodHandlesAdapterBlob(int size): BufferBlob("MethodHandles adapters", CodeBlobKind::MHAdapter, size) {}

public:
  // Creation
  static MethodHandlesAdapterBlob* create(int buffer_size);
};

//----------------------------------------------------------------------------------------------------
// BufferedInlineTypeBlob : used for pack/unpack handlers

class BufferedInlineTypeBlob: public BufferBlob {
private:
  const int _pack_fields_off;
  const int _pack_fields_jobject_off;
  const int _unpack_fields_off;

  BufferedInlineTypeBlob(int size, CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off);

public:
  // Creation
  static BufferedInlineTypeBlob* create(CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off);

  address pack_fields() const         { return code_begin() + _pack_fields_off; }
  address pack_fields_jobject() const { return code_begin() + _pack_fields_jobject_off; }
  address unpack_fields() const       { return code_begin() + _unpack_fields_off; }
};

//----------------------------------------------------------------------------------------------------
// RuntimeStub: describes stubs used by compiled code to call a (static) C++ runtime routine

class RuntimeStub: public RuntimeBlob {
  friend class VMStructs;
private:
  // Creation support
  RuntimeStub(
    const char* name,
    CodeBuffer* cb,
    int         size,
    int16_t     frame_complete,
    int         frame_size,
    OopMapSet*  oop_maps,
    bool        caller_must_gc_arguments
  );

  void* operator new(size_t s, unsigned size) throw();

public:
  // Creation
  static RuntimeStub* new_runtime_stub(
    const char* stub_name,
    CodeBuffer* cb,
    int16_t     frame_complete,
    int         frame_size,
    OopMapSet*  oop_maps,
    bool        caller_must_gc_arguments,
    bool        alloc_fail_is_fatal = true
  );

  static void free(RuntimeStub* stub) { RuntimeBlob::free(stub); }

  address entry_point() const { return code_begin(); }

  void print_on_impl(outputStream* st) const;
  void print_value_on_impl(outputStream* st) const;

  class Vptr : public RuntimeBlob::Vptr {
    void print_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_runtime_stub()->print_on_impl(st);
    }
    void print_value_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_runtime_stub()->print_value_on_impl(st);
    }
  };

  static const Vptr _vpntr;
};


//----------------------------------------------------------------------------------------------------
// Super-class for all blobs that exist in only one instance. Implements default behaviour.

class SingletonBlob: public RuntimeBlob {
  friend class VMStructs;

protected:
  void* operator new(size_t s, unsigned size, bool alloc_fail_is_fatal = true) throw();

public:
  SingletonBlob(
    const char*  name,
    CodeBlobKind kind,
    CodeBuffer*  cb,
    int          size,
    uint16_t     header_size,
    int          frame_size,
    OopMapSet*   oop_maps
  )
    : RuntimeBlob(name, kind, cb, size, header_size, CodeOffsets::frame_never_safe, frame_size, oop_maps)
  {};

  address entry_point() { return code_begin(); }

  void print_on_impl(outputStream* st) const;
  void print_value_on_impl(outputStream* st) const;

  class Vptr : public RuntimeBlob::Vptr {
    void print_on(const CodeBlob* instance, outputStream* st) const override {
      ((const SingletonBlob*)instance)->print_on_impl(st);
    }
    void print_value_on(const CodeBlob* instance, outputStream* st) const override {
      ((const SingletonBlob*)instance)->print_value_on_impl(st);
    }
  };

  static const Vptr _vpntr;
};


//----------------------------------------------------------------------------------------------------
// DeoptimizationBlob

class DeoptimizationBlob: public SingletonBlob {
  friend class VMStructs;
  friend class JVMCIVMStructs;
private:
  int _unpack_offset;
  int _unpack_with_exception;
  int _unpack_with_reexecution;

  int _unpack_with_exception_in_tls;

#if INCLUDE_JVMCI
  // Offsets when JVMCI calls uncommon_trap.
  int _uncommon_trap_offset;
  int _implicit_exception_uncommon_trap_offset;
#endif

  // Creation support
  DeoptimizationBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         unpack_offset,
    int         unpack_with_exception_offset,
    int         unpack_with_reexecution_offset,
    int         frame_size
  );

public:
  // Creation
  static DeoptimizationBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         unpack_offset,
    int         unpack_with_exception_offset,
    int         unpack_with_reexecution_offset,
    int         frame_size
  );

  address unpack() const                  { return code_begin() + _unpack_offset;           }
  address unpack_with_exception() const   { return code_begin() + _unpack_with_exception;   }
  address unpack_with_reexecution() const { return code_begin() + _unpack_with_reexecution; }

  // Alternate entry point for C1 where the exception and issuing pc
  // are in JavaThread::_exception_oop and JavaThread::_exception_pc
  // instead of being in registers. This is needed because C1 doesn't
  // model exception paths in a way that keeps these registers free so
  // there may be live values in those registers during deopt.
  void set_unpack_with_exception_in_tls_offset(int offset) {
    _unpack_with_exception_in_tls = offset;
    assert(code_contains(code_begin() + _unpack_with_exception_in_tls), "must be PC inside codeblob");
  }
  address unpack_with_exception_in_tls() const { return code_begin() + _unpack_with_exception_in_tls; }

#if INCLUDE_JVMCI
  // Offsets when JVMCI calls uncommon_trap.
  void set_uncommon_trap_offset(int offset) {
    _uncommon_trap_offset = offset;
    assert(contains(code_begin() + _uncommon_trap_offset), "must be PC inside codeblob");
  }
  address uncommon_trap() const { return code_begin() + _uncommon_trap_offset; }

  void set_implicit_exception_uncommon_trap_offset(int offset) {
    _implicit_exception_uncommon_trap_offset = offset;
    assert(contains(code_begin() + _implicit_exception_uncommon_trap_offset), "must be PC inside codeblob");
  }
  address implicit_exception_uncommon_trap() const { return code_begin() + _implicit_exception_uncommon_trap_offset; }
#endif // INCLUDE_JVMCI

  void print_value_on_impl(outputStream* st) const;

  class Vptr : public SingletonBlob::Vptr {
    void print_value_on(const CodeBlob* instance, outputStream* st) const override {
      ((const DeoptimizationBlob*)instance)->print_value_on_impl(st);
    }
  };

  static const Vptr _vpntr;
};


//----------------------------------------------------------------------------------------------------
// UncommonTrapBlob (currently only used by Compiler 2)

#ifdef COMPILER2

class UncommonTrapBlob: public SingletonBlob {
  friend class VMStructs;
private:
  // Creation support
  UncommonTrapBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         frame_size
  );

public:
  // Creation
  static UncommonTrapBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         frame_size
  );
};


//----------------------------------------------------------------------------------------------------
// ExceptionBlob: used for exception unwinding in compiled code (currently only used by Compiler 2)

class ExceptionBlob: public SingletonBlob {
  friend class VMStructs;
private:
  // Creation support
  ExceptionBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         frame_size
  );

public:
  // Creation
  static ExceptionBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         frame_size
  );
};
#endif // COMPILER2


//----------------------------------------------------------------------------------------------------
// SafepointBlob: handles illegal_instruction exceptions during a safepoint

class SafepointBlob: public SingletonBlob {
  friend class VMStructs;
private:
  // Creation support
  SafepointBlob(
    CodeBuffer* cb,
    int         size,
    OopMapSet*  oop_maps,
    int         frame_size
  );

public:
  // Creation
  static SafepointBlob* create(
    CodeBuffer* cb,
    OopMapSet*  oop_maps,
    int         frame_size
  );
};

//----------------------------------------------------------------------------------------------------

class UpcallLinker;

// A (Panama) upcall stub. Not used by JNI.
class UpcallStub: public RuntimeBlob {
  friend class VMStructs;
  friend class UpcallLinker;
private:
  jobject  _receiver;
  ByteSize _frame_data_offset;

  UpcallStub(const char* name, CodeBuffer* cb, int size, jobject receiver, ByteSize frame_data_offset);

  void* operator new(size_t s, unsigned size) throw();

  struct FrameData {
    JavaFrameAnchor jfa;
    JavaThread*     thread;
    JNIHandleBlock* old_handles;
    JNIHandleBlock* new_handles;
  };

  // defined in frame_ARCH.cpp
  FrameData* frame_data_for_frame(const frame& frame) const;
public:
  // Creation
  static UpcallStub* create(const char* name, CodeBuffer* cb, jobject receiver, ByteSize frame_data_offset);

  static void free(UpcallStub* blob);

  jobject receiver() { return _receiver; }

  JavaFrameAnchor* jfa_for_frame(const frame& frame) const;

  // GC support
  void oops_do(OopClosure* f, const frame& frame);

  void print_on_impl(outputStream* st) const;
  void print_value_on_impl(outputStream* st) const;

  class Vptr : public RuntimeBlob::Vptr {
    void print_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_upcall_stub()->print_on_impl(st);
    }
    void print_value_on(const CodeBlob* instance, outputStream* st) const override {
      instance->as_upcall_stub()->print_value_on_impl(st);
    }
  };

  static const Vptr _vpntr;
};

#endif // SHARE_CODE_CODEBLOB_HPP