603 // only for C2, but done for C1 as well) we need a callee-saved oop
604 // map and therefore have to make these stubs into RuntimeStubs
605 // rather than BufferBlobs. If the compiler needs all registers to
606 // be preserved between the fault point and the exception handler
607 // then it must assume responsibility for that in
608 // AbstractCompiler::continuation_for_implicit_null_exception or
609 // continuation_for_implicit_division_by_zero_exception. All other
610 // implicit exceptions (e.g., NullPointerException or
611 // AbstractMethodError on entry) are either at call sites or
612 // otherwise assume that stack unwinding will be initiated, so
613 // caller saved registers were assumed volatile in the compiler.
// Emits a stub that transfers control into the VM at runtime_entry to
// raise an exception; arg1/arg2 optionally carry registers to pass to
// the runtime call (noreg means "no argument").
614 address generate_throw_exception(const char* name,
615 address runtime_entry,
616 Register arg1 = noreg,
617 Register arg2 = noreg);
618
619 // shared exception handler for FFM upcall stubs
620 address generate_upcall_stub_exception_handler();
// Stub that resolves the target method for an FFM upcall
// (NOTE(review): inferred from the name — confirm against the .cpp).
621 address generate_upcall_stub_load_target();
622
623 // Specialized stub implementations for UseSecondarySupersTable.
624 void generate_lookup_secondary_supers_table_stub();
625
626 // Slow path implementation for UseSecondarySupersTable.
627 address generate_lookup_secondary_supers_table_slow_path_stub();
628
// Sets up the control-word constants used by generated code
// (presumably x87/MXCSR defaults — definition not visible here).
629 void create_control_words();
630
631 // Initialization entry points — one per stub-generation phase,
631 // selected by the blob_id passed to the constructor below.
632 void generate_initial_stubs();
633 void generate_continuation_stubs();
634 void generate_compiler_stubs();
635 void generate_final_stubs();
636
637 public:
// Constructs the generator over the given code buffer and runs the
// generation phase identified by blob_id.
638 StubGenerator(CodeBuffer* code, StubGenBlobId blob_id);
639 };
640
641 #endif // CPU_X86_STUBGENERATOR_X86_64_HPP
|
603 // only for C2, but done for C1 as well) we need a callee-saved oop
604 // map and therefore have to make these stubs into RuntimeStubs
605 // rather than BufferBlobs. If the compiler needs all registers to
606 // be preserved between the fault point and the exception handler
607 // then it must assume responsibility for that in
608 // AbstractCompiler::continuation_for_implicit_null_exception or
609 // continuation_for_implicit_division_by_zero_exception. All other
610 // implicit exceptions (e.g., NullPointerException or
611 // AbstractMethodError on entry) are either at call sites or
612 // otherwise assume that stack unwinding will be initiated, so
613 // caller saved registers were assumed volatile in the compiler.
// Emits a stub that transfers control into the VM at runtime_entry to
// raise an exception; arg1/arg2 optionally carry registers to pass to
// the runtime call (noreg means "no argument").
614 address generate_throw_exception(const char* name,
615 address runtime_entry,
616 Register arg1 = noreg,
617 Register arg2 = noreg);
618
619 // shared exception handler for FFM upcall stubs
620 address generate_upcall_stub_exception_handler();
// Stub that resolves the target method for an FFM upcall
// (NOTE(review): inferred from the name — confirm against the .cpp).
621 address generate_upcall_stub_load_target();
622
623 // interpreter or compiled code marshalling registers to/from inline type instance
// has_res indicates whether a result value must be marshalled back.
624 address generate_return_value_stub(address destination, const char* name, bool has_res);
625
626 // Specialized stub implementations for UseSecondarySupersTable.
627 void generate_lookup_secondary_supers_table_stub();
628
629 // Slow path implementation for UseSecondarySupersTable.
630 address generate_lookup_secondary_supers_table_slow_path_stub();
631
// Sets up the control-word constants used by generated code
// (presumably x87/MXCSR defaults — definition not visible here).
632 void create_control_words();
633
634 // Initialization entry points — one per stub-generation phase,
634 // selected by the blob_id passed to the constructor below.
635 void generate_initial_stubs();
636 void generate_continuation_stubs();
637 void generate_compiler_stubs();
638 void generate_final_stubs();
639
640 public:
// Constructs the generator over the given code buffer and runs the
// generation phase identified by blob_id.
641 StubGenerator(CodeBuffer* code, StubGenBlobId blob_id);
642 };
643
644 #endif // CPU_X86_STUBGENERATOR_X86_64_HPP
|