46 #include "runtime/sharedRuntime.hpp"
47 #include "runtime/stubCodeGenerator.hpp"
48 #include "runtime/stubRoutines.hpp"
49 #include "runtime/vframe.hpp"
50 #include "services/memoryService.hpp"
51 #include "utilities/align.hpp"
52 #ifdef COMPILER1
53 #include "c1/c1_Runtime1.hpp"
54 #endif
55
56 #include <type_traits>
57
// Virtual methods are not allowed in code blobs to simplify caching compiled code.
// Check all "leaf" subclasses of CodeBlob class.
// (Dispatch that would normally be virtual goes through the proxy Vptr
// tables defined below instead.)

static_assert(!std::is_polymorphic<nmethod>::value, "no virtual methods are allowed in nmethod");
static_assert(!std::is_polymorphic<AdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<VtableBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<MethodHandlesAdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<RuntimeStub>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<DeoptimizationBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<SafepointBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UpcallStub>::value, "no virtual methods are allowed in code blobs");
// ExceptionBlob and UncommonTrapBlob only exist in COMPILER2 builds.
#ifdef COMPILER2
static_assert(!std::is_polymorphic<ExceptionBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UncommonTrapBlob>::value, "no virtual methods are allowed in code blobs");
#endif
73
// Add proxy vtables.
// We need only few for now - they are used only from prints.
// Out-of-line definitions of the static per-class proxy vtable objects;
// CodeBlob::vptr() below selects among them by blob kind. Kinds without a
// dedicated instance here presumably share an inherited one — confirm
// against the class declarations.
const nmethod::Vptr nmethod::_vpntr;
const BufferBlob::Vptr BufferBlob::_vpntr;
const RuntimeStub::Vptr RuntimeStub::_vpntr;
const SingletonBlob::Vptr SingletonBlob::_vpntr;
const DeoptimizationBlob::Vptr DeoptimizationBlob::_vpntr;
const UpcallStub::Vptr UpcallStub::_vpntr;
82
// Map this blob's _kind to its proxy vtable (used in place of real C++
// virtual dispatch — see the static_asserts above).
// NOTE: the entry order of this table must track the CodeBlobKind
// enumerator order exactly, including the COMPILER2-only kinds.
const CodeBlob::Vptr* CodeBlob::vptr() const {
  constexpr const CodeBlob::Vptr* array[(size_t)CodeBlobKind::Number_Of_Kinds] = {
      nullptr/* None */,
      &nmethod::_vpntr,
      &BufferBlob::_vpntr,
      &AdapterBlob::_vpntr,              // presumably inherited from BufferBlob — no dedicated instance above
      &VtableBlob::_vpntr,
      &MethodHandlesAdapterBlob::_vpntr,
      &RuntimeStub::_vpntr,
      &DeoptimizationBlob::_vpntr,
      &SafepointBlob::_vpntr,
#ifdef COMPILER2
      &ExceptionBlob::_vpntr,
      &UncommonTrapBlob::_vpntr,
#endif
      &UpcallStub::_vpntr
  };

  return array[(size_t)_kind];
}
103
104 unsigned int CodeBlob::align_code_offset(int offset) {
105 // align the size to CodeEntryAlignment
106 int header_size = (int)CodeHeap::header_size();
107 return align_up(offset + header_size, CodeEntryAlignment) - header_size;
108 }
109
110 // This must be consistent with the CodeBlob constructor's layout actions.
295 BufferBlob* BufferBlob::create(const char* name, uint buffer_size) {
296 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
297
298 BufferBlob* blob = nullptr;
299 unsigned int size = sizeof(BufferBlob);
300 // align the size to CodeEntryAlignment
301 size = CodeBlob::align_code_offset(size);
302 size += align_up(buffer_size, oopSize);
303 assert(name != nullptr, "must provide a name");
304 {
305 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
306 blob = new (size) BufferBlob(name, CodeBlobKind::Buffer, size);
307 }
308 // Track memory usage statistic after releasing CodeCache_lock
309 MemoryService::track_code_cache_memory_usage();
310
311 return blob;
312 }
313
314
// Construct a BufferBlob around an existing CodeBuffer. BufferBlobs carry
// no frame info: frame_never_safe, zero frame size, no oop maps.
BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size)
: RuntimeBlob(name, kind, cb, size, sizeof(BufferBlob), CodeOffsets::frame_never_safe, 0, nullptr)
{}
318
319 // Used by gtest
320 BufferBlob* BufferBlob::create(const char* name, CodeBuffer* cb) {
321 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
322
323 BufferBlob* blob = nullptr;
324 unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferBlob));
325 assert(name != nullptr, "must provide a name");
326 {
327 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
328 blob = new (size) BufferBlob(name, CodeBlobKind::Buffer, cb, size);
329 }
330 // Track memory usage statistic after releasing CodeCache_lock
331 MemoryService::track_code_cache_memory_usage();
332
333 return blob;
334 }
335
// Placement allocator: carve 'size' bytes out of the non-nmethod code heap.
// The C++ object size 's' is ignored; 'size' is the full blob size
// (header plus code) computed by the caller.
void* BufferBlob::operator new(size_t s, unsigned size) throw() {
  return CodeCache::allocate(size, CodeBlobType::NonNMethod);
}
339
// Release the blob's code cache storage (delegates to RuntimeBlob::free).
void BufferBlob::free(BufferBlob *blob) {
  RuntimeBlob::free(blob);
}
343
344
345 //----------------------------------------------------------------------------------------------------
346 // Implementation of AdapterBlob
347
// I2C/C2I adapter blob: a BufferBlob with a fixed name and Adapter kind.
AdapterBlob::AdapterBlob(int size, CodeBuffer* cb) :
BufferBlob("I2C/C2I adapters", CodeBlobKind::Adapter, cb, size) {
  CodeCache::commit(this);  // register the finished blob with the code cache
}
352
353 AdapterBlob* AdapterBlob::create(CodeBuffer* cb) {
354 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
355
356 CodeCache::gc_on_allocation();
357
358 AdapterBlob* blob = nullptr;
359 unsigned int size = CodeBlob::allocation_size(cb, sizeof(AdapterBlob));
360 {
361 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
362 blob = new (size) AdapterBlob(size, cb);
363 }
364 // Track memory usage statistic after releasing CodeCache_lock
365 MemoryService::track_code_cache_memory_usage();
366
367 return blob;
368 }
369
370 //----------------------------------------------------------------------------------------------------
371 // Implementation of VtableBlob
372
// Placement allocator for VtableBlob; 's' (C++ object size) is unused,
// 'size' is the full blob size supplied by the caller.
void* VtableBlob::operator new(size_t s, unsigned size) throw() {
  // Handling of allocation failure stops compilation and prints a bunch of
  // stuff, which requires unlocking the CodeCache_lock, so that the Compile_lock
  // can be locked, and then re-locking the CodeCache_lock. That is not safe in
  // this context as we hold the CompiledICLocker. So we just don't handle code
  // cache exhaustion here; we leave that for a later allocation that does not
  // hold the CompiledICLocker.
  return CodeCache::allocate(size, CodeBlobType::NonNMethod, false /* handle_alloc_failure */);
}
382
424
425 MethodHandlesAdapterBlob* blob = nullptr;
426 unsigned int size = sizeof(MethodHandlesAdapterBlob);
427 // align the size to CodeEntryAlignment
428 size = CodeBlob::align_code_offset(size);
429 size += align_up(buffer_size, oopSize);
430 {
431 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
432 blob = new (size) MethodHandlesAdapterBlob(size);
433 if (blob == nullptr) {
434 vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "CodeCache: no room for method handle adapter blob");
435 }
436 }
437 // Track memory usage statistic after releasing CodeCache_lock
438 MemoryService::track_code_cache_memory_usage();
439
440 return blob;
441 }
442
443 //----------------------------------------------------------------------------------------------------
444 // Implementation of RuntimeStub
445
// Construct a RuntimeStub over the given CodeBuffer. Unlike BufferBlobs,
// runtime stubs carry real frame info (frame_complete, frame_size, oop
// maps) so the stack walker can traverse them.
RuntimeStub::RuntimeStub(
  const char* name,
  CodeBuffer* cb,
  int size,
  int16_t frame_complete,
  int frame_size,
  OopMapSet* oop_maps,
  bool caller_must_gc_arguments
)
: RuntimeBlob(name, CodeBlobKind::RuntimeStub, cb, size, sizeof(RuntimeStub),
              frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{
}
459
460 RuntimeStub* RuntimeStub::new_runtime_stub(const char* stub_name,
461 CodeBuffer* cb,
462 int16_t frame_complete,
463 int frame_size,
|
46 #include "runtime/sharedRuntime.hpp"
47 #include "runtime/stubCodeGenerator.hpp"
48 #include "runtime/stubRoutines.hpp"
49 #include "runtime/vframe.hpp"
50 #include "services/memoryService.hpp"
51 #include "utilities/align.hpp"
52 #ifdef COMPILER1
53 #include "c1/c1_Runtime1.hpp"
54 #endif
55
56 #include <type_traits>
57
// Virtual methods are not allowed in code blobs to simplify caching compiled code.
// Check all "leaf" subclasses of CodeBlob class.
// (Dispatch that would normally be virtual goes through the proxy Vptr
// tables defined below instead.)

static_assert(!std::is_polymorphic<nmethod>::value, "no virtual methods are allowed in nmethod");
static_assert(!std::is_polymorphic<AdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<VtableBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<MethodHandlesAdapterBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<RuntimeStub>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<BufferedInlineTypeBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<DeoptimizationBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<SafepointBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UpcallStub>::value, "no virtual methods are allowed in code blobs");
// ExceptionBlob and UncommonTrapBlob only exist in COMPILER2 builds.
#ifdef COMPILER2
static_assert(!std::is_polymorphic<ExceptionBlob>::value, "no virtual methods are allowed in code blobs");
static_assert(!std::is_polymorphic<UncommonTrapBlob>::value, "no virtual methods are allowed in code blobs");
#endif
74
// Add proxy vtables.
// We need only few for now - they are used only from prints.
// Out-of-line definitions of the static per-class proxy vtable objects;
// CodeBlob::vptr() below selects among them by blob kind.
// NOTE(review): there is no dedicated BufferedInlineTypeBlob::_vpntr here
// even though vptr() references one — presumably it resolves to an
// inherited BufferBlob::_vpntr; confirm against the class declaration.
const nmethod::Vptr nmethod::_vpntr;
const BufferBlob::Vptr BufferBlob::_vpntr;
const RuntimeStub::Vptr RuntimeStub::_vpntr;
const SingletonBlob::Vptr SingletonBlob::_vpntr;
const DeoptimizationBlob::Vptr DeoptimizationBlob::_vpntr;
const UpcallStub::Vptr UpcallStub::_vpntr;
83
// Map this blob's _kind to its proxy vtable (used in place of real C++
// virtual dispatch — see the static_asserts above).
// NOTE: the entry order of this table must track the CodeBlobKind
// enumerator order exactly, including the COMPILER2-only kinds.
const CodeBlob::Vptr* CodeBlob::vptr() const {
  constexpr const CodeBlob::Vptr* array[(size_t)CodeBlobKind::Number_Of_Kinds] = {
      nullptr/* None */,
      &nmethod::_vpntr,
      &BufferBlob::_vpntr,
      &AdapterBlob::_vpntr,              // presumably inherited from BufferBlob — no dedicated instance above
      &VtableBlob::_vpntr,
      &MethodHandlesAdapterBlob::_vpntr,
      &BufferedInlineTypeBlob::_vpntr,
      &RuntimeStub::_vpntr,
      &DeoptimizationBlob::_vpntr,
      &SafepointBlob::_vpntr,
#ifdef COMPILER2
      &ExceptionBlob::_vpntr,
      &UncommonTrapBlob::_vpntr,
#endif
      &UpcallStub::_vpntr
  };

  return array[(size_t)_kind];
}
105
106 unsigned int CodeBlob::align_code_offset(int offset) {
107 // align the size to CodeEntryAlignment
108 int header_size = (int)CodeHeap::header_size();
109 return align_up(offset + header_size, CodeEntryAlignment) - header_size;
110 }
111
112 // This must be consistent with the CodeBlob constructor's layout actions.
297 BufferBlob* BufferBlob::create(const char* name, uint buffer_size) {
298 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
299
300 BufferBlob* blob = nullptr;
301 unsigned int size = sizeof(BufferBlob);
302 // align the size to CodeEntryAlignment
303 size = CodeBlob::align_code_offset(size);
304 size += align_up(buffer_size, oopSize);
305 assert(name != nullptr, "must provide a name");
306 {
307 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
308 blob = new (size) BufferBlob(name, CodeBlobKind::Buffer, size);
309 }
310 // Track memory usage statistic after releasing CodeCache_lock
311 MemoryService::track_code_cache_memory_usage();
312
313 return blob;
314 }
315
316
// Construct a BufferBlob around an existing CodeBuffer. 'header_size'
// lets subclasses (e.g. BufferedInlineTypeBlob) pass their own sizeof.
// No frame info: frame_never_safe, zero frame size, no oop maps.
BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, int header_size)
: RuntimeBlob(name, kind, cb, size, header_size, CodeOffsets::frame_never_safe, 0, nullptr)
{}
320
321 // Used by gtest
322 BufferBlob* BufferBlob::create(const char* name, CodeBuffer* cb) {
323 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
324
325 BufferBlob* blob = nullptr;
326 unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferBlob));
327 assert(name != nullptr, "must provide a name");
328 {
329 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
330 blob = new (size) BufferBlob(name, CodeBlobKind::Buffer, cb, size, sizeof(BufferBlob));
331 }
332 // Track memory usage statistic after releasing CodeCache_lock
333 MemoryService::track_code_cache_memory_usage();
334
335 return blob;
336 }
337
// Placement allocator: carve 'size' bytes out of the non-nmethod code heap.
// The C++ object size 's' is ignored; 'size' is the full blob size
// (header plus code) computed by the caller.
void* BufferBlob::operator new(size_t s, unsigned size) throw() {
  return CodeCache::allocate(size, CodeBlobType::NonNMethod);
}
341
// Release the blob's code cache storage (delegates to RuntimeBlob::free).
void BufferBlob::free(BufferBlob *blob) {
  RuntimeBlob::free(blob);
}
345
// Construct a BufferBlob that carries real frame info (frame_complete,
// frame_size, oop maps) — used by AdapterBlob, which is stack-walkable.
BufferBlob::BufferBlob(const char* name, CodeBlobKind kind, CodeBuffer* cb, int size, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments)
: RuntimeBlob(name, kind, cb, size, sizeof(BufferBlob), frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{}
349
350
351 //----------------------------------------------------------------------------------------------------
352 // Implementation of AdapterBlob
353
// I2C/C2I adapter blob: a BufferBlob with frame info so the adapter's
// frames can be walked.
AdapterBlob::AdapterBlob(int size, CodeBuffer* cb, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) :
BufferBlob("I2C/C2I adapters", CodeBlobKind::Adapter, cb, size, frame_complete, frame_size, oop_maps, caller_must_gc_arguments) {
  CodeCache::commit(this);  // register the finished blob with the code cache
}
358
359 AdapterBlob* AdapterBlob::create(CodeBuffer* cb, int frame_complete, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments) {
360 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
361
362 CodeCache::gc_on_allocation();
363
364 AdapterBlob* blob = nullptr;
365 unsigned int size = CodeBlob::allocation_size(cb, sizeof(AdapterBlob));
366 {
367 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
368 blob = new (size) AdapterBlob(size, cb, frame_complete, frame_size, oop_maps, caller_must_gc_arguments);
369 }
370 // Track memory usage statistic after releasing CodeCache_lock
371 MemoryService::track_code_cache_memory_usage();
372
373 return blob;
374 }
375
376 //----------------------------------------------------------------------------------------------------
377 // Implementation of VtableBlob
378
// Placement allocator for VtableBlob; 's' (C++ object size) is unused,
// 'size' is the full blob size supplied by the caller.
void* VtableBlob::operator new(size_t s, unsigned size) throw() {
  // Handling of allocation failure stops compilation and prints a bunch of
  // stuff, which requires unlocking the CodeCache_lock, so that the Compile_lock
  // can be locked, and then re-locking the CodeCache_lock. That is not safe in
  // this context as we hold the CompiledICLocker. So we just don't handle code
  // cache exhaustion here; we leave that for a later allocation that does not
  // hold the CompiledICLocker.
  return CodeCache::allocate(size, CodeBlobType::NonNMethod, false /* handle_alloc_failure */);
}
388
430
431 MethodHandlesAdapterBlob* blob = nullptr;
432 unsigned int size = sizeof(MethodHandlesAdapterBlob);
433 // align the size to CodeEntryAlignment
434 size = CodeBlob::align_code_offset(size);
435 size += align_up(buffer_size, oopSize);
436 {
437 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
438 blob = new (size) MethodHandlesAdapterBlob(size);
439 if (blob == nullptr) {
440 vm_exit_out_of_memory(size, OOM_MALLOC_ERROR, "CodeCache: no room for method handle adapter blob");
441 }
442 }
443 // Track memory usage statistic after releasing CodeCache_lock
444 MemoryService::track_code_cache_memory_usage();
445
446 return blob;
447 }
448
449 //----------------------------------------------------------------------------------------------------
450 // Implementation of BufferedInlineTypeBlob
// BufferBlob for a buffered inline type. The three offsets record
// positions inside the code buffer (presumably entry points of the
// pack/unpack field stubs — confirm against callers of the accessors).
BufferedInlineTypeBlob::BufferedInlineTypeBlob(int size, CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off) :
BufferBlob("buffered inline type", CodeBlobKind::BufferedInlineType, cb, size, sizeof(BufferedInlineTypeBlob)),
_pack_fields_off(pack_fields_off),
_pack_fields_jobject_off(pack_fields_jobject_off),
_unpack_fields_off(unpack_fields_off) {
  CodeCache::commit(this);  // register the finished blob with the code cache
}
458
459 BufferedInlineTypeBlob* BufferedInlineTypeBlob::create(CodeBuffer* cb, int pack_fields_off, int pack_fields_jobject_off, int unpack_fields_off) {
460 ThreadInVMfromUnknown __tiv; // get to VM state in case we block on CodeCache_lock
461
462 BufferedInlineTypeBlob* blob = nullptr;
463 unsigned int size = CodeBlob::allocation_size(cb, sizeof(BufferedInlineTypeBlob));
464 {
465 MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
466 blob = new (size) BufferedInlineTypeBlob(size, cb, pack_fields_off, pack_fields_jobject_off, unpack_fields_off);
467 }
468 // Track memory usage statistic after releasing CodeCache_lock
469 MemoryService::track_code_cache_memory_usage();
470
471 return blob;
472 }
473
474 //----------------------------------------------------------------------------------------------------
475 // Implementation of RuntimeStub
476
// Construct a RuntimeStub over the given CodeBuffer. Unlike BufferBlobs,
// runtime stubs carry real frame info (frame_complete, frame_size, oop
// maps) so the stack walker can traverse them.
RuntimeStub::RuntimeStub(
  const char* name,
  CodeBuffer* cb,
  int size,
  int16_t frame_complete,
  int frame_size,
  OopMapSet* oop_maps,
  bool caller_must_gc_arguments
)
: RuntimeBlob(name, CodeBlobKind::RuntimeStub, cb, size, sizeof(RuntimeStub),
              frame_complete, frame_size, oop_maps, caller_must_gc_arguments)
{
}
490
491 RuntimeStub* RuntimeStub::new_runtime_stub(const char* stub_name,
492 CodeBuffer* cb,
493 int16_t frame_complete,
494 int frame_size,
|