15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "cds/aotArtifactFinder.hpp"
26 #include "cds/aotClassLinker.hpp"
27 #include "cds/aotLinkedClassBulkLoader.hpp"
28 #include "cds/archiveBuilder.hpp"
29 #include "cds/archiveHeapWriter.hpp"
30 #include "cds/archiveUtils.hpp"
31 #include "cds/cdsConfig.hpp"
32 #include "cds/cppVtables.hpp"
33 #include "cds/dumpAllocStats.hpp"
34 #include "cds/dynamicArchive.hpp"
35 #include "cds/heapShared.hpp"
36 #include "cds/metaspaceShared.hpp"
37 #include "cds/regeneratedClasses.hpp"
38 #include "classfile/classLoader.hpp"
39 #include "classfile/classLoaderDataShared.hpp"
40 #include "classfile/classLoaderExt.hpp"
41 #include "classfile/javaClasses.hpp"
42 #include "classfile/symbolTable.hpp"
43 #include "classfile/systemDictionaryShared.hpp"
44 #include "classfile/vmClasses.hpp"
45 #include "interpreter/abstractInterpreter.hpp"
46 #include "jvm.h"
47 #include "logging/log.hpp"
48 #include "logging/logStream.hpp"
49 #include "memory/allStatic.hpp"
50 #include "memory/memoryReserver.hpp"
51 #include "memory/memRegion.hpp"
52 #include "memory/resourceArea.hpp"
53 #include "oops/compressedKlass.inline.hpp"
54 #include "oops/instanceKlass.hpp"
55 #include "oops/objArrayKlass.hpp"
56 #include "oops/objArrayOop.inline.hpp"
57 #include "oops/oopHandle.inline.hpp"
58 #include "runtime/arguments.hpp"
59 #include "runtime/fieldDescriptor.inline.hpp"
60 #include "runtime/globals_extension.hpp"
61 #include "runtime/javaThread.hpp"
62 #include "runtime/sharedRuntime.hpp"
63 #include "utilities/align.hpp"
64 #include "utilities/bitMap.inline.hpp"
65 #include "utilities/formatBuffer.hpp"
66
// The singleton ArchiveBuilder used by the current dump operation (null when no dump is active).
ArchiveBuilder* ArchiveBuilder::_current = nullptr;
68
// On destruction, records in the dump-time allocation statistics how many
// bytes were allocated in the ro region while this mark was alive
// (current ro top minus the top captured at construction).
ArchiveBuilder::OtherROAllocMark::~OtherROAllocMark() {
  char* newtop = ArchiveBuilder::current()->_ro_region.top();
  ArchiveBuilder::alloc_stats()->record_other_type(int(newtop - _oldtop), true);
}
73
// Starts with a 16K-bit embedded-pointer map and capacity for 128K source objects.
ArchiveBuilder::SourceObjList::SourceObjList() : _ptrmap(16 * K, mtClassShared) {
  _total_bytes = 0;
  _objs = new (mtClassShared) GrowableArray<SourceObjInfo*>(128 * K, mtClassShared);
}
112 assert(field_offset_in_bytes >= 0, "must be");
113 assert(field_offset_in_bytes + intx(sizeof(intptr_t)) <= intx(src_obj_size), "must be");
114 assert(is_aligned(field_offset_in_bytes, sizeof(address)), "must be");
115
116 BitMap::idx_t idx = BitMap::idx_t(src_info->ptrmap_start() + (uintx)(field_offset_in_bytes / sizeof(address)));
117 _ptrmap.set_bit(BitMap::idx_t(idx));
118 }
119
// BitMapClosure that patches the marked pointer fields inside a single buffered
// object: each old "source" address is replaced by the address of its copy in
// the archive buffer, and the location is marked in the archive pointer map.
class RelocateEmbeddedPointers : public BitMapClosure {
  ArchiveBuilder* _builder;
  address _buffered_obj;    // start of the buffered copy of the object being patched
  BitMap::idx_t _start_idx; // bitmap index that corresponds to offset 0 of the object
public:
  RelocateEmbeddedPointers(ArchiveBuilder* builder, address buffered_obj, BitMap::idx_t start_idx) :
    _builder(builder), _buffered_obj(buffered_obj), _start_idx(start_idx) {}

  // Called once per set bit; one bit corresponds to one pointer-sized field.
  bool do_bit(BitMap::idx_t bit_offset) {
    size_t field_offset = size_t(bit_offset - _start_idx) * sizeof(address);
    address* ptr_loc = (address*)(_buffered_obj + field_offset);

    address old_p = *ptr_loc;
    address new_p = _builder->get_buffered_addr(old_p);

    log_trace(cds)("Ref: [" PTR_FORMAT "] -> " PTR_FORMAT " => " PTR_FORMAT,
                   p2i(ptr_loc), p2i(old_p), p2i(new_p));

    ArchivePtrMarker::set_and_mark_pointer(ptr_loc, new_p);
    return true; // keep iterating the bitmap
  }
};
142
143 void ArchiveBuilder::SourceObjList::relocate(int i, ArchiveBuilder* builder) {
144 SourceObjInfo* src_info = objs()->at(i);
145 assert(src_info->should_copy(), "must be");
146 BitMap::idx_t start = BitMap::idx_t(src_info->ptrmap_start()); // inclusive
147 BitMap::idx_t end = BitMap::idx_t(src_info->ptrmap_end()); // exclusive
148
149 RelocateEmbeddedPointers relocator(builder, src_info->buffered_addr(), start);
150 _ptrmap.iterate(&relocator, start, end);
151 }
152
// Sets up empty dump regions, pointer maps, and object tables, and installs
// this builder as the process-wide singleton (_current).
ArchiveBuilder::ArchiveBuilder() :
  _current_dump_region(nullptr),
  _buffer_bottom(nullptr),
  _requested_static_archive_bottom(nullptr),
  _requested_static_archive_top(nullptr),
  _requested_dynamic_archive_bottom(nullptr),
  _requested_dynamic_archive_top(nullptr),
  _mapped_static_archive_bottom(nullptr),
  _mapped_static_archive_top(nullptr),
  _buffer_to_requested_delta(0),
  _pz_region("pz", MAX_SHARED_DELTA), // protection zone -- used only during dumping; does NOT exist in cds archive.
  _rw_region("rw", MAX_SHARED_DELTA),
  _ro_region("ro", MAX_SHARED_DELTA),
  _ptrmap(mtClassShared),
  _rw_ptrmap(mtClassShared),
  _ro_ptrmap(mtClassShared),
  _rw_src_objs(),
  _ro_src_objs(),
  _src_obj_table(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE),
  _buffered_to_src_table(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE),
  _total_heap_region_size(0)
{
  _klasses = new (mtClassShared) GrowableArray<Klass*>(4 * K, mtClassShared);
  _symbols = new (mtClassShared) GrowableArray<Symbol*>(256 * K, mtClassShared);
  // Fixed seed — presumably so that entropy-based decisions are reproducible
  // across dumps; confirm against the users of _entropy_seed.
  _entropy_seed = 0x12345678;
  // Only one ArchiveBuilder may be alive at a time.
  assert(_current == nullptr, "must be");
  _current = this;
}
181
// Unregisters the singleton and releases everything the dump allocated:
// symbol refcounts, the gathered arrays, and the reserved output buffer.
ArchiveBuilder::~ArchiveBuilder() {
  assert(_current == this, "must be");
  _current = nullptr;

  // Balance the refcounts taken when the symbols were gathered.
  for (int i = 0; i < _symbols->length(); i++) {
    _symbols->at(i)->decrement_refcount();
  }

  delete _klasses;
  delete _symbols;
  if (_shared_rs.is_reserved()) {
    MemoryReserver::release(_shared_rs);
  }

  AOTArtifactFinder::dispose();
}
289
290 int ArchiveBuilder::compare_symbols_by_address(Symbol** a, Symbol** b) {
291 if (a[0] < b[0]) {
292 return -1;
293 } else {
294 assert(a[0] > b[0], "Duplicated symbol %s unexpected", (*a)->as_C_string());
295 return 1;
296 }
297 }
298
// Orders klasses by name, using Symbol::fast_compare on the name symbols.
int ArchiveBuilder::compare_klass_by_name(Klass** a, Klass** b) {
  return a[0]->name()->fast_compare(b[0]->name());
}
302
// Sorts the gathered classes by name (see compare_klass_by_name).
void ArchiveBuilder::sort_klasses() {
  log_info(cds)("Sorting classes ... ");
  _klasses->sort(compare_klass_by_name);
}
307
308 address ArchiveBuilder::reserve_buffer() {
309 size_t buffer_size = LP64_ONLY(CompressedClassSpaceSize) NOT_LP64(256 * M);
310 ReservedSpace rs = MemoryReserver::reserve(buffer_size,
311 MetaspaceShared::core_region_alignment(),
312 os::vm_page_size());
313 if (!rs.is_reserved()) {
314 log_error(cds)("Failed to reserve %zu bytes of output buffer.", buffer_size);
315 MetaspaceShared::unrecoverable_writing_error();
316 }
317
318 // buffer_bottom is the lowest address of the 2 core regions (rw, ro) when
319 // we are copying the class metadata into the buffer.
320 address buffer_bottom = (address)rs.base();
321 log_info(cds)("Reserved output buffer space at " PTR_FORMAT " [%zu bytes]",
322 p2i(buffer_bottom), buffer_size);
323 _shared_rs = rs;
324
325 _buffer_bottom = buffer_bottom;
326
327 if (CDSConfig::is_dumping_static_archive()) {
328 _current_dump_region = &_pz_region;
329 } else {
415 }
416
417 FollowMode follow_mode = get_follow_mode(ref);
418 SourceObjInfo src_info(ref, read_only, follow_mode);
419 bool created;
420 SourceObjInfo* p = _src_obj_table.put_if_absent(src_obj, src_info, &created);
421 if (created) {
422 if (_src_obj_table.maybe_grow()) {
423 log_info(cds, hashtables)("Expanded _src_obj_table table to %d", _src_obj_table.table_size());
424 }
425 }
426
427 #ifdef ASSERT
428 if (ref->msotype() == MetaspaceObj::MethodType) {
429 Method* m = (Method*)ref->obj();
430 assert(!RegeneratedClasses::has_been_regenerated((address)m->method_holder()),
431 "Should not archive methods in a class that has been regenerated");
432 }
433 #endif
434
435 assert(p->read_only() == src_info.read_only(), "must be");
436
437 if (created && src_info.should_copy()) {
438 if (read_only) {
439 _ro_src_objs.append(p);
440 } else {
441 _rw_src_objs.append(p);
442 }
443 return true; // Need to recurse into this ref only if we are copying it
444 } else {
445 return false;
446 }
447 }
448
449 void ArchiveBuilder::record_regenerated_object(address orig_src_obj, address regen_src_obj) {
450 // Record the fact that orig_src_obj has been replaced by regen_src_obj. All calls to get_buffered_addr(orig_src_obj)
451 // should return the same value as get_buffered_addr(regen_src_obj).
452 SourceObjInfo* p = _src_obj_table.get(regen_src_obj);
453 assert(p != nullptr, "regenerated object should always be dumped");
454 SourceObjInfo orig_src_info(orig_src_obj, p);
512 return SystemDictionaryShared::is_excluded_class(ik);
513 } else if (klass->is_objArray_klass()) {
514 Klass* bottom = ObjArrayKlass::cast(klass)->bottom_klass();
515 if (CDSConfig::is_dumping_dynamic_archive() && MetaspaceShared::is_shared_static(bottom)) {
516 // The bottom class is in the static archive so it's clearly not excluded.
517 return false;
518 } else if (bottom->is_instance_klass()) {
519 return SystemDictionaryShared::is_excluded_class(InstanceKlass::cast(bottom));
520 }
521 }
522
523 return false;
524 }
525
526 ArchiveBuilder::FollowMode ArchiveBuilder::get_follow_mode(MetaspaceClosure::Ref *ref) {
527 address obj = ref->obj();
528 if (CDSConfig::is_dumping_dynamic_archive() && MetaspaceShared::is_in_shared_metaspace(obj)) {
529 // Don't dump existing shared metadata again.
530 return point_to_it;
531 } else if (ref->msotype() == MetaspaceObj::MethodDataType ||
532 ref->msotype() == MetaspaceObj::MethodCountersType) {
533 return set_to_null;
534 } else {
535 if (ref->msotype() == MetaspaceObj::ClassType) {
536 Klass* klass = (Klass*)ref->obj();
537 assert(klass->is_klass(), "must be");
538 if (is_excluded(klass)) {
539 ResourceMark rm;
540 log_debug(cds, dynamic)("Skipping class (excluded): %s", klass->external_name());
541 return set_to_null;
542 }
543 }
544
545 return make_a_copy;
546 }
547 }
548
// Packs the current dump region against 'next' and makes 'next' the region
// that subsequent allocations go into.
void ArchiveBuilder::start_dump_region(DumpRegion* next) {
  current_dump_region()->pack(next);
  _current_dump_region = next;
}
553
705 }
706
707 bool ArchiveBuilder::has_been_buffered(address src_addr) const {
708 if (RegeneratedClasses::has_been_regenerated(src_addr) ||
709 _src_obj_table.get(src_addr) == nullptr ||
710 get_buffered_addr(src_addr) == nullptr) {
711 return false;
712 } else {
713 return true;
714 }
715 }
716
717 address ArchiveBuilder::get_buffered_addr(address src_addr) const {
718 SourceObjInfo* p = _src_obj_table.get(src_addr);
719 assert(p != nullptr, "src_addr " INTPTR_FORMAT " is used but has not been archived",
720 p2i(src_addr));
721
722 return p->buffered_addr();
723 }
724
725 address ArchiveBuilder::get_source_addr(address buffered_addr) const {
726 assert(is_in_buffer_space(buffered_addr), "must be");
727 address* src_p = _buffered_to_src_table.get(buffered_addr);
728 assert(src_p != nullptr && *src_p != nullptr, "must be");
729 return *src_p;
730 }
731
732 void ArchiveBuilder::relocate_embedded_pointers(ArchiveBuilder::SourceObjList* src_objs) {
733 for (int i = 0; i < src_objs->objs()->length(); i++) {
734 src_objs->relocate(i, this);
735 }
736 }
737
// Relocates embedded pointers in both core regions: rw first, then ro.
void ArchiveBuilder::relocate_metaspaceobj_embedded_pointers() {
  log_info(cds)("Relocating embedded pointers in core regions ... ");
  relocate_embedded_pointers(&_rw_src_objs);
  relocate_embedded_pointers(&_ro_src_objs);
}
743
// Bumps counter x, plus its aot-linked (x_a) and dump-time-initialized (x_i)
// sub-counters; relies on local bools 'aotlinked' and 'inited' at the use site.
#define ADD_COUNT(x) \
  x += 1; \
  x ## _a += aotlinked ? 1 : 0; \
  x ## _i += inited ? 1 : 0;

// Declares an instance-klass counter x together with its x_a/x_i sub-counters.
#define DECLARE_INSTANCE_KLASS_COUNTER(x) \
  int x = 0; \
  int x ## _a = 0; \
  int x ## _i = 0;
753
754 void ArchiveBuilder::make_klasses_shareable() {
755 DECLARE_INSTANCE_KLASS_COUNTER(num_instance_klasses);
756 DECLARE_INSTANCE_KLASS_COUNTER(num_boot_klasses);
757 DECLARE_INSTANCE_KLASS_COUNTER(num_vm_klasses);
758 DECLARE_INSTANCE_KLASS_COUNTER(num_platform_klasses);
759 DECLARE_INSTANCE_KLASS_COUNTER(num_app_klasses);
760 DECLARE_INSTANCE_KLASS_COUNTER(num_old_klasses);
761 DECLARE_INSTANCE_KLASS_COUNTER(num_hidden_klasses);
923 log_info(cds)(" boot " STATS_FORMAT, STATS_PARAMS(boot_klasses));
924 log_info(cds)(" vm " STATS_FORMAT, STATS_PARAMS(vm_klasses));
925 log_info(cds)(" platform " STATS_FORMAT, STATS_PARAMS(platform_klasses));
926 log_info(cds)(" app " STATS_FORMAT, STATS_PARAMS(app_klasses));
927 log_info(cds)(" unregistered " STATS_FORMAT, STATS_PARAMS(unregistered_klasses));
928 log_info(cds)(" (enum) " STATS_FORMAT, STATS_PARAMS(enum_klasses));
929 log_info(cds)(" (hidden) " STATS_FORMAT, STATS_PARAMS(hidden_klasses));
930 log_info(cds)(" (old) " STATS_FORMAT, STATS_PARAMS(old_klasses));
931 log_info(cds)(" (unlinked) = %5d, boot = %d, plat = %d, app = %d, unreg = %d",
932 num_unlinked_klasses, boot_unlinked, platform_unlinked, app_unlinked, unreg_unlinked);
933 log_info(cds)(" obj array classes = %5d", num_obj_array_klasses);
934 log_info(cds)(" type array classes = %5d", num_type_array_klasses);
935 log_info(cds)(" symbols = %5d", _symbols->length());
936
937 #undef STATS_FORMAT
938 #undef STATS_PARAMS
939
940 DynamicArchive::make_array_klasses_shareable();
941 }
942
// Serializes (writes or reads, depending on soc) the table headers that a
// dynamic archive needs at runtime.
void ArchiveBuilder::serialize_dynamic_archivable_items(SerializeClosure* soc) {
  SymbolTable::serialize_shared_table_header(soc, false);
  SystemDictionaryShared::serialize_dictionary_headers(soc, false);
  DynamicArchive::serialize_array_klasses(soc);
  AOTLinkedClassBulkLoader::serialize(soc, false);
}
949
950 uintx ArchiveBuilder::buffer_to_offset(address p) const {
951 address requested_p = to_requested(p);
952 assert(requested_p >= _requested_static_archive_bottom, "must be");
953 return requested_p - _requested_static_archive_bottom;
954 }
955
956 uintx ArchiveBuilder::any_to_offset(address p) const {
957 if (is_in_mapped_static_archive(p)) {
958 assert(CDSConfig::is_dumping_dynamic_archive(), "must be");
959 return p - _mapped_static_archive_bottom;
960 }
961 if (!is_in_buffer_space(p)) {
962 // p must be a "source" address
963 p = get_buffered_addr(p);
964 }
965 return buffer_to_offset(p);
966 }
967
968 address ArchiveBuilder::offset_to_buffered_address(u4 offset) const {
969 address requested_addr = _requested_static_archive_bottom + offset;
970 address buffered_addr = requested_addr - _buffer_to_requested_delta;
971 assert(is_in_buffer_space(buffered_addr), "bad offset");
972 return buffered_addr;
973 }
974
975 #if INCLUDE_CDS_JAVA_HEAP
976 narrowKlass ArchiveBuilder::get_requested_narrow_klass(Klass* k) {
977 assert(CDSConfig::is_dumping_heap(), "sanity");
978 k = get_buffered_klass(k);
979 Klass* requested_k = to_requested(k);
980 const int narrow_klass_shift = ArchiveBuilder::precomputed_narrow_klass_shift();
981 #ifdef ASSERT
982 const size_t klass_alignment = MAX2(SharedSpaceObjectAlignment, (size_t)nth_bit(narrow_klass_shift));
983 assert(is_aligned(k, klass_alignment), "Klass " PTR_FORMAT " misaligned.", p2i(k));
984 #endif
985 address narrow_klass_base = _requested_static_archive_bottom; // runtime encoding base == runtime mapping start
986 // Note: use the "raw" version of encode that takes explicit narrow klass base and shift. Don't use any
987 // of the variants that do sanity checks, nor any of those that use the current - dump - JVM's encoding setting.
1064 ArchivePtrMarker::compact(_max_non_null_offset);
1065 }
1066 };
1067
#ifdef _LP64
// Returns the narrowKlass shift assumed when pre-computing narrow Klass IDs
// for the archive.
int ArchiveBuilder::precomputed_narrow_klass_shift() {
  // Legacy Mode:
  // We use 32 bits for narrowKlass, which should cover the full 4G Klass range. Shift can be 0.
  // CompactObjectHeader Mode:
  // narrowKlass is much smaller, and we use the highest possible shift value to later get the maximum
  // Klass encoding range.
  //
  // Note that all of this may change in the future, if we decide to correct the pre-calculated
  // narrow Klass IDs at archive load time.
  assert(UseCompressedClassPointers, "Only needed for compressed class pointers");
  return UseCompactObjectHeaders ? CompressedKlassPointers::max_shift() : 0;
}
#endif // _LP64
1082
// Packs the final region and rewrites all pointers in the buffer so they are
// valid at the "requested" address where the archive is expected to be mapped
// at runtime. The template flag selects static vs. dynamic relocation.
void ArchiveBuilder::relocate_to_requested() {
  ro_region()->pack();

  size_t my_archive_size = buffer_top() - buffer_bottom();

  if (CDSConfig::is_dumping_static_archive()) {
    _requested_static_archive_top = _requested_static_archive_bottom + my_archive_size;
    RelocateBufferToRequested<true> patcher(this);
    patcher.doit();
  } else {
    assert(CDSConfig::is_dumping_dynamic_archive(), "must be");
    _requested_dynamic_archive_top = _requested_dynamic_archive_bottom + my_archive_size;
    RelocateBufferToRequested<false> patcher(this);
    patcher.doit();
  }
}
1099
1100 // Write detailed info to a mapfile to analyze contents of the archive.
1101 // static dump:
1102 // java -Xshare:dump -Xlog:cds+map=trace:file=cds.map:none:filesize=0
1103 // dynamic dump:
1104 // java -cp MyApp.jar -XX:ArchiveClassesAtExit=MyApp.jsa \
1386 assert(java_lang_Class::is_instance(scratch_mirror), "sanity");
1387 if (java_lang_Class::is_primitive(scratch_mirror)) {
1388 for (int i = T_BOOLEAN; i < T_VOID+1; i++) {
1389 BasicType bt = (BasicType)i;
1390 if (!is_reference_type(bt) && scratch_mirror == HeapShared::scratch_java_mirror(bt)) {
1391 oop orig_mirror = Universe::java_mirror(bt);
1392 java_lang_Class::print_signature(orig_mirror, st);
1393 return;
1394 }
1395 }
1396 ShouldNotReachHere();
1397 }
1398 java_lang_Class::print_signature(scratch_mirror, st);
1399 }
1400
1401 static void log_heap_roots() {
1402 LogStreamHandle(Trace, cds, map, oops) st;
1403 if (st.is_enabled()) {
1404 for (int i = 0; i < HeapShared::pending_roots()->length(); i++) {
1405 st.print("roots[%4d]: ", i);
1406 print_oop_info_cr(&st, HeapShared::pending_roots()->at(i));
1407 }
1408 }
1409 }
1410
// Prints a one-line description of a source-space heap object.
// Example output:
// - The first number is the requested address (if print_requested_addr == true)
// - The second number is the narrowOop version of the requested address (if UseCompressedOops == true)
//     0x00000007ffc7e840 (0xfff8fd08) java.lang.Class Ljava/util/Array;
//     0x00000007ffc000f8 (0xfff8001f) [B length: 11
static void print_oop_info_cr(outputStream* st, oop source_oop, bool print_requested_addr = true) {
  if (source_oop == nullptr) {
    st->print_cr("null");
  } else {
    ResourceMark rm;
    // Where this object will live once the archived heap is mapped at runtime.
    oop requested_obj = ArchiveHeapWriter::source_obj_to_requested_obj(source_oop);
    if (print_requested_addr) {
      st->print(PTR_FORMAT " ", p2i(requested_obj));
    }
    if (UseCompressedOops) {
      st->print("(0x%08x) ", CompressedOops::narrow_oop_value(requested_obj));
    }
    if (source_oop->is_array()) {
      int array_len = arrayOop(source_oop)->length();
      st->print_cr("%s length: %d", source_oop->klass()->external_name(), array_len);
    } else {
      st->print("%s", source_oop->klass()->external_name());

      // Strings and Class mirrors get extra detail.
      if (java_lang_String::is_instance(source_oop)) {
        st->print(" ");
        java_lang_String::print(source_oop, st);
      } else if (java_lang_Class::is_instance(source_oop)) {
        oop scratch_mirror = source_oop;

        st->print(" ");
        print_class_signature_for_mirror(st, scratch_mirror);

        Klass* src_klass = java_lang_Class::as_Klass(scratch_mirror);
        if (src_klass != nullptr && src_klass->is_instance_klass()) {
          InstanceKlass* buffered_klass =
            ArchiveBuilder::current()->get_buffered_addr(InstanceKlass::cast(src_klass));
          // Flag mirrors whose class was initialized at dump time.
          if (buffered_klass->has_aot_initialized_mirror()) {
            st->print(" (aot-inited)");
          }
        }
      }
      st->cr();
    }
  }
}
1505 if (heap_info->is_used()) {
1506 log_heap_region(heap_info);
1507 }
1508 #endif
1509
1510 log_info(cds, map)("[End of CDS archive map]");
1511 }
1512 }; // end ArchiveBuilder::CDSMapLogger
1513
// Prints the per-type allocation statistics collected during the dump.
void ArchiveBuilder::print_stats() {
  _alloc_stats.print_stats(int(_ro_region.used()), int(_rw_region.used()));
}
1517
// Writes the rw/ro regions, the relocation bitmaps, and (if used) the heap
// region to the archive file, then computes the header CRC, writes the header,
// and closes the file. Optionally logs statistics and a detailed map.
void ArchiveBuilder::write_archive(FileMapInfo* mapinfo, ArchiveHeapInfo* heap_info) {
  // Make sure NUM_CDS_REGIONS (exported in cds.h) agrees with
  // MetaspaceShared::n_regions (internal to hotspot).
  assert(NUM_CDS_REGIONS == MetaspaceShared::n_regions, "sanity");

  write_region(mapinfo, MetaspaceShared::rw, &_rw_region, /*read_only=*/false,/*allow_exec=*/false);
  write_region(mapinfo, MetaspaceShared::ro, &_ro_region, /*read_only=*/true, /*allow_exec=*/false);

  // Split pointer map into read-write and read-only bitmaps
  ArchivePtrMarker::initialize_rw_ro_maps(&_rw_ptrmap, &_ro_ptrmap);

  size_t bitmap_size_in_bytes;
  // write_bitmap_region returns a C-heap buffer; freed at the end of this function.
  char* bitmap = mapinfo->write_bitmap_region(ArchivePtrMarker::rw_ptrmap(), ArchivePtrMarker::ro_ptrmap(), heap_info,
                                              bitmap_size_in_bytes);

  if (heap_info->is_used()) {
    _total_heap_region_size = mapinfo->write_heap_region(heap_info);
  }

  print_region_stats(mapinfo, heap_info);

  mapinfo->set_requested_base((char*)MetaspaceShared::requested_base_address());
  mapinfo->set_header_crc(mapinfo->compute_header_crc());
  // After this point, we should not write any data into mapinfo->header() since this
  // would corrupt its checksum we have calculated before.
  mapinfo->write_header();
  mapinfo->close();

  if (log_is_enabled(Info, cds)) {
    log_info(cds)("Full module graph = %s", CDSConfig::is_dumping_full_module_graph() ? "enabled" : "disabled");
    print_stats();
  }

  if (log_is_enabled(Info, cds, map)) {
    CDSMapLogger::log(this, mapinfo, heap_info,
                      bitmap, bitmap_size_in_bytes);
  }
  CDS_JAVA_HEAP_ONLY(HeapShared::destroy_archived_object_cache());
  FREE_C_HEAP_ARRAY(char, bitmap);
}
1558
// Hands a single dump region over to the FileMapInfo writer.
void ArchiveBuilder::write_region(FileMapInfo* mapinfo, int region_idx, DumpRegion* dump_region, bool read_only, bool allow_exec) {
  mapinfo->write_region(region_idx, dump_region->base(), dump_region->used(), read_only, allow_exec);
}
1562
1563 void ArchiveBuilder::print_region_stats(FileMapInfo *mapinfo, ArchiveHeapInfo* heap_info) {
1564 // Print statistics of all the regions
1565 const size_t bitmap_used = mapinfo->region_at(MetaspaceShared::bm)->used();
1566 const size_t bitmap_reserved = mapinfo->region_at(MetaspaceShared::bm)->used_aligned();
1567 const size_t total_reserved = _ro_region.reserved() + _rw_region.reserved() +
1568 bitmap_reserved +
1569 _total_heap_region_size;
1570 const size_t total_bytes = _ro_region.used() + _rw_region.used() +
1571 bitmap_used +
1572 _total_heap_region_size;
1573 const double total_u_perc = percent_of(total_bytes, total_reserved);
1574
1575 _rw_region.print(total_reserved);
1576 _ro_region.print(total_reserved);
1577
1578 print_bitmap_region_stats(bitmap_used, total_reserved);
1579
1580 if (heap_info->is_used()) {
1581 print_heap_region_stats(heap_info, total_reserved);
1582 }
1583
1584 log_debug(cds)("total : %9zu [100.0%% of total] out of %9zu bytes [%5.1f%% used]",
1585 total_bytes, total_reserved, total_u_perc);
1586 }
1587
// Logs the bitmap region line. The bm region is always fully used, so its own
// size is printed as the "out of" value alongside a fixed 100.0% figure.
void ArchiveBuilder::print_bitmap_region_stats(size_t size, size_t total_size) {
  log_debug(cds)("bm space: %9zu [ %4.1f%% of total] out of %9zu bytes [100.0%% used]",
                 size, size/double(total_size)*100.0, size);
}
1592
1593 void ArchiveBuilder::print_heap_region_stats(ArchiveHeapInfo *info, size_t total_size) {
1594 char* start = info->buffer_start();
1595 size_t size = info->buffer_byte_size();
1596 char* top = start + size;
|
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "cds/aotArtifactFinder.hpp"
26 #include "cds/aotClassLinker.hpp"
27 #include "cds/aotLinkedClassBulkLoader.hpp"
28 #include "cds/archiveBuilder.hpp"
29 #include "cds/archiveHeapWriter.hpp"
30 #include "cds/archiveUtils.hpp"
31 #include "cds/cdsConfig.hpp"
32 #include "cds/cppVtables.hpp"
33 #include "cds/dumpAllocStats.hpp"
34 #include "cds/dynamicArchive.hpp"
35 #include "cds/finalImageRecipes.hpp"
36 #include "cds/heapShared.hpp"
37 #include "cds/metaspaceShared.hpp"
38 #include "cds/regeneratedClasses.hpp"
39 #include "classfile/classLoader.hpp"
40 #include "classfile/classLoaderDataShared.hpp"
41 #include "classfile/classLoaderExt.hpp"
42 #include "classfile/javaClasses.hpp"
43 #include "classfile/symbolTable.hpp"
44 #include "classfile/systemDictionaryShared.hpp"
45 #include "classfile/vmClasses.hpp"
46 #include "code/aotCodeCache.hpp"
47 #include "interpreter/abstractInterpreter.hpp"
48 #include "jvm.h"
49 #include "logging/log.hpp"
50 #include "logging/logStream.hpp"
51 #include "memory/allStatic.hpp"
52 #include "memory/memoryReserver.hpp"
53 #include "memory/memRegion.hpp"
54 #include "memory/resourceArea.hpp"
55 #include "oops/compressedKlass.inline.hpp"
56 #include "oops/instanceKlass.hpp"
57 #include "oops/methodCounters.hpp"
58 #include "oops/methodData.hpp"
59 #include "oops/objArrayKlass.hpp"
60 #include "oops/objArrayOop.inline.hpp"
61 #include "oops/oopHandle.inline.hpp"
62 #include "oops/trainingData.hpp"
63 #include "runtime/arguments.hpp"
64 #include "runtime/fieldDescriptor.inline.hpp"
65 #include "runtime/globals_extension.hpp"
66 #include "runtime/javaThread.hpp"
67 #include "runtime/sharedRuntime.hpp"
68 #include "utilities/align.hpp"
69 #include "utilities/bitMap.inline.hpp"
70 #include "utilities/formatBuffer.hpp"
71
// The singleton ArchiveBuilder used by the current dump operation (null when no dump is active).
ArchiveBuilder* ArchiveBuilder::_current = nullptr;
73
// On destruction, records in the dump-time allocation statistics how many
// bytes were allocated in the ro region while this mark was alive
// (current ro top minus the top captured at construction).
ArchiveBuilder::OtherROAllocMark::~OtherROAllocMark() {
  char* newtop = ArchiveBuilder::current()->_ro_region.top();
  ArchiveBuilder::alloc_stats()->record_other_type(int(newtop - _oldtop), true);
}
78
// Starts with a 16K-bit embedded-pointer map and capacity for 128K source objects.
ArchiveBuilder::SourceObjList::SourceObjList() : _ptrmap(16 * K, mtClassShared) {
  _total_bytes = 0;
  _objs = new (mtClassShared) GrowableArray<SourceObjInfo*>(128 * K, mtClassShared);
}
117 assert(field_offset_in_bytes >= 0, "must be");
118 assert(field_offset_in_bytes + intx(sizeof(intptr_t)) <= intx(src_obj_size), "must be");
119 assert(is_aligned(field_offset_in_bytes, sizeof(address)), "must be");
120
121 BitMap::idx_t idx = BitMap::idx_t(src_info->ptrmap_start() + (uintx)(field_offset_in_bytes / sizeof(address)));
122 _ptrmap.set_bit(BitMap::idx_t(idx));
123 }
124
// BitMapClosure that patches the marked pointer fields inside a single
// buffered object. Stored values may carry low-bit tags (see MetaspaceClosure
// strip_tags/decode_tags/add_tags): the tag bits are preserved across the
// relocation, and references whose target was dropped (FollowMode
// set_to_null) are written back as null.
class RelocateEmbeddedPointers : public BitMapClosure {
  ArchiveBuilder* _builder;
  address _buffered_obj;    // start of the buffered copy of the object being patched
  BitMap::idx_t _start_idx; // bitmap index that corresponds to offset 0 of the object
public:
  RelocateEmbeddedPointers(ArchiveBuilder* builder, address buffered_obj, BitMap::idx_t start_idx) :
    _builder(builder), _buffered_obj(buffered_obj), _start_idx(start_idx) {}

  // Called once per set bit; one bit corresponds to one pointer-sized field.
  bool do_bit(BitMap::idx_t bit_offset) {
    size_t field_offset = size_t(bit_offset - _start_idx) * sizeof(address);
    address* ptr_loc = (address*)(_buffered_obj + field_offset);

    address old_p_with_tags = *ptr_loc;
    assert(old_p_with_tags != nullptr, "null ptrs shouldn't have been marked");

    address old_p = MetaspaceClosure::strip_tags(old_p_with_tags);
    uintx tags = MetaspaceClosure::decode_tags(old_p_with_tags);
    address new_p = _builder->get_buffered_addr(old_p);

    bool nulled;
    if (new_p == nullptr) {
      // old_p had a FollowMode of set_to_null
      nulled = true;
    } else {
      // Re-apply the original tag bits to the relocated pointer.
      new_p = MetaspaceClosure::add_tags(new_p, tags);
      nulled = false;
    }

    log_trace(cds)("Ref: [" PTR_FORMAT "] -> " PTR_FORMAT " => " PTR_FORMAT " %zu",
                   p2i(ptr_loc), p2i(old_p) + tags, p2i(new_p), tags);

    ArchivePtrMarker::set_and_mark_pointer(ptr_loc, new_p);
    // Feed the relocation counters (_relocated_ptr_info).
    ArchiveBuilder::current()->count_relocated_pointer(tags != 0, nulled);
    return true; // keep iterating the bitmap
  }
};
161
162 void ArchiveBuilder::SourceObjList::relocate(int i, ArchiveBuilder* builder) {
163 SourceObjInfo* src_info = objs()->at(i);
164 assert(src_info->should_copy(), "must be");
165 BitMap::idx_t start = BitMap::idx_t(src_info->ptrmap_start()); // inclusive
166 BitMap::idx_t end = BitMap::idx_t(src_info->ptrmap_end()); // exclusive
167
168 RelocateEmbeddedPointers relocator(builder, src_info->buffered_addr(), start);
169 _ptrmap.iterate(&relocator, start, end);
170 }
171
// Sets up empty dump regions (including the cached-code "cc" region), pointer
// maps, object tables, and relocation counters, and installs this builder as
// the process-wide singleton (_current).
ArchiveBuilder::ArchiveBuilder() :
  _current_dump_region(nullptr),
  _buffer_bottom(nullptr),
  _requested_static_archive_bottom(nullptr),
  _requested_static_archive_top(nullptr),
  _requested_dynamic_archive_bottom(nullptr),
  _requested_dynamic_archive_top(nullptr),
  _mapped_static_archive_bottom(nullptr),
  _mapped_static_archive_top(nullptr),
  _buffer_to_requested_delta(0),
  _pz_region("pz", MAX_SHARED_DELTA), // protection zone -- used only during dumping; does NOT exist in cds archive.
  _rw_region("rw", MAX_SHARED_DELTA),
  _ro_region("ro", MAX_SHARED_DELTA),
  _cc_region("cc", MAX_SHARED_DELTA),
  _ptrmap(mtClassShared),
  _rw_ptrmap(mtClassShared),
  _ro_ptrmap(mtClassShared),
  _cc_ptrmap(mtClassShared),
  _rw_src_objs(),
  _ro_src_objs(),
  _src_obj_table(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE),
  _buffered_to_src_table(INITIAL_TABLE_SIZE, MAX_TABLE_SIZE),
  _total_heap_region_size(0)
{
  _klasses = new (mtClassShared) GrowableArray<Klass*>(4 * K, mtClassShared);
  _symbols = new (mtClassShared) GrowableArray<Symbol*>(256 * K, mtClassShared);
  // Fixed seed — presumably so that entropy-based decisions are reproducible
  // across dumps; confirm against the users of _entropy_seed.
  _entropy_seed = 0x12345678;
  // Counters updated by count_relocated_pointer() during relocation.
  _relocated_ptr_info._num_ptrs = 0;
  _relocated_ptr_info._num_tagged_ptrs = 0;
  _relocated_ptr_info._num_nulled_ptrs = 0;
  // Only one ArchiveBuilder may be alive at a time.
  assert(_current == nullptr, "must be");
  _current = this;
}
205
// Unregisters the singleton and releases everything the dump allocated:
// symbol refcounts, the gathered arrays, and the reserved output buffer.
ArchiveBuilder::~ArchiveBuilder() {
  assert(_current == this, "must be");
  _current = nullptr;

  // Balance the refcounts taken when the symbols were gathered.
  for (int i = 0; i < _symbols->length(); i++) {
    _symbols->at(i)->decrement_refcount();
  }

  delete _klasses;
  delete _symbols;
  if (_shared_rs.is_reserved()) {
    MemoryReserver::release(_shared_rs);
  }

  AOTArtifactFinder::dispose();
}
313
314 int ArchiveBuilder::compare_symbols_by_address(Symbol** a, Symbol** b) {
315 if (a[0] < b[0]) {
316 return -1;
317 } else {
318 assert(a[0] > b[0], "Duplicated symbol %s unexpected", (*a)->as_C_string());
319 return 1;
320 }
321 }
322
// Orders klasses by name, using Symbol::fast_compare on the name symbols.
int ArchiveBuilder::compare_klass_by_name(Klass** a, Klass** b) {
  return a[0]->name()->fast_compare(b[0]->name());
}
326
// Sorts the gathered classes by name (see compare_klass_by_name).
void ArchiveBuilder::sort_klasses() {
  log_info(cds)("Sorting classes ... ");
  _klasses->sort(compare_klass_by_name);
}
331
332 address ArchiveBuilder::reserve_buffer() {
333 // AOTCodeCache::max_aot_code_size() accounts for cached code region.
334 size_t buffer_size = LP64_ONLY(CompressedClassSpaceSize) NOT_LP64(256 * M) + AOTCodeCache::max_aot_code_size();
335 ReservedSpace rs = MemoryReserver::reserve(buffer_size,
336 MetaspaceShared::core_region_alignment(),
337 os::vm_page_size());
338 if (!rs.is_reserved()) {
339 log_error(cds)("Failed to reserve %zu bytes of output buffer.", buffer_size);
340 MetaspaceShared::unrecoverable_writing_error();
341 }
342
343 // buffer_bottom is the lowest address of the 2 core regions (rw, ro) when
344 // we are copying the class metadata into the buffer.
345 address buffer_bottom = (address)rs.base();
346 log_info(cds)("Reserved output buffer space at " PTR_FORMAT " [%zu bytes]",
347 p2i(buffer_bottom), buffer_size);
348 _shared_rs = rs;
349
350 _buffer_bottom = buffer_bottom;
351
352 if (CDSConfig::is_dumping_static_archive()) {
353 _current_dump_region = &_pz_region;
354 } else {
440 }
441
442 FollowMode follow_mode = get_follow_mode(ref);
443 SourceObjInfo src_info(ref, read_only, follow_mode);
444 bool created;
445 SourceObjInfo* p = _src_obj_table.put_if_absent(src_obj, src_info, &created);
446 if (created) {
447 if (_src_obj_table.maybe_grow()) {
448 log_info(cds, hashtables)("Expanded _src_obj_table table to %d", _src_obj_table.table_size());
449 }
450 }
451
452 #ifdef ASSERT
453 if (ref->msotype() == MetaspaceObj::MethodType) {
454 Method* m = (Method*)ref->obj();
455 assert(!RegeneratedClasses::has_been_regenerated((address)m->method_holder()),
456 "Should not archive methods in a class that has been regenerated");
457 }
458 #endif
459
460 if (ref->msotype() == MetaspaceObj::MethodDataType) {
461 MethodData* md = (MethodData*)ref->obj();
462 md->clean_method_data(false /* always_clean */);
463 }
464
465 assert(p->read_only() == src_info.read_only(), "must be");
466
467 if (created && src_info.should_copy()) {
468 if (read_only) {
469 _ro_src_objs.append(p);
470 } else {
471 _rw_src_objs.append(p);
472 }
473 return true; // Need to recurse into this ref only if we are copying it
474 } else {
475 return false;
476 }
477 }
478
479 void ArchiveBuilder::record_regenerated_object(address orig_src_obj, address regen_src_obj) {
480 // Record the fact that orig_src_obj has been replaced by regen_src_obj. All calls to get_buffered_addr(orig_src_obj)
481 // should return the same value as get_buffered_addr(regen_src_obj).
482 SourceObjInfo* p = _src_obj_table.get(regen_src_obj);
483 assert(p != nullptr, "regenerated object should always be dumped");
484 SourceObjInfo orig_src_info(orig_src_obj, p);
542 return SystemDictionaryShared::is_excluded_class(ik);
543 } else if (klass->is_objArray_klass()) {
544 Klass* bottom = ObjArrayKlass::cast(klass)->bottom_klass();
545 if (CDSConfig::is_dumping_dynamic_archive() && MetaspaceShared::is_shared_static(bottom)) {
546 // The bottom class is in the static archive so it's clearly not excluded.
547 return false;
548 } else if (bottom->is_instance_klass()) {
549 return SystemDictionaryShared::is_excluded_class(InstanceKlass::cast(bottom));
550 }
551 }
552
553 return false;
554 }
555
556 ArchiveBuilder::FollowMode ArchiveBuilder::get_follow_mode(MetaspaceClosure::Ref *ref) {
557 address obj = ref->obj();
558 if (CDSConfig::is_dumping_dynamic_archive() && MetaspaceShared::is_in_shared_metaspace(obj)) {
559 // Don't dump existing shared metadata again.
560 return point_to_it;
561 } else if (ref->msotype() == MetaspaceObj::MethodDataType ||
562 ref->msotype() == MetaspaceObj::MethodCountersType ||
563 ref->msotype() == MetaspaceObj::KlassTrainingDataType ||
564 ref->msotype() == MetaspaceObj::MethodTrainingDataType ||
565 ref->msotype() == MetaspaceObj::CompileTrainingDataType) {
566 return TrainingData::need_data() ? make_a_copy : set_to_null;
567 } else if (ref->msotype() == MetaspaceObj::AdapterHandlerEntryType) {
568 if (CDSConfig::is_dumping_adapters()) {
569 AdapterHandlerEntry* entry = (AdapterHandlerEntry*)ref->obj();
570 return AdapterHandlerLibrary::is_abstract_method_adapter(entry) ? set_to_null : make_a_copy;
571 } else {
572 return set_to_null;
573 }
574 } else {
575 if (ref->msotype() == MetaspaceObj::ClassType) {
576 Klass* klass = (Klass*)ref->obj();
577 assert(klass->is_klass(), "must be");
578 if (is_excluded(klass)) {
579 ResourceMark rm;
580 log_debug(cds, dynamic)("Skipping class (excluded): %s", klass->external_name());
581 return set_to_null;
582 }
583 }
584
585 return make_a_copy;
586 }
587 }
588
// Finish (pack) the current dump region and make 'next' the region that
// subsequent allocations go into.
void ArchiveBuilder::start_dump_region(DumpRegion* next) {
  current_dump_region()->pack(next);
  _current_dump_region = next;
}
593
745 }
746
747 bool ArchiveBuilder::has_been_buffered(address src_addr) const {
748 if (RegeneratedClasses::has_been_regenerated(src_addr) ||
749 _src_obj_table.get(src_addr) == nullptr ||
750 get_buffered_addr(src_addr) == nullptr) {
751 return false;
752 } else {
753 return true;
754 }
755 }
756
// Returns the address, inside the output buffer, of the copy of the source
// object at src_addr. The object must already have been archived.
address ArchiveBuilder::get_buffered_addr(address src_addr) const {
  SourceObjInfo* p = _src_obj_table.get(src_addr);
  assert(p != nullptr, "src_addr " INTPTR_FORMAT " is used but has not been archived",
         p2i(src_addr));

  return p->buffered_addr();
}
764
765 bool ArchiveBuilder::has_been_archived(address src_addr) const {
766 SourceObjInfo* p = _src_obj_table.get(src_addr);
767 return (p != nullptr);
768 }
769
// Reverse mapping of get_buffered_addr(): given an address inside the output
// buffer, return the original (source) address the object was copied from.
address ArchiveBuilder::get_source_addr(address buffered_addr) const {
  assert(is_in_buffer_space(buffered_addr), "must be");
  address* src_p = _buffered_to_src_table.get(buffered_addr);
  assert(src_p != nullptr && *src_p != nullptr, "must be");
  return *src_p;
}
776
// Relocate the embedded pointers of every source object in src_objs,
// one relocate() call per object.
void ArchiveBuilder::relocate_embedded_pointers(ArchiveBuilder::SourceObjList* src_objs) {
  for (int i = 0; i < src_objs->objs()->length(); i++) {
    src_objs->relocate(i, this);
  }
}
782
// Relocate the embedded pointers of all copied objects in both core regions
// (rw first, then ro), then log the totals that were accumulated via
// count_relocated_pointer().
void ArchiveBuilder::relocate_metaspaceobj_embedded_pointers() {
  log_info(cds)("Relocating embedded pointers in core regions ... ");
  relocate_embedded_pointers(&_rw_src_objs);
  relocate_embedded_pointers(&_ro_src_objs);
  log_info(cds)("Relocating %zu pointers, %zu tagged, %zu nulled",
                _relocated_ptr_info._num_ptrs,
                _relocated_ptr_info._num_tagged_ptrs,
                _relocated_ptr_info._num_nulled_ptrs);
}
792
// Bump counter 'x' plus its two sub-counters: x_a is incremented when the
// local boolean 'aotlinked' is true, x_i when 'inited' is true. Both
// booleans must be in scope at the expansion site.
#define ADD_COUNT(x) \
  x += 1; \
  x ## _a += aotlinked ? 1 : 0; \
  x ## _i += inited ? 1 : 0;

// Declare a counter triple for use with ADD_COUNT: x, x_a, and x_i.
#define DECLARE_INSTANCE_KLASS_COUNTER(x) \
  int x = 0; \
  int x ## _a = 0; \
  int x ## _i = 0;
802
803 void ArchiveBuilder::make_klasses_shareable() {
804 DECLARE_INSTANCE_KLASS_COUNTER(num_instance_klasses);
805 DECLARE_INSTANCE_KLASS_COUNTER(num_boot_klasses);
806 DECLARE_INSTANCE_KLASS_COUNTER(num_vm_klasses);
807 DECLARE_INSTANCE_KLASS_COUNTER(num_platform_klasses);
808 DECLARE_INSTANCE_KLASS_COUNTER(num_app_klasses);
809 DECLARE_INSTANCE_KLASS_COUNTER(num_old_klasses);
810 DECLARE_INSTANCE_KLASS_COUNTER(num_hidden_klasses);
972 log_info(cds)(" boot " STATS_FORMAT, STATS_PARAMS(boot_klasses));
973 log_info(cds)(" vm " STATS_FORMAT, STATS_PARAMS(vm_klasses));
974 log_info(cds)(" platform " STATS_FORMAT, STATS_PARAMS(platform_klasses));
975 log_info(cds)(" app " STATS_FORMAT, STATS_PARAMS(app_klasses));
976 log_info(cds)(" unregistered " STATS_FORMAT, STATS_PARAMS(unregistered_klasses));
977 log_info(cds)(" (enum) " STATS_FORMAT, STATS_PARAMS(enum_klasses));
978 log_info(cds)(" (hidden) " STATS_FORMAT, STATS_PARAMS(hidden_klasses));
979 log_info(cds)(" (old) " STATS_FORMAT, STATS_PARAMS(old_klasses));
980 log_info(cds)(" (unlinked) = %5d, boot = %d, plat = %d, app = %d, unreg = %d",
981 num_unlinked_klasses, boot_unlinked, platform_unlinked, app_unlinked, unreg_unlinked);
982 log_info(cds)(" obj array classes = %5d", num_obj_array_klasses);
983 log_info(cds)(" type array classes = %5d", num_type_array_klasses);
984 log_info(cds)(" symbols = %5d", _symbols->length());
985
986 #undef STATS_FORMAT
987 #undef STATS_PARAMS
988
989 DynamicArchive::make_array_klasses_shareable();
990 }
991
992 void ArchiveBuilder::make_training_data_shareable() {
993 auto clean_td = [&] (address& src_obj, SourceObjInfo& info) {
994 if (!is_in_buffer_space(info.buffered_addr())) {
995 return;
996 }
997
998 if (info.msotype() == MetaspaceObj::KlassTrainingDataType ||
999 info.msotype() == MetaspaceObj::MethodTrainingDataType ||
1000 info.msotype() == MetaspaceObj::CompileTrainingDataType) {
1001 TrainingData* buffered_td = (TrainingData*)info.buffered_addr();
1002 buffered_td->remove_unshareable_info();
1003 } else if (info.msotype() == MetaspaceObj::MethodDataType) {
1004 MethodData* buffered_mdo = (MethodData*)info.buffered_addr();
1005 buffered_mdo->remove_unshareable_info();
1006 } else if (info.msotype() == MetaspaceObj::MethodCountersType) {
1007 MethodCounters* buffered_mc = (MethodCounters*)info.buffered_addr();
1008 buffered_mc->remove_unshareable_info();
1009 }
1010 };
1011 _src_obj_table.iterate_all(clean_td);
1012 }
1013
// Serialize (or deserialize, depending on 'soc') the table headers and other
// items needed by a dynamic archive.
// NOTE(review): the 'false' arguments presumably select the non-static
// (dynamic) variants of the tables — confirm against the callees.
void ArchiveBuilder::serialize_dynamic_archivable_items(SerializeClosure* soc) {
  SymbolTable::serialize_shared_table_header(soc, false);
  SystemDictionaryShared::serialize_dictionary_headers(soc, false);
  DynamicArchive::serialize_array_klasses(soc);
  AOTLinkedClassBulkLoader::serialize(soc, false);
}
1020
1021 uintx ArchiveBuilder::buffer_to_offset(address p) const {
1022 address requested_p = to_requested(p);
1023 assert(requested_p >= _requested_static_archive_bottom, "must be");
1024 return requested_p - _requested_static_archive_bottom;
1025 }
1026
1027 uintx ArchiveBuilder::any_to_offset(address p) const {
1028 if (is_in_mapped_static_archive(p)) {
1029 assert(CDSConfig::is_dumping_dynamic_archive(), "must be");
1030 return p - _mapped_static_archive_bottom;
1031 }
1032 if (!is_in_buffer_space(p)) {
1033 // p must be a "source" address
1034 p = get_buffered_addr(p);
1035 }
1036 return buffer_to_offset(p);
1037 }
1038
// Close the ro region and begin allocating into the cached-code (cc) region.
void ArchiveBuilder::start_cc_region() {
  ro_region()->pack();
  start_dump_region(&_cc_region);
}
1043
// Finish allocation in the cached-code (cc) region.
void ArchiveBuilder::end_cc_region() {
  _cc_region.pack();
}
1047
// Inverse of buffer_to_offset(): convert an offset (relative to the requested
// static archive bottom) back to an address inside the output buffer.
address ArchiveBuilder::offset_to_buffered_address(u4 offset) const {
  address requested_addr = _requested_static_archive_bottom + offset;
  address buffered_addr = requested_addr - _buffer_to_requested_delta;
  assert(is_in_buffer_space(buffered_addr), "bad offset");
  return buffered_addr;
}
1054
1055 #if INCLUDE_CDS_JAVA_HEAP
1056 narrowKlass ArchiveBuilder::get_requested_narrow_klass(Klass* k) {
1057 assert(CDSConfig::is_dumping_heap(), "sanity");
1058 k = get_buffered_klass(k);
1059 Klass* requested_k = to_requested(k);
1060 const int narrow_klass_shift = ArchiveBuilder::precomputed_narrow_klass_shift();
1061 #ifdef ASSERT
1062 const size_t klass_alignment = MAX2(SharedSpaceObjectAlignment, (size_t)nth_bit(narrow_klass_shift));
1063 assert(is_aligned(k, klass_alignment), "Klass " PTR_FORMAT " misaligned.", p2i(k));
1064 #endif
1065 address narrow_klass_base = _requested_static_archive_bottom; // runtime encoding base == runtime mapping start
1066 // Note: use the "raw" version of encode that takes explicit narrow klass base and shift. Don't use any
1067 // of the variants that do sanity checks, nor any of those that use the current - dump - JVM's encoding setting.
1144 ArchivePtrMarker::compact(_max_non_null_offset);
1145 }
1146 };
1147
1148 #ifdef _LP64
1149 int ArchiveBuilder::precomputed_narrow_klass_shift() {
1150 // Legacy Mode:
1151 // We use 32 bits for narrowKlass, which should cover the full 4G Klass range. Shift can be 0.
1152 // CompactObjectHeader Mode:
1153 // narrowKlass is much smaller, and we use the highest possible shift value to later get the maximum
1154 // Klass encoding range.
1155 //
1156 // Note that all of this may change in the future, if we decide to correct the pre-calculated
1157 // narrow Klass IDs at archive load time.
1158 assert(UseCompressedClassPointers, "Only needed for compressed class pointers");
1159 return UseCompactObjectHeaders ? CompressedKlassPointers::max_shift() : 0;
1160 }
1161 #endif // _LP64
1162
// Shift all archived pointers from buffer addresses to the "requested"
// addresses at which the archive asks to be mapped at runtime.
void ArchiveBuilder::relocate_to_requested() {
  // Ensure the ro region is packed before measuring the archive size.
  if (!ro_region()->is_packed()) {
    ro_region()->pack();
  }

  size_t my_archive_size = buffer_top() - buffer_bottom();

  // The template parameter selects static vs. dynamic relocation.
  if (CDSConfig::is_dumping_static_archive()) {
    _requested_static_archive_top = _requested_static_archive_bottom + my_archive_size;
    RelocateBufferToRequested<true> patcher(this);
    patcher.doit();
  } else {
    assert(CDSConfig::is_dumping_dynamic_archive(), "must be");
    _requested_dynamic_archive_top = _requested_dynamic_archive_bottom + my_archive_size;
    RelocateBufferToRequested<false> patcher(this);
    patcher.doit();
  }
}
1181
1182 // Write detailed info to a mapfile to analyze contents of the archive.
1183 // static dump:
1184 // java -Xshare:dump -Xlog:cds+map=trace:file=cds.map:none:filesize=0
1185 // dynamic dump:
1186 // java -cp MyApp.jar -XX:ArchiveClassesAtExit=MyApp.jsa \
1468 assert(java_lang_Class::is_instance(scratch_mirror), "sanity");
1469 if (java_lang_Class::is_primitive(scratch_mirror)) {
1470 for (int i = T_BOOLEAN; i < T_VOID+1; i++) {
1471 BasicType bt = (BasicType)i;
1472 if (!is_reference_type(bt) && scratch_mirror == HeapShared::scratch_java_mirror(bt)) {
1473 oop orig_mirror = Universe::java_mirror(bt);
1474 java_lang_Class::print_signature(orig_mirror, st);
1475 return;
1476 }
1477 }
1478 ShouldNotReachHere();
1479 }
1480 java_lang_Class::print_signature(scratch_mirror, st);
1481 }
1482
// Log every pending heap root (at trace level) for the CDS map file.
static void log_heap_roots() {
  LogStreamHandle(Trace, cds, map, oops) st;
  if (st.is_enabled()) {
    for (int i = 0; i < HeapShared::pending_roots()->length(); i++) {
      st.print("roots[%4d]: ", i);
      print_oop_info_cr(&st, HeapShared::pending_roots()->at(i).resolve());
    }
  }
}
1492
1493 // Example output:
1494 // - The first number is the requested address (if print_requested_addr == true)
1495 // - The second number is the narrowOop version of the requested address (if UseCompressedOops == true)
1496 // 0x00000007ffc7e840 (0xfff8fd08) java.lang.Class Ljava/util/Array;
1497 // 0x00000007ffc000f8 (0xfff8001f) [B length: 11
// Print one line describing source_oop for the CDS map file: the requested
// address (optional), the narrowOop value (when UseCompressedOops), the
// class name, and extra detail for arrays, Strings, MethodTypes and Class
// mirrors. See the example output in the comment above.
static void print_oop_info_cr(outputStream* st, oop source_oop, bool print_requested_addr = true) {
  if (source_oop == nullptr) {
    st->print_cr("null");
  } else {
    ResourceMark rm;
    oop requested_obj = ArchiveHeapWriter::source_obj_to_requested_obj(source_oop);
    if (print_requested_addr) {
      st->print(PTR_FORMAT " ", p2i(requested_obj));
    }
    if (UseCompressedOops) {
      st->print("(0x%08x) ", CompressedOops::narrow_oop_value(requested_obj));
    }
    if (source_oop->is_array()) {
      int array_len = arrayOop(source_oop)->length();
      st->print_cr("%s length: %d", source_oop->klass()->external_name(), array_len);
    } else {
      st->print("%s", source_oop->klass()->external_name());

      if (java_lang_String::is_instance(source_oop)) {
        st->print(" ");
        java_lang_String::print(source_oop, st);
      } else if (java_lang_invoke_MethodType::is_instance(source_oop)) {
        st->print(" ");
        java_lang_invoke_MethodType::print_signature(source_oop, st);
      } else if (java_lang_Class::is_instance(source_oop)) {
        // Archived mirrors are "scratch" copies; print their signature and
        // mark aot-initialized mirrors.
        oop scratch_mirror = source_oop;

        st->print(" ");
        print_class_signature_for_mirror(st, scratch_mirror);

        Klass* src_klass = java_lang_Class::as_Klass(scratch_mirror);
        if (src_klass != nullptr && src_klass->is_instance_klass()) {
          InstanceKlass* buffered_klass =
            ArchiveBuilder::current()->get_buffered_addr(InstanceKlass::cast(src_klass));
          if (buffered_klass->has_aot_initialized_mirror()) {
            st->print(" (aot-inited)");
          }
        }
      }
      st->cr();
    }
  }
}
1541 #endif // INCLUDE_CDS_JAVA_HEAP
1590 if (heap_info->is_used()) {
1591 log_heap_region(heap_info);
1592 }
1593 #endif
1594
1595 log_info(cds, map)("[End of CDS archive map]");
1596 }
1597 }; // end ArchiveBuilder::CDSMapLogger
1598
// Print allocation statistics for the ro and rw regions.
void ArchiveBuilder::print_stats() {
  _alloc_stats.print_stats(int(_ro_region.used()), int(_rw_region.used()));
}
1602
// Write the metadata regions, the bitmap region, and (if used) the heap
// region into the archive file, then finalize the header and close the file.
void ArchiveBuilder::write_archive(FileMapInfo* mapinfo, ArchiveHeapInfo* heap_info) {
  // Make sure NUM_CDS_REGIONS (exported in cds.h) agrees with
  // MetaspaceShared::n_regions (internal to hotspot).
  assert(NUM_CDS_REGIONS == MetaspaceShared::n_regions, "sanity");

  write_region(mapinfo, MetaspaceShared::rw, &_rw_region, /*read_only=*/false,/*allow_exec=*/false);
  write_region(mapinfo, MetaspaceShared::ro, &_ro_region, /*read_only=*/true, /*allow_exec=*/false);
  write_region(mapinfo, MetaspaceShared::cc, &_cc_region, /*read_only=*/false,/*allow_exec=*/false);

  // Split pointer map into read-write and read-only bitmaps
  ArchivePtrMarker::initialize_rw_ro_cc_maps(&_rw_ptrmap, &_ro_ptrmap, &_cc_ptrmap);

  size_t bitmap_size_in_bytes;
  // The returned buffer is C-heap allocated; freed at the end of this method.
  char* bitmap = mapinfo->write_bitmap_region(ArchivePtrMarker::rw_ptrmap(),
                                              ArchivePtrMarker::ro_ptrmap(),
                                              ArchivePtrMarker::cc_ptrmap(),
                                              heap_info,
                                              bitmap_size_in_bytes);

  if (heap_info->is_used()) {
    _total_heap_region_size = mapinfo->write_heap_region(heap_info);
  }

  print_region_stats(mapinfo, heap_info);

  mapinfo->set_requested_base((char*)MetaspaceShared::requested_base_address());
  mapinfo->set_header_crc(mapinfo->compute_header_crc());
  // After this point, we should not write any data into mapinfo->header() since this
  // would corrupt its checksum we have calculated before.
  mapinfo->write_header();
  mapinfo->close();

  if (log_is_enabled(Info, cds)) {
    log_info(cds)("Full module graph = %s", CDSConfig::is_dumping_full_module_graph() ? "enabled" : "disabled");
    print_stats();
  }

  if (log_is_enabled(Info, cds, map)) {
    CDSMapLogger::log(this, mapinfo, heap_info,
                      bitmap, bitmap_size_in_bytes);
  }
  CDS_JAVA_HEAP_ONLY(HeapShared::destroy_archived_object_cache());
  FREE_C_HEAP_ARRAY(char, bitmap);
}
1647
// Write a single dump region into the archive file via mapinfo.
void ArchiveBuilder::write_region(FileMapInfo* mapinfo, int region_idx, DumpRegion* dump_region, bool read_only, bool allow_exec) {
  mapinfo->write_region(region_idx, dump_region->base(), dump_region->used(), read_only, allow_exec);
}
1651
1652 void ArchiveBuilder::count_relocated_pointer(bool tagged, bool nulled) {
1653 _relocated_ptr_info._num_ptrs ++;
1654 _relocated_ptr_info._num_tagged_ptrs += tagged ? 1 : 0;
1655 _relocated_ptr_info._num_nulled_ptrs += nulled ? 1 : 0;
1656 }
1657
void ArchiveBuilder::print_region_stats(FileMapInfo *mapinfo, ArchiveHeapInfo* heap_info) {
  // Print statistics of all the regions
  const size_t bitmap_used = mapinfo->region_at(MetaspaceShared::bm)->used();
  const size_t bitmap_reserved = mapinfo->region_at(MetaspaceShared::bm)->used_aligned();
  // NOTE(review): _cc_region is printed below but is not included in
  // total_reserved/total_bytes — confirm whether that is intentional.
  const size_t total_reserved = _ro_region.reserved()  + _rw_region.reserved() +
                                bitmap_reserved +
                                _total_heap_region_size;
  const size_t total_bytes = _ro_region.used()  + _rw_region.used() +
                             bitmap_used +
                             _total_heap_region_size;
  const double total_u_perc = percent_of(total_bytes, total_reserved);

  _rw_region.print(total_reserved);
  _ro_region.print(total_reserved);
  _cc_region.print(total_reserved);

  print_bitmap_region_stats(bitmap_used, total_reserved);

  if (heap_info->is_used()) {
    print_heap_region_stats(heap_info, total_reserved);
  }

  log_debug(cds)("total   : %9zu [100.0%% of total] out of %9zu bytes [%5.1f%% used]",
                 total_bytes, total_reserved, total_u_perc);
}
1683
1684 void ArchiveBuilder::print_bitmap_region_stats(size_t size, size_t total_size) {
1685 log_debug(cds)("bm space: %9zu [ %4.1f%% of total] out of %9zu bytes [100.0%% used]",
1686 size, size/double(total_size)*100.0, size);
1687 }
1688
1689 void ArchiveBuilder::print_heap_region_stats(ArchiveHeapInfo *info, size_t total_size) {
1690 char* start = info->buffer_start();
1691 size_t size = info->buffer_byte_size();
1692 char* top = start + size;
|