7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "classfile/classFileParser.hpp"
26 #include "classfile/fieldLayoutBuilder.hpp"
27 #include "jvm.h"
28 #include "memory/resourceArea.hpp"
29 #include "oops/array.hpp"
30 #include "oops/fieldStreams.inline.hpp"
31 #include "oops/instanceMirrorKlass.hpp"
32 #include "oops/instanceKlass.inline.hpp"
33 #include "oops/klass.inline.hpp"
34 #include "runtime/fieldDescriptor.inline.hpp"
35
36
// Constructor for blocks that do not map to a declared Java field:
// EMPTY, RESERVED, PADDING and INHERITED blocks. Such blocks have no
// field index and use the weakest alignment constraint (1 byte).
LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
  _next_block(nullptr),
  _prev_block(nullptr),
  _kind(kind),
  _offset(-1),       // -1 means the block has not been allocated yet
  _alignment(1),
  _size(size),
  _field_index(-1),  // no Java field associated with this block
  _is_reference(false) {
  assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED,
         "Otherwise, should use the constructor with a field index argument");
  assert(size > 0, "Sanity check");
}
50
51
// Constructor for blocks that represent a declared Java field: REGULAR,
// FLATTENED and INHERITED blocks. The index is the field's index in the
// FieldInfo array; is_reference tells whether the field holds an oop.
LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment, bool is_reference) :
  _next_block(nullptr),
  _prev_block(nullptr),
  _kind(kind),
  _offset(-1),  // -1 means the block has not been allocated yet
  _alignment(alignment),
  _size(size),
  _field_index(index),
  _is_reference(is_reference) {
  assert(kind == REGULAR || kind == FLATTENED || kind == INHERITED,
         "Other kind do not have a field index");
  assert(size > 0, "Sanity check");
  assert(alignment > 0, "Sanity check");
}
66
67 bool LayoutRawBlock::fit(int size, int alignment) {
68 int adjustment = 0;
69 if ((_offset % alignment) != 0) {
70 adjustment = alignment - (_offset % alignment);
71 }
72 return _size >= size + adjustment;
73 }
74
// A FieldGroup is a set of fields that must be allocated together: the
// default (root) group, the static fields group, or one @Contended group.
// Field lists are allocated lazily, on the first field added.
FieldGroup::FieldGroup(int contended_group) :
  _next(nullptr),
  _primitive_fields(nullptr),
  _oop_fields(nullptr),
  _contended_group(contended_group), // -1 means no contended group, 0 means default contended group
  _oop_count(0) {}
81
82 void FieldGroup::add_primitive_field(int idx, BasicType type) {
83 int size = type2aelembytes(type);
84 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */, false);
85 if (_primitive_fields == nullptr) {
86 _primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
87 }
88 _primitive_fields->append(block);
89 }
90
91 void FieldGroup::add_oop_field(int idx) {
92 int size = type2aelembytes(T_OBJECT);
93 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */, true);
94 if (_oop_fields == nullptr) {
95 _oop_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
96 }
97 _oop_fields->append(block);
98 _oop_count++;
99 }
100
101 void FieldGroup::sort_by_size() {
102 if (_primitive_fields != nullptr) {
103 _primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
104 }
105 }
106
// A FieldLayout is a doubly-linked list of LayoutRawBlocks ordered by
// increasing offsets: _blocks is the head, _last the trailing block, and
// _start the first block where new fields may be allocated. All three stay
// null until one of the initialize_*_layout() methods is called.
FieldLayout::FieldLayout(GrowableArray<FieldInfo>* field_info, ConstantPool* cp) :
  _field_info(field_info),
  _cp(cp),
  _blocks(nullptr),
  _start(_blocks),
  _last(_blocks) {}
113
// Creates the initial layout for static fields: one unbounded EMPTY block,
// preceded by a RESERVED block covering the java.lang.Class instance header
// when its size is already known.
void FieldLayout::initialize_static_layout() {
  _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  _blocks->set_offset(0);
  _last = _blocks;
  _start = _blocks;
  // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
  // during bootstrapping, the size of the java.lang.Class is still not known when layout
  // of static field is computed. Field offsets are fixed later when the size is known
  // (see java_lang_Class::fixup_mirror())
  if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
    _blocks->set_offset(0);
  }
}
128
129 void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass) {
130 if (super_klass == nullptr) {
131 _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
132 _blocks->set_offset(0);
133 _last = _blocks;
134 _start = _blocks;
135 insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
136 } else {
137 bool has_fields = reconstruct_layout(super_klass);
138 fill_holes(super_klass);
139 if (!super_klass->has_contended_annotations() || !has_fields) {
140 _start = _blocks; // start allocating fields from the first empty block
141 } else {
142 _start = _last; // append fields at the end of the reconstructed layout
143 }
144 }
145 }
146
147 LayoutRawBlock* FieldLayout::first_field_block() {
148 LayoutRawBlock* block = _start;
149 while (block->kind() != LayoutRawBlock::INHERITED && block->kind() != LayoutRawBlock::REGULAR
150 && block->kind() != LayoutRawBlock::FLATTENED && block->kind() != LayoutRawBlock::PADDING) {
151 block = block->next_block();
152 }
153 return block;
154 }
155
156
// Insert a set of fields into a layout using a best-fit strategy.
// For each field, search for the smallest empty slot able to fit the field
// (satisfying both size and alignment requirements), if none is found,
// add the field at the end of the layout.
// Fields cannot be inserted before the block specified in the "start" argument
void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == nullptr) return;
  if (start == nullptr) start = this->_start;
  // Memoization of the previous field's requirements and search outcome,
  // to avoid repeating a search that is known to fail.
  bool last_search_success = false;
  int last_size = 0;
  int last_alignment = 0;
  for (int i = 0; i < list->length(); i ++) {
    LayoutRawBlock* b = list->at(i);
    LayoutRawBlock* cursor = nullptr;
    LayoutRawBlock* candidate = nullptr;

    // if start is the last block, just append the field
    if (start == last_block()) {
      candidate = last_block();
    }
    // Before iterating over the layout to find an empty slot fitting the field's requirements,
    // check if the previous field had the same requirements and if the search for a fitting slot
    // was successful. If the requirements were the same but the search failed, a new search will
    // fail the same way, so just append the field at the end of the layout.
    else if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
      candidate = last_block();
    } else {
      // Iterate over the layout to find an empty slot fitting the field's requirements
      last_size = b->size();
      last_alignment = b->alignment();
      cursor = last_block()->prev_block();
      assert(cursor != nullptr, "Sanity check");
      last_search_success = true;
      // Walk backward from the end, keeping the smallest fitting empty slot (best fit)
      while (cursor != start) {
        if (cursor->kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
          if (candidate == nullptr || cursor->size() < candidate->size()) {
            candidate = cursor;
          }
        }
        cursor = cursor->prev_block();
      }
      if (candidate == nullptr) {
        // No fitting slot: append at the end and remember the failure
        candidate = last_block();
        last_search_success = false;
      }
      assert(candidate != nullptr, "Candidate must not be null");
      assert(candidate->kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
      assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
    }

    insert_field_block(candidate, b);
  }
}
210
// Used for classes with hard coded field offsets, insert a field at the specified offset
void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
  assert(block != nullptr, "Sanity check");
  block->set_offset(offset);
  if (start == nullptr) {
    start = this->_start;
  }
  LayoutRawBlock* slot = start;
  while (slot != nullptr) {
    // A matching slot either contains the requested offset, or is the trailing block
    if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
        slot == _last){
      assert(slot->kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
      // NOTE(review): this bound compares the slot's size against an absolute end
      // offset (block->offset() + block->size()) rather than the space remaining in
      // the slot — confirm this is the intended (stricter) invariant.
      assert(slot->size() >= block->offset() + block->size() ,"Matching slot must be big enough");
      if (slot->offset() < block->offset()) {
        // Split the empty slot so that the field starts exactly at the requested offset
        int adjustment = block->offset() - slot->offset();
        LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
        insert(slot, adj);
      }
      insert(slot, block);
      if (slot->size() == 0) {
        // The field consumed the whole empty slot
        remove(slot);
      }
      // Publish the final offset back to the shared FieldInfo array
      _field_info->adr_at(block->field_index())->set_offset(block->offset());
      return;
    }
    slot = slot->next_block();
  }
  fatal("Should have found a matching slot above, corrupted layout or invalid offset");
}
240
// The allocation logic uses a best fit strategy: the set of fields is allocated
// in the first empty slot big enough to contain the whole set (including padding
// to fit alignment constraints).
void FieldLayout::add_contiguously(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == nullptr) return;
  if (start == nullptr) {
    start = _start;
  }
  // This code assumes that if the first block is well aligned, the following
  // blocks would naturally be well aligned (no need for adjustment)
  int size = 0;
  for (int i = 0; i < list->length(); i++) {
    size += list->at(i)->size();
  }

  LayoutRawBlock* candidate = nullptr;
  if (start == last_block()) {
    candidate = last_block();
  } else {
    // Search backward from the end for an empty slot able to hold the whole set
    LayoutRawBlock* first = list->at(0);
    candidate = last_block()->prev_block();
    while (candidate->kind() != LayoutRawBlock::EMPTY || !candidate->fit(size, first->alignment())) {
      if (candidate == start) {
        // No fitting slot found before start: append at the end of the layout
        candidate = last_block();
        break;
      }
      candidate = candidate->prev_block();
    }
    assert(candidate != nullptr, "Candidate must not be null");
    assert(candidate->kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
    assert(candidate->fit(size, first->alignment()), "Candidate must be able to store the whole contiguous block");
  }

  for (int i = 0; i < list->length(); i++) {
    LayoutRawBlock* b = list->at(i);
    insert_field_block(candidate, b);
    assert((candidate->offset() % b->alignment() == 0), "Contiguous blocks must be naturally well aligned");
  }
}
280
// Inserts the field block at the beginning of the given empty slot, adding
// an EMPTY padding block first if the slot's offset does not satisfy the
// field's alignment, then records the resulting offset in the FieldInfo array.
LayoutRawBlock* FieldLayout::insert_field_block(LayoutRawBlock* slot, LayoutRawBlock* block) {
  assert(slot->kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
  if (slot->offset() % block->alignment() != 0) {
    // Pad until the slot's offset satisfies the field's alignment constraint
    int adjustment = block->alignment() - (slot->offset() % block->alignment());
    LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
    insert(slot, adj);
  }
  insert(slot, block);
  if (slot->size() == 0) {
    // The field consumed the whole empty slot
    remove(slot);
  }
  _field_info->adr_at(block->field_index())->set_offset(block->offset());
  return block;
}
295
// Rebuilds the layout of the instance fields inherited from ik and its super
// classes as a chain of INHERITED blocks sorted by increasing offset, headed
// by a RESERVED block covering the object header. Returns true if at least
// one inherited instance field was found.
bool FieldLayout::reconstruct_layout(const InstanceKlass* ik) {
  bool has_instance_fields = false;
  GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
  while (ik != nullptr) {
    for (AllFieldStream fs(ik->fieldinfo_stream(), ik->constants()); !fs.done(); fs.next()) {
      BasicType type = Signature::basic_type(fs.signature());
      // static fields do not occupy space in instances, skip them
      if (fs.access_flags().is_static()) continue;
      has_instance_fields = true;
      int size = type2aelembytes(type);
      // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
      LayoutRawBlock* block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size, false);
      block->set_offset(fs.offset());
      all_fields->append(block);
    }
    ik = ik->super() == nullptr ? nullptr : InstanceKlass::cast(ik->super());
  }

  all_fields->sort(LayoutRawBlock::compare_offset);
  _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
  _blocks->set_offset(0);
  _last = _blocks;

  // Chain the sorted blocks into the layout's doubly-linked list
  for(int i = 0; i < all_fields->length(); i++) {
    LayoutRawBlock* b = all_fields->at(i);
    _last->set_next_block(b);
    b->set_prev_block(_last);
    _last = b;
  }
  _start = _blocks;
  return has_instance_fields;
}
328
329 // Called during the reconstruction of a layout, after fields from super
330 // classes have been inserted. It fills unused slots between inserted fields
331 // with EMPTY blocks, so the regular field insertion methods would work.
332 // This method handles classes with @Contended annotations differently
333 // by inserting PADDING blocks instead of EMPTY block to prevent subclasses'
334 // fields to interfere with contended fields/classes.
// Fills the gaps between inherited blocks with EMPTY blocks (or PADDING
// blocks when the super class is @Contended), then terminates the layout
// with an unbounded EMPTY block where new fields can always be appended.
void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
  assert(_blocks != nullptr, "Sanity check");
  assert(_blocks->offset() == 0, "first block must be at offset zero");
  LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
  LayoutRawBlock* b = _blocks;
  while (b->next_block() != nullptr) {
    if (b->next_block()->offset() > (b->offset() + b->size())) {
      // There is a gap between b and its successor: fill it with a new block
      int size = b->next_block()->offset() - (b->offset() + b->size());
      LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
      empty->set_offset(b->offset() + b->size());
      empty->set_next_block(b->next_block());
      b->next_block()->set_prev_block(empty);
      b->set_next_block(empty);
      empty->set_prev_block(b);
    }
    b = b->next_block();
  }
  assert(b->next_block() == nullptr, "Invariant at this point");
  assert(b->kind() != LayoutRawBlock::EMPTY, "Sanity check");

  // If the super class has @Contended annotation, a padding block is
  // inserted at the end to ensure that fields from the subclasses won't share
  // the cache line of the last field of the contended class
  if (super_klass->has_contended_annotations() && ContendedPaddingWidth > 0) {
    LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
    p->set_offset(b->offset() + b->size());
    b->set_next_block(p);
    p->set_prev_block(b);
    b = p;
  }

  // Trailing unbounded EMPTY block: new fields can always be appended here
  LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  last->set_offset(b->offset() + b->size());
  assert(last->offset() > 0, "Sanity check");
  b->set_next_block(last);
  last->set_prev_block(b);
  _last = last;
}
373
// Inserts block at the beginning of the empty slot, shifting and shrinking
// the slot accordingly. The caller must ensure that the slot's current
// offset satisfies the block's alignment constraint.
LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
  assert(slot->kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
  assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
  block->set_offset(slot->offset());
  slot->set_offset(slot->offset() + block->size());
  assert((slot->size() - block->size()) < slot->size(), "underflow checking");
  assert(slot->size() - block->size() >= 0, "no negative size allowed");
  slot->set_size(slot->size() - block->size());
  // Splice the block into the doubly-linked list just before the shrunk slot
  block->set_prev_block(slot->prev_block());
  block->set_next_block(slot);
  slot->set_prev_block(block);
  if (block->prev_block() != nullptr) {
    block->prev_block()->set_next_block(block);
  }
  if (_blocks == slot) {
    // The block was inserted at the head of the layout
    _blocks = block;
  }
  return block;
}
393
// Unlinks block from the layout. The trailing block (_last) can never be
// removed; _start is pulled back if it pointed at the removed block.
void FieldLayout::remove(LayoutRawBlock* block) {
  assert(block != nullptr, "Sanity check");
  assert(block != _last, "Sanity check");
  if (_blocks == block) {
    // Removing the head: its successor becomes the new head
    _blocks = block->next_block();
    if (_blocks != nullptr) {
      _blocks->set_prev_block(nullptr);
    }
  } else {
    assert(block->prev_block() != nullptr, "_prev should be set for non-head blocks");
    block->prev_block()->set_next_block(block->next_block());
    block->next_block()->set_prev_block(block->prev_block());
  }
  if (block == _start) {
    _start = block->prev_block();
  }
}
411
// Prints a human-readable description of the layout on the given stream,
// one line per block ("@offset \"name\" signature size/alignment KIND").
// For INHERITED blocks, the field's name and signature are looked up in
// the super class hierarchy by matching offsets.
void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super) {
  ResourceMark rm;
  LayoutRawBlock* b = _blocks;
  while(b != _last) {
    switch(b->kind()) {
      case LayoutRawBlock::REGULAR: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        output->print_cr(" @%d \"%s\" %s %d/%d %s",
                         b->offset(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         b->size(),
                         b->alignment(),
                         "REGULAR");
        break;
      }
      case LayoutRawBlock::FLATTENED: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        output->print_cr(" @%d \"%s\" %s %d/%d %s",
                         b->offset(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         b->size(),
                         b->alignment(),
                         "FLATTENED");
        break;
      }
      case LayoutRawBlock::RESERVED: {
        output->print_cr(" @%d %d/- %s",
                         b->offset(),
                         b->size(),
                         "RESERVED");
        break;
      }
      case LayoutRawBlock::INHERITED: {
        assert(!is_static, "Static fields are not inherited in layouts");
        assert(super != nullptr, "super klass must be provided to retrieve inherited fields info");
        bool found = false;
        const InstanceKlass* ik = super;
        // Walk up the hierarchy until the field at this offset is found
        while (!found && ik != nullptr) {
          for (AllFieldStream fs(ik->fieldinfo_stream(), ik->constants()); !fs.done(); fs.next()) {
            if (fs.offset() == b->offset()) {
              output->print_cr(" @%d \"%s\" %s %d/%d %s",
                               b->offset(),
                               fs.name()->as_C_string(),
                               fs.signature()->as_C_string(),
                               b->size(),
                               b->size(), // so far, alignment constraint == size, will change with Valhalla
                               "INHERITED");
              found = true;
              break;
            }
          }
          ik = ik->java_super();
        }
        break;
      }
      case LayoutRawBlock::EMPTY:
        output->print_cr(" @%d %d/1 %s",
                         b->offset(),
                         b->size(),
                         "EMPTY");
        break;
      case LayoutRawBlock::PADDING:
        output->print_cr(" @%d %d/1 %s",
                         b->offset(),
                         b->size(),
                         "PADDING");
        break;
    }
    b = b->next_block();
  }
}
485
// Gathers all the inputs needed to compute the layout of a class, and the
// output structure (FieldLayoutInfo) that is passed back to the class file
// parser. Layouts and field groups are created later, in prologue().
FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, const InstanceKlass* super_klass, ConstantPool* constant_pool,
                                       GrowableArray<FieldInfo>* field_info, bool is_contended, FieldLayoutInfo* info) :
  _classname(classname),
  _super_klass(super_klass),
  _constant_pool(constant_pool),
  _field_info(field_info),
  _info(info),
  _root_group(nullptr),
  _contended_groups(GrowableArray<FieldGroup*>(8)),
  _static_fields(nullptr),
  _layout(nullptr),
  _static_layout(nullptr),
  _nonstatic_oopmap_count(0),
  _alignment(-1),
  _has_nonstatic_fields(false),
  _is_contended(is_contended) {}
502
503
504 FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
505 assert(g > 0, "must only be called for named contended groups");
506 FieldGroup* fg = nullptr;
507 for (int i = 0; i < _contended_groups.length(); i++) {
508 fg = _contended_groups.at(i);
509 if (fg->contended_group() == g) return fg;
510 }
511 fg = new FieldGroup(g);
512 _contended_groups.append(fg);
513 return fg;
514 }
515
516 void FieldLayoutBuilder::prologue() {
517 _layout = new FieldLayout(_field_info, _constant_pool);
518 const InstanceKlass* super_klass = _super_klass;
519 _layout->initialize_instance_layout(super_klass);
520 if (super_klass != nullptr) {
521 _has_nonstatic_fields = super_klass->has_nonstatic_fields();
522 }
523 _static_layout = new FieldLayout(_field_info, _constant_pool);
524 _static_layout->initialize_static_layout();
525 _static_fields = new FieldGroup();
526 _root_group = new FieldGroup();
527 }
528
529 // Field sorting for regular classes:
530 // - fields are sorted in static and non-static fields
531 // - non-static fields are also sorted according to their contention group
532 // (support of the @Contended annotation)
533 // - @Contended annotation is ignored for static fields
534 void FieldLayoutBuilder::regular_field_sorting() {
535 int idx = 0;
536 for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
537 FieldInfo ctrl = _field_info->at(0);
538 FieldGroup* group = nullptr;
539 FieldInfo fieldinfo = *it;
540 if (fieldinfo.access_flags().is_static()) {
541 group = _static_fields;
542 } else {
543 _has_nonstatic_fields = true;
544 if (fieldinfo.field_flags().is_contended()) {
545 int g = fieldinfo.contended_group();
546 if (g == 0) {
547 group = new FieldGroup(true);
548 _contended_groups.append(group);
549 } else {
550 group = get_or_create_contended_group(g);
551 }
552 } else {
553 group = _root_group;
554 }
555 }
556 assert(group != nullptr, "invariant");
557 BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
558 switch(type) {
559 case T_BYTE:
560 case T_CHAR:
561 case T_DOUBLE:
562 case T_FLOAT:
563 case T_INT:
564 case T_LONG:
565 case T_SHORT:
566 case T_BOOLEAN:
567 group->add_primitive_field(idx, type);
568 break;
569 case T_OBJECT:
570 case T_ARRAY:
571 if (group != _static_fields) _nonstatic_oopmap_count++;
572 group->add_oop_field(idx);
573 break;
574 default:
575 fatal("Something wrong?");
576 }
577 }
578 _root_group->sort_by_size();
579 _static_fields->sort_by_size();
580 if (!_contended_groups.is_empty()) {
581 for (int i = 0; i < _contended_groups.length(); i++) {
582 _contended_groups.at(i)->sort_by_size();
583 }
584 }
585 }
586
587 void FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
588 if (ContendedPaddingWidth > 0) {
589 LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
590 _layout->insert(slot, padding);
591 }
592 }
593
594 // Computation of regular classes layout is an evolution of the previous default layout
595 // (FieldAllocationStyle 1):
596 // - primitive fields are allocated first (from the biggest to the smallest)
597 // - then oop fields are allocated, either in existing gaps or at the end of
598 // the layout
void FieldLayoutBuilder::compute_regular_layout() {
  bool need_tail_padding = false;
  prologue();
  regular_field_sorting();

  if (_is_contended) {
    // The whole class is @Contended: pad before its first own field
    _layout->set_start(_layout->last_block());
    // insertion is currently easy because the current strategy doesn't try to fill holes
    // in super classes layouts => the _start block is by consequence the _last_block
    insert_contended_padding(_layout->start());
    need_tail_padding = true;
  }
  // Default group: primitive fields first (biggest to smallest), then oops
  _layout->add(_root_group->primitive_fields());
  _layout->add(_root_group->oop_fields());

  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      LayoutRawBlock* start = _layout->last_block();
      // Pad before each contended group so it does not share a cache line
      // with the preceding fields
      insert_contended_padding(start);
      _layout->add(cg->primitive_fields(), start);
      _layout->add(cg->oop_fields(), start);
      need_tail_padding = true;
    }
  }

  if (need_tail_padding) {
    insert_contended_padding(_layout->last_block());
  }

  // Static fields: oop fields are allocated contiguously, then primitives best-fit
  _static_layout->add_contiguously(this->_static_fields->oop_fields());
  _static_layout->add(this->_static_fields->primitive_fields());

  epilogue();
}
634
// Computes the non-static oop maps and the size information (instance size,
// static field size, non-static field size) needed for InstanceKlass
// creation, stores them in _info, and optionally prints the layouts.
void FieldLayoutBuilder::epilogue() {
  // Computing oopmaps
  int super_oop_map_count = (_super_klass == nullptr) ? 0 :_super_klass->nonstatic_oop_map_count();
  int max_oop_map_count = super_oop_map_count + _nonstatic_oopmap_count;

  OopMapBlocksBuilder* nonstatic_oop_maps =
      new OopMapBlocksBuilder(max_oop_map_count);
  if (super_oop_map_count > 0) {
    nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
                                                    _super_klass->nonstatic_oop_map_count());
  }

  if (_root_group->oop_fields() != nullptr) {
    for (int i = 0; i < _root_group->oop_fields()->length(); i++) {
      LayoutRawBlock* b = _root_group->oop_fields()->at(i);
      nonstatic_oop_maps->add(b->offset(), 1);
    }
  }

  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      if (cg->oop_count() > 0) {
        assert(cg->oop_fields() != nullptr && cg->oop_fields()->at(0) != nullptr, "oop_count > 0 but no oop fields found");
        // NOTE(review): a single map entry spanning oop_count oops assumes the
        // group's oops were laid out contiguously — confirm against FieldLayout::add
        nonstatic_oop_maps->add(cg->oop_fields()->at(0)->offset(), cg->oop_count());
      }
    }
  }

  nonstatic_oop_maps->compact();

  int instance_end = align_up(_layout->last_block()->offset(), wordSize);
  int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
  int static_fields_size = (static_fields_end -
      InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
  int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);

  // Pass back information needed for InstanceKlass creation

  _info->oop_map_blocks = nonstatic_oop_maps;
  _info->_instance_size = align_object_size(instance_end / wordSize);
  _info->_static_field_size = static_fields_size;
  _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
  _info->_has_nonstatic_fields = _has_nonstatic_fields;

  if (PrintFieldLayout) {
    ResourceMark rm;
    tty->print_cr("Layout of class %s", _classname->as_C_string());
    tty->print_cr("Instance fields:");
    _layout->print(tty, false, _super_klass);
    tty->print_cr("Static fields:");
    _static_layout->print(tty, true, nullptr);
    tty->print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
    tty->print_cr("---");
  }
}
691
// Entry point: computes the complete instance and static field layouts.
void FieldLayoutBuilder::build_layout() {
  compute_regular_layout();
}
|
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25 #include "classfile/classFileParser.hpp"
26 #include "classfile/fieldLayoutBuilder.hpp"
27 #include "classfile/systemDictionary.hpp"
28 #include "classfile/vmSymbols.hpp"
29 #include "jvm.h"
30 #include "memory/resourceArea.hpp"
31 #include "oops/array.hpp"
32 #include "oops/fieldStreams.inline.hpp"
33 #include "oops/instanceMirrorKlass.hpp"
34 #include "oops/instanceKlass.inline.hpp"
35 #include "oops/klass.inline.hpp"
36 #include "oops/inlineKlass.inline.hpp"
37 #include "runtime/fieldDescriptor.inline.hpp"
38 #include "utilities/powerOfTwo.hpp"
39
40 static LayoutKind field_layout_selection(FieldInfo field_info, Array<InlineLayoutInfo>* inline_layout_info_array,
41 bool use_atomic_flat) {
42
43 if (!UseFieldFlattening) {
44 return LayoutKind::REFERENCE;
45 }
46
47 if (field_info.field_flags().is_injected()) {
48 // don't flatten injected fields
49 return LayoutKind::REFERENCE;
50 }
51
52 if (field_info.access_flags().is_volatile()) {
53 // volatile is used as a keyword to prevent flattening
54 return LayoutKind::REFERENCE;
55 }
56
57 if (inline_layout_info_array == nullptr || inline_layout_info_array->adr_at(field_info.index())->klass() == nullptr) {
58 // field's type is not a known value class, using a reference
59 return LayoutKind::REFERENCE;
60 }
61
62 InlineLayoutInfo* inline_field_info = inline_layout_info_array->adr_at(field_info.index());
63 InlineKlass* vk = inline_field_info->klass();
64
65 if (field_info.field_flags().is_null_free_inline_type()) {
66 assert(field_info.access_flags().is_strict(), "null-free fields must be strict");
67 if (vk->must_be_atomic() || AlwaysAtomicAccesses) {
68 if (vk->is_naturally_atomic() && vk->has_non_atomic_layout()) return LayoutKind::NON_ATOMIC_FLAT;
69 return (vk->has_atomic_layout() && use_atomic_flat) ? LayoutKind::ATOMIC_FLAT : LayoutKind::REFERENCE;
70 } else {
71 return vk->has_non_atomic_layout() ? LayoutKind::NON_ATOMIC_FLAT : LayoutKind::REFERENCE;
72 }
73 } else {
74 if (UseNullableValueFlattening && vk->has_nullable_atomic_layout()) {
75 return use_atomic_flat ? LayoutKind::NULLABLE_ATOMIC_FLAT : LayoutKind::REFERENCE;
76 } else {
77 return LayoutKind::REFERENCE;
78 }
79 }
80 }
81
82 static void get_size_and_alignment(InlineKlass* vk, LayoutKind kind, int* size, int* alignment) {
83 switch(kind) {
84 case LayoutKind::NON_ATOMIC_FLAT:
85 *size = vk->non_atomic_size_in_bytes();
86 *alignment = vk->non_atomic_alignment();
87 break;
88 case LayoutKind::ATOMIC_FLAT:
89 *size = vk->atomic_size_in_bytes();
90 *alignment = *size;
91 break;
92 case LayoutKind::NULLABLE_ATOMIC_FLAT:
93 *size = vk->nullable_atomic_size_in_bytes();
94 *alignment = *size;
95 break;
96 default:
97 ShouldNotReachHere();
98 }
99 }
100
// Constructor for blocks that do not map to a declared Java field:
// EMPTY, RESERVED, PADDING, INHERITED and NULL_MARKER blocks. Such blocks
// have no field index and use the weakest alignment constraint (1 byte).
LayoutRawBlock::LayoutRawBlock(Kind kind, int size) :
  _next_block(nullptr),
  _prev_block(nullptr),
  _inline_klass(nullptr),
  _block_kind(kind),
  _offset(-1),      // -1 means the block has not been allocated yet
  _alignment(1),
  _size(size),
  _field_index(-1) {  // no Java field associated with this block
  assert(kind == EMPTY || kind == RESERVED || kind == PADDING || kind == INHERITED || kind == NULL_MARKER,
         "Otherwise, should use the constructor with a field index argument");
  assert(size > 0, "Sanity check");
}
114
115
// Constructor for blocks that represent a declared Java field: REGULAR,
// FLAT and INHERITED blocks. The index is the field's index in the
// FieldInfo array.
LayoutRawBlock::LayoutRawBlock(int index, Kind kind, int size, int alignment) :
  _next_block(nullptr),
  _prev_block(nullptr),
  _inline_klass(nullptr),  // set later for FLAT blocks (see set_inline_klass())
  _block_kind(kind),
  _offset(-1),  // -1 means the block has not been allocated yet
  _alignment(alignment),
  _size(size),
  _field_index(index) {
  assert(kind == REGULAR || kind == FLAT || kind == INHERITED,
         "Other kind do not have a field index");
  assert(size > 0, "Sanity check");
  assert(alignment > 0, "Sanity check");
}
130
131 bool LayoutRawBlock::fit(int size, int alignment) {
132 int adjustment = 0;
133 if ((_offset % alignment) != 0) {
134 adjustment = alignment - (_offset % alignment);
135 }
136 return _size >= size + adjustment;
137 }
138
// A FieldGroup is a set of fields that must be allocated together: the
// default (root) group, the static fields group, or one @Contended group.
// Primitive fields are split into small (< oopSize) and big lists; all
// lists are allocated lazily.
FieldGroup::FieldGroup(int contended_group) :
  _next(nullptr),
  _small_primitive_fields(nullptr),
  _big_primitive_fields(nullptr),
  _oop_fields(nullptr),
  _contended_group(contended_group), // -1 means no contended group, 0 means default contended group
  _oop_count(0) {}
146
147 void FieldGroup::add_primitive_field(int idx, BasicType type) {
148 int size = type2aelembytes(type);
149 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for primitive types */);
150 if (size >= oopSize) {
151 add_to_big_primitive_list(block);
152 } else {
153 add_to_small_primitive_list(block);
154 }
155 }
156
157 void FieldGroup::add_oop_field(int idx) {
158 int size = type2aelembytes(T_OBJECT);
159 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::REGULAR, size, size /* alignment == size for oops */);
160 if (_oop_fields == nullptr) {
161 _oop_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
162 }
163 _oop_fields->append(block);
164 _oop_count++;
165 }
166
167 void FieldGroup::add_flat_field(int idx, InlineKlass* vk, LayoutKind lk, int size, int alignment) {
168 LayoutRawBlock* block = new LayoutRawBlock(idx, LayoutRawBlock::FLAT, size, alignment);
169 block->set_inline_klass(vk);
170 block->set_layout_kind(lk);
171 if (block->size() >= oopSize) {
172 add_to_big_primitive_list(block);
173 } else {
174 add_to_small_primitive_list(block);
175 }
176 }
177
178 void FieldGroup::sort_by_size() {
179 if (_small_primitive_fields != nullptr) {
180 _small_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
181 }
182 if (_big_primitive_fields != nullptr) {
183 _big_primitive_fields->sort(LayoutRawBlock::compare_size_inverted);
184 }
185 }
186
187 void FieldGroup::add_to_small_primitive_list(LayoutRawBlock* block) {
188 if (_small_primitive_fields == nullptr) {
189 _small_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
190 }
191 _small_primitive_fields->append(block);
192 }
193
194 void FieldGroup::add_to_big_primitive_list(LayoutRawBlock* block) {
195 if (_big_primitive_fields == nullptr) {
196 _big_primitive_fields = new GrowableArray<LayoutRawBlock*>(INITIAL_LIST_SIZE);
197 }
198 _big_primitive_fields->append(block);
199 }
200
// A FieldLayout is a doubly-linked list of LayoutRawBlocks describing an
// instance or static layout under construction. The list heads (_blocks,
// _start, _last) are set up later by one of the initialize_*_layout() methods.
FieldLayout::FieldLayout(GrowableArray<FieldInfo>* field_info, Array<InlineLayoutInfo>* inline_layout_info_array, ConstantPool* cp) :
  _field_info(field_info),
  _inline_layout_info_array(inline_layout_info_array),
  _cp(cp),
  _blocks(nullptr),
  _start(_blocks),   // both heads start out null; real blocks are created in initialize_*_layout()
  _last(_blocks),
  _super_first_field_offset(-1),   // -1 for all offsets/alignments means "unknown yet"
  _super_alignment(-1),
  _super_min_align_required(-1),
  _null_reset_value_offset(-1),
  _super_has_fields(false),
  _has_inherited_fields(false) {}
214
// Creates the layout used for static fields: one (virtually) unbounded EMPTY
// block at offset 0, preceded (when its size is already known) by a RESERVED
// block covering the space before the static fields in the mirror.
void FieldLayout::initialize_static_layout() {
  _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  _blocks->set_offset(0);
  _last = _blocks;
  _start = _blocks;
  // Note: at this stage, InstanceMirrorKlass::offset_of_static_fields() could be zero, because
  // during bootstrapping, the size of the java.lang.Class is still not known when layout
  // of static field is computed. Field offsets are fixed later when the size is known
  // (see java_lang_Class::fixup_mirror())
  if (InstanceMirrorKlass::offset_of_static_fields() > 0) {
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, InstanceMirrorKlass::offset_of_static_fields()));
    _blocks->set_offset(0);
  }
}
229
// Creates the layout used for instance fields.
// Without a super class: a RESERVED block covering the object header followed
// by one unbounded EMPTY block. With a super class: the super's layout is
// reconstructed first, and its holes become either reusable EMPTY blocks or
// sealed PADDING blocks (for @Contended supers, see fill_holes()).
void FieldLayout::initialize_instance_layout(const InstanceKlass* super_klass) {
  if (super_klass == nullptr) {
    _blocks = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
    _blocks->set_offset(0);
    _last = _blocks;
    _start = _blocks;
    insert(first_empty_block(), new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes()));
  } else {
    _super_has_fields = reconstruct_layout(super_klass);
    fill_holes(super_klass);
    // A @Contended super's holes must not be reused by this class' fields.
    if ((!super_klass->has_contended_annotations()) || !_super_has_fields) {
      _start = _blocks; // start allocating fields from the first empty block
    } else {
      _start = _last; // append fields at the end of the reconstructed layout
    }
  }
}
247
248 LayoutRawBlock* FieldLayout::first_field_block() {
249 LayoutRawBlock* block = _blocks;
250 while (block != nullptr
251 && block->block_kind() != LayoutRawBlock::INHERITED
252 && block->block_kind() != LayoutRawBlock::REGULAR
253 && block->block_kind() != LayoutRawBlock::FLAT
254 && block->block_kind() != LayoutRawBlock::NULL_MARKER) {
255 block = block->next_block();
256 }
257 return block;
258 }
259
// Insert a set of fields into a layout.
// For each field, search for an empty slot able to fit the field
// (satisfying both size and alignment requirements), if none is found,
// add the field at the end of the layout.
// Fields cannot be inserted before the block specified in the "start" argument
void FieldLayout::add(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == nullptr) return;
  if (start == nullptr) start = this->_start;
  // Memoize the previous field's size/alignment and whether its slot search
  // succeeded, to short-circuit searches guaranteed to fail (see below).
  bool last_search_success = false;
  int last_size = 0;
  int last_alignment = 0;
  for (int i = 0; i < list->length(); i ++) {
    LayoutRawBlock* b = list->at(i);
    LayoutRawBlock* cursor = nullptr;
    LayoutRawBlock* candidate = nullptr;
    // if start is the last block, just append the field
    if (start == last_block()) {
      candidate = last_block();
    }
    // Before iterating over the layout to find an empty slot fitting the field's requirements,
    // check if the previous field had the same requirements and if the search for a fitting slot
    // was successful. If the requirements were the same but the search failed, a new search will
    // fail the same way, so just append the field at the end of the layout.
    else if (b->size() == last_size && b->alignment() == last_alignment && !last_search_success) {
      candidate = last_block();
    } else {
      // Iterate over the layout to find an empty slot fitting the field's requirements
      last_size = b->size();
      last_alignment = b->alignment();
      cursor = last_block()->prev_block();
      assert(cursor != nullptr, "Sanity check");
      last_search_success = true;

      // Walk backward from the end toward 'start', keeping the smallest
      // fitting EMPTY block (best-fit, to limit fragmentation).
      while (cursor != start) {
        if (cursor->block_kind() == LayoutRawBlock::EMPTY && cursor->fit(b->size(), b->alignment())) {
          if (candidate == nullptr || cursor->size() < candidate->size()) {
            candidate = cursor;
          }
        }
        cursor = cursor->prev_block();
      }
      if (candidate == nullptr) {
        // No hole fits: append at the end and remember the failure.
        candidate = last_block();
        last_search_success = false;
      }
      assert(candidate != nullptr, "Candidate must not be null");
      assert(candidate->block_kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
      assert(candidate->fit(b->size(), b->alignment()), "Candidate must be able to store the block");
    }
    insert_field_block(candidate, b);
  }
}
312
// Used for classes with hard coded field offsets, insert a field at the specified offset
void FieldLayout::add_field_at_offset(LayoutRawBlock* block, int offset, LayoutRawBlock* start) {
  assert(block != nullptr, "Sanity check");
  block->set_offset(offset);
  if (start == nullptr) {
    start = this->_start;
  }
  LayoutRawBlock* slot = start;
  while (slot != nullptr) {
    // The matching slot is either the block whose range covers the requested
    // offset, or the terminal (unbounded) block of the layout.
    if ((slot->offset() <= block->offset() && (slot->offset() + slot->size()) > block->offset()) ||
        slot == _last){
      assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Matching slot must be an empty slot");
      assert(slot->size() >= block->offset() - slot->offset() + block->size() ,"Matching slot must be big enough");
      if (slot->offset() < block->offset()) {
        // Keep the gap between the slot start and the hard coded offset as an EMPTY block
        int adjustment = block->offset() - slot->offset();
        LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
        insert(slot, adj);
      }
      insert(slot, block);
      if (slot->size() == 0) {
        remove(slot);
      }
      // Record the final offset for real fields (NULL_MARKER blocks have no FieldInfo entry)
      if (block->block_kind() == LayoutRawBlock::REGULAR || block->block_kind() == LayoutRawBlock::FLAT) {
        _field_info->adr_at(block->field_index())->set_offset(block->offset());
      }
      return;
    }
    slot = slot->next_block();
  }
  fatal("Should have found a matching slot above, corrupted layout or invalid offset");
}
344
// The allocation logic uses a best fit strategy: the set of fields is allocated
// in the first empty slot big enough to contain the whole set (including padding
// to fit alignment constraints).
void FieldLayout::add_contiguously(GrowableArray<LayoutRawBlock*>* list, LayoutRawBlock* start) {
  if (list == nullptr) return;
  if (start == nullptr) {
    start = _start;
  }
  // This code assumes that if the first block is well aligned, the following
  // blocks would naturally be well aligned (no need for adjustment)
  int size = 0;
  for (int i = 0; i < list->length(); i++) {
    size += list->at(i)->size();
  }

  LayoutRawBlock* candidate = nullptr;
  if (start == last_block()) {
    // start is the end of the layout: nothing to search, just append
    candidate = last_block();
  } else {
    // Walk backward from the end looking for an EMPTY block able to hold the
    // whole set, aligned for the first field; fall back to appending.
    LayoutRawBlock* first = list->at(0);
    candidate = last_block()->prev_block();
    while (candidate->block_kind() != LayoutRawBlock::EMPTY || !candidate->fit(size, first->alignment())) {
      if (candidate == start) {
        candidate = last_block();
        break;
      }
      candidate = candidate->prev_block();
    }
    assert(candidate != nullptr, "Candidate must not be null");
    assert(candidate->block_kind() == LayoutRawBlock::EMPTY, "Candidate must be an empty block");
    assert(candidate->fit(size, first->alignment()), "Candidate must be able to store the whole contiguous block");
  }

  // Insert all fields back-to-back into the chosen slot.
  for (int i = 0; i < list->length(); i++) {
    LayoutRawBlock* b = list->at(i);
    insert_field_block(candidate, b);
    assert((candidate->offset() % b->alignment() == 0), "Contiguous blocks must be naturally well aligned");
  }
}
384
385 LayoutRawBlock* FieldLayout::insert_field_block(LayoutRawBlock* slot, LayoutRawBlock* block) {
386 assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
387 if (slot->offset() % block->alignment() != 0) {
388 int adjustment = block->alignment() - (slot->offset() % block->alignment());
389 LayoutRawBlock* adj = new LayoutRawBlock(LayoutRawBlock::EMPTY, adjustment);
390 insert(slot, adj);
391 }
392 assert(block->size() >= block->size(), "Enough space must remain after adjustment");
393 insert(slot, block);
394 if (slot->size() == 0) {
395 remove(slot);
396 }
397 // NULL_MARKER blocks are not real fields, so they don't have an entry in the FieldInfo array
398 if (block->block_kind() != LayoutRawBlock::NULL_MARKER) {
399 _field_info->adr_at(block->field_index())->set_offset(block->offset());
400 if (_field_info->adr_at(block->field_index())->name(_cp) == vmSymbols::null_reset_value_name()) {
401 _null_reset_value_offset = block->offset();
402 }
403 }
404 if (block->block_kind() == LayoutRawBlock::FLAT && block->layout_kind() == LayoutKind::NULLABLE_ATOMIC_FLAT) {
405 int nm_offset = block->inline_klass()->null_marker_offset() - block->inline_klass()->payload_offset() + block->offset();
406 _field_info->adr_at(block->field_index())->set_null_marker_offset(nm_offset);
407 _inline_layout_info_array->adr_at(block->field_index())->set_null_marker_offset(nm_offset);
408 }
409
410 return block;
411 }
412
// Rebuilds the layout of the super class hierarchy as a list of INHERITED
// blocks sorted by offset, headed by a RESERVED block for the object header.
// Also records the minimum alignment required by inherited fields and the
// lowest inherited field offset. Returns true if at least one instance field
// was found in a super class.
bool FieldLayout::reconstruct_layout(const InstanceKlass* ik) {
  bool has_instance_fields = false;
  if (ik->is_abstract() && !ik->is_identity_class()) {
    // Abstract value class super: use the size of a long as the super
    // alignment baseline. NOTE(review): presumably the strongest alignment a
    // field could require — confirm against the value-class layout spec.
    _super_alignment = type2aelembytes(BasicType::T_LONG);
  }
  GrowableArray<LayoutRawBlock*>* all_fields = new GrowableArray<LayoutRawBlock*>(32);
  while (ik != nullptr) {
    for (AllFieldStream fs(ik->fieldinfo_stream(), ik->constants()); !fs.done(); fs.next()) {
      BasicType type = Signature::basic_type(fs.signature());
      // distinction between static and non-static fields is missing
      if (fs.access_flags().is_static()) continue;
      has_instance_fields = true;
      _has_inherited_fields = true;
      // Track the lowest offset of any inherited instance field
      if (_super_first_field_offset == -1 || fs.offset() < _super_first_field_offset) _super_first_field_offset = fs.offset();
      LayoutRawBlock* block;
      if (fs.is_flat()) {
        InlineLayoutInfo layout_info = ik->inline_layout_info(fs.index());
        InlineKlass* vk = layout_info.klass();
        block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED,
                                   vk->layout_size_in_bytes(layout_info.kind()),
                                   vk->layout_alignment(layout_info.kind()));
        assert(_super_alignment == -1 || _super_alignment >= vk->payload_alignment(), "Invalid value alignment");
        _super_min_align_required = _super_min_align_required > vk->payload_alignment() ? _super_min_align_required : vk->payload_alignment();
      } else {
        int size = type2aelembytes(type);
        // INHERITED blocks are marked as non-reference because oop_maps are handled by their holder class
        block = new LayoutRawBlock(fs.index(), LayoutRawBlock::INHERITED, size, size);
        // For primitive types, the alignment is equal to the size
        assert(_super_alignment == -1 || _super_alignment >= size, "Invalid value alignment");
        _super_min_align_required = _super_min_align_required > size ? _super_min_align_required : size;
      }
      block->set_offset(fs.offset());
      all_fields->append(block);
    }
    ik = ik->super() == nullptr ? nullptr : InstanceKlass::cast(ik->super());
  }
  // Chain the blocks in ascending offset order after the header block
  all_fields->sort(LayoutRawBlock::compare_offset);
  _blocks = new LayoutRawBlock(LayoutRawBlock::RESERVED, instanceOopDesc::base_offset_in_bytes());
  _blocks->set_offset(0);
  _last = _blocks;
  for(int i = 0; i < all_fields->length(); i++) {
    LayoutRawBlock* b = all_fields->at(i);
    _last->set_next_block(b);
    b->set_prev_block(_last);
    _last = b;
  }
  _start = _blocks;
  return has_instance_fields;
}
462
// Called during the reconstruction of a layout, after fields from super
// classes have been inserted. It fills unused slots between inserted fields
// with EMPTY blocks, so the regular field insertion methods would work.
// This method handles classes with @Contended annotations differently
// by inserting PADDING blocks instead of EMPTY block to prevent subclasses'
// fields to interfere with contended fields/classes.
void FieldLayout::fill_holes(const InstanceKlass* super_klass) {
  assert(_blocks != nullptr, "Sanity check");
  assert(_blocks->offset() == 0, "first block must be at offset zero");
  LayoutRawBlock::Kind filling_type = super_klass->has_contended_annotations() ? LayoutRawBlock::PADDING: LayoutRawBlock::EMPTY;
  LayoutRawBlock* b = _blocks;
  while (b->next_block() != nullptr) {
    // A gap between consecutive blocks becomes a filler block of filling_type
    if (b->next_block()->offset() > (b->offset() + b->size())) {
      int size = b->next_block()->offset() - (b->offset() + b->size());
      // FIXME it would be better if initial empty block where tagged as PADDING for value classes
      LayoutRawBlock* empty = new LayoutRawBlock(filling_type, size);
      empty->set_offset(b->offset() + b->size());
      empty->set_next_block(b->next_block());
      b->next_block()->set_prev_block(empty);
      b->set_next_block(empty);
      empty->set_prev_block(b);
    }
    b = b->next_block();
  }
  assert(b->next_block() == nullptr, "Invariant at this point");
  assert(b->block_kind() != LayoutRawBlock::EMPTY, "Sanity check");
  // If the super class has @Contended annotation, a padding block is
  // inserted at the end to ensure that fields from the subclasses won't share
  // the cache line of the last field of the contended class
  if (super_klass->has_contended_annotations() && ContendedPaddingWidth > 0) {
    LayoutRawBlock* p = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
    p->set_offset(b->offset() + b->size());
    b->set_next_block(p);
    p->set_prev_block(b);
    b = p;
  }

  // Terminate the layout with an unbounded EMPTY block where new fields can go
  LayoutRawBlock* last = new LayoutRawBlock(LayoutRawBlock::EMPTY, INT_MAX);
  last->set_offset(b->offset() + b->size());
  assert(last->offset() > 0, "Sanity check");
  b->set_next_block(last);
  last->set_prev_block(b);
  _last = last;
}
507
// Inserts 'block' at the start of the EMPTY 'slot': the slot's offset and
// size are adjusted to cover only the remaining space, and the list heads
// (_blocks, _start) are updated if the slot was one of them. The slot must
// already satisfy the block's alignment. Returns the inserted block.
LayoutRawBlock* FieldLayout::insert(LayoutRawBlock* slot, LayoutRawBlock* block) {
  assert(slot->block_kind() == LayoutRawBlock::EMPTY, "Blocks can only be inserted in empty blocks");
  assert(slot->offset() % block->alignment() == 0, "Incompatible alignment");
  block->set_offset(slot->offset());
  slot->set_offset(slot->offset() + block->size());
  assert((slot->size() - block->size()) < slot->size(), "underflow checking");
  assert(slot->size() - block->size() >= 0, "no negative size allowed");
  slot->set_size(slot->size() - block->size());
  // Link the block immediately before the (shrunk) slot
  block->set_prev_block(slot->prev_block());
  block->set_next_block(slot);
  slot->set_prev_block(block);
  if (block->prev_block() != nullptr) {
    block->prev_block()->set_next_block(block);
  }
  if (_blocks == slot) {
    _blocks = block;
  }
  if (_start == slot) {
    _start = block;
  }
  return block;
}
530
// Unlinks 'block' from the layout. The terminal block (_last) can never be
// removed, which guarantees block->next_block() is non-null in the non-head
// case. If the removed block was the allocation start point, _start moves
// back to its predecessor. The block itself is not freed (resource area).
void FieldLayout::remove(LayoutRawBlock* block) {
  assert(block != nullptr, "Sanity check");
  assert(block != _last, "Sanity check");
  if (_blocks == block) {
    _blocks = block->next_block();
    if (_blocks != nullptr) {
      _blocks->set_prev_block(nullptr);
    }
  } else {
    assert(block->prev_block() != nullptr, "_prev should be set for non-head blocks");
    block->prev_block()->set_next_block(block->next_block());
    block->next_block()->set_prev_block(block->prev_block());
  }
  if (block == _start) {
    _start = block->prev_block();
  }
}
548
// Shifts all field blocks (starting at the first one) up by 'shift' bytes,
// absorbing the shift either into the preceding EMPTY block or into a new
// PADDING block. Field offsets (and null marker offsets for nullable atomic
// flat fields) recorded in the FieldInfo array are updated accordingly.
void FieldLayout::shift_fields(int shift) {
  LayoutRawBlock* b = first_field_block();
  LayoutRawBlock* previous = b->prev_block();
  if (previous->block_kind() == LayoutRawBlock::EMPTY) {
    // Grow the preceding empty block to absorb the shift
    previous->set_size(previous->size() + shift);
  } else {
    // No empty space before the fields: insert a PADDING block of 'shift' bytes
    LayoutRawBlock* nb = new LayoutRawBlock(LayoutRawBlock::PADDING, shift);
    nb->set_offset(b->offset());
    previous->set_next_block(nb);
    nb->set_prev_block(previous);
    b->set_prev_block(nb);
    nb->set_next_block(b);
  }
  // Slide every remaining block and keep the FieldInfo offsets in sync
  while (b != nullptr) {
    b->set_offset(b->offset() + shift);
    if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
      _field_info->adr_at(b->field_index())->set_offset(b->offset());
      if (b->layout_kind() == LayoutKind::NULLABLE_ATOMIC_FLAT) {
        int new_nm_offset = _field_info->adr_at(b->field_index())->null_marker_offset() + shift;
        _field_info->adr_at(b->field_index())->set_null_marker_offset(new_nm_offset);
        _inline_layout_info_array->adr_at(b->field_index())->set_null_marker_offset(new_nm_offset);

      }
    }
    assert(b->block_kind() == LayoutRawBlock::EMPTY || b->offset() % b->alignment() == 0, "Must still be correctly aligned");
    b = b->next_block();
  }
}
577
578 LayoutRawBlock* FieldLayout::find_null_marker() {
579 LayoutRawBlock* b = _blocks;
580 while (b != nullptr) {
581 if (b->block_kind() == LayoutRawBlock::NULL_MARKER) {
582 return b;
583 }
584 b = b->next_block();
585 }
586 ShouldNotReachHere();
587 }
588
// Removes the NULL_MARKER block from the layout: either merges its space into
// the following EMPTY block, or simply retypes it as EMPTY so the space can be
// reused. Fatal if no null marker is present.
void FieldLayout::remove_null_marker() {
  LayoutRawBlock* b = first_field_block();
  while (b != nullptr) {
    if (b->block_kind() == LayoutRawBlock::NULL_MARKER) {
      if (b->next_block()->block_kind() == LayoutRawBlock::EMPTY) {
        // Coalesce: unlink the marker and extend the following empty block
        // backward over it. Note: 'b' is only unlinked, not freed, so reading
        // its offset/size after remove() is safe.
        LayoutRawBlock* n = b->next_block();
        remove(b);
        n->set_offset(b->offset());
        n->set_size(n->size() + b->size());
      } else {
        b->set_block_kind(LayoutRawBlock::EMPTY);
      }
      return;
    }
    b = b->next_block();
  }
  ShouldNotReachHere(); // if we reach this point, the null marker was not found!
}
607
608 static const char* layout_kind_to_string(LayoutKind lk) {
609 switch(lk) {
610 case LayoutKind::REFERENCE:
611 return "REFERENCE";
612 case LayoutKind::NON_ATOMIC_FLAT:
613 return "NON_ATOMIC_FLAT";
614 case LayoutKind::ATOMIC_FLAT:
615 return "ATOMIC_FLAT";
616 case LayoutKind::NULLABLE_ATOMIC_FLAT:
617 return "NULLABLE_ATOMIC_FLAT";
618 case LayoutKind::UNKNOWN:
619 return "UNKNOWN";
620 default:
621 ShouldNotReachHere();
622 }
623 }
624
// Prints a human readable description of the layout, one line per block:
// "@offset KIND size/alignment name signature ...". For INHERITED blocks, the
// name and signature are recovered by scanning the super class hierarchy for
// the field whose offset matches the block's.
void FieldLayout::print(outputStream* output, bool is_static, const InstanceKlass* super, Array<InlineLayoutInfo>* inline_fields) {
  ResourceMark rm;
  LayoutRawBlock* b = _blocks;
  while(b != _last) {
    switch(b->block_kind()) {
      case LayoutRawBlock::REGULAR: {
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        output->print_cr(" @%d %s %d/%d \"%s\" %s",
                         b->offset(),
                         "REGULAR",
                         b->size(),
                         b->alignment(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string());
        break;
      }
      case LayoutRawBlock::FLAT: {
        // Flat fields additionally print the inline klass and its layout kind
        FieldInfo* fi = _field_info->adr_at(b->field_index());
        InlineKlass* ik = inline_fields->adr_at(fi->index())->klass();
        assert(ik != nullptr, "");
        output->print_cr(" @%d %s %d/%d \"%s\" %s %s@%p %s",
                         b->offset(),
                         "FLAT",
                         b->size(),
                         b->alignment(),
                         fi->name(_cp)->as_C_string(),
                         fi->signature(_cp)->as_C_string(),
                         ik->name()->as_C_string(),
                         ik->class_loader_data(), layout_kind_to_string(b->layout_kind()));
        break;
      }
      case LayoutRawBlock::RESERVED: {
        output->print_cr(" @%d %s %d/-",
                         b->offset(),
                         "RESERVED",
                         b->size());
        break;
      }
      case LayoutRawBlock::INHERITED: {
        assert(!is_static, "Static fields are not inherited in layouts");
        assert(super != nullptr, "super klass must be provided to retrieve inherited fields info");
        bool found = false;
        const InstanceKlass* ik = super;
        // Walk up the hierarchy until the field at this offset is located
        while (!found && ik != nullptr) {
          for (AllFieldStream fs(ik->fieldinfo_stream(), ik->constants()); !fs.done(); fs.next()) {
            if (fs.offset() == b->offset() && fs.access_flags().is_static() == is_static) {
              output->print_cr(" @%d %s %d/%d \"%s\" %s",
                               b->offset(),
                               "INHERITED",
                               b->size(),
                               b->size(), // so far, alignment constraint == size, will change with Valhalla => FIXME
                               fs.name()->as_C_string(),
                               fs.signature()->as_C_string());
              found = true;
              break;
            }
          }
          ik = ik->java_super();
        }
        break;
      }
      case LayoutRawBlock::EMPTY:
        output->print_cr(" @%d %s %d/1",
                         b->offset(),
                         "EMPTY",
                         b->size());
        break;
      case LayoutRawBlock::PADDING:
        output->print_cr(" @%d %s %d/1",
                         b->offset(),
                         "PADDING",
                         b->size());
        break;
      case LayoutRawBlock::NULL_MARKER:
      {
        output->print_cr(" @%d %s %d/1 ",
                         b->offset(),
                         "NULL_MARKER",
                         b->size());
        break;
      }
      default:
        fatal("Unknown block type");
    }
    b = b->next_block();
  }
}
712
// FieldLayoutBuilder drives the whole layout computation for one class:
// it sorts fields into groups (prologue + *_field_sorting), allocates them
// into the instance and static FieldLayouts, and publishes the results in
// 'info', 'field_info' and 'inline_layout_info_array'. All size/offset/
// alignment members start at -1 ("not computed yet").
FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, ClassLoaderData* loader_data, const InstanceKlass* super_klass, ConstantPool* constant_pool,
                                       GrowableArray<FieldInfo>* field_info, bool is_contended, bool is_inline_type,bool is_abstract_value,
                                       bool must_be_atomic, FieldLayoutInfo* info, Array<InlineLayoutInfo>* inline_layout_info_array) :
  _classname(classname),
  _loader_data(loader_data),
  _super_klass(super_klass),
  _constant_pool(constant_pool),
  _field_info(field_info),
  _info(info),
  _inline_layout_info_array(inline_layout_info_array),
  _root_group(nullptr),
  _contended_groups(GrowableArray<FieldGroup*>(8)),
  _static_fields(nullptr),
  _layout(nullptr),
  _static_layout(nullptr),
  _nonstatic_oopmap_count(0),
  _payload_alignment(-1),
  _payload_offset(-1),
  _null_marker_offset(-1),
  _payload_size_in_bytes(-1),
  _non_atomic_layout_size_in_bytes(-1),
  _non_atomic_layout_alignment(-1),
  _atomic_layout_size_in_bytes(-1),
  _nullable_layout_size_in_bytes(-1),
  _fields_size_sum(0),
  _declared_non_static_fields_count(0),
  _has_non_naturally_atomic_fields(false),
  _is_naturally_atomic(false),
  _must_be_atomic(must_be_atomic),
  _has_nonstatic_fields(false),
  _has_inline_type_fields(false),
  _is_contended(is_contended),
  _is_inline_type(is_inline_type),
  _is_abstract_value(is_abstract_value),
  _has_flattening_information(is_inline_type),
  _is_empty_inline_class(false) {}
749
750 FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
751 assert(g > 0, "must only be called for named contended groups");
752 FieldGroup* fg = nullptr;
753 for (int i = 0; i < _contended_groups.length(); i++) {
754 fg = _contended_groups.at(i);
755 if (fg->contended_group() == g) return fg;
756 }
757 fg = new FieldGroup(g);
758 _contended_groups.append(fg);
759 return fg;
760 }
761
// Sets up the instance and static layouts and the root/static field groups,
// seeding the oop map count and the has-nonstatic-fields flag from the super
// class. Must run before any of the *_field_sorting() methods.
void FieldLayoutBuilder::prologue() {
  _layout = new FieldLayout(_field_info, _inline_layout_info_array, _constant_pool);
  const InstanceKlass* super_klass = _super_klass;
  _layout->initialize_instance_layout(super_klass);
  _nonstatic_oopmap_count = super_klass == nullptr ? 0 : super_klass->nonstatic_oop_map_count();
  if (super_klass != nullptr) {
    _has_nonstatic_fields = super_klass->has_nonstatic_fields();
  }
  _static_layout = new FieldLayout(_field_info, _inline_layout_info_array, _constant_pool);
  _static_layout->initialize_static_layout();
  _static_fields = new FieldGroup();
  _root_group = new FieldGroup();
}
775
// Field sorting for regular (non-inline) classes:
// - fields are sorted in static and non-static fields
// - non-static fields are also sorted according to their contention group
//   (support of the @Contended annotation)
// - @Contended annotation is ignored for static fields
// - field flattening decisions are taken in this method
void FieldLayoutBuilder::regular_field_sorting() {
  int idx = 0;
  for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
    FieldGroup* group = nullptr;
    FieldInfo fieldinfo = *it;
    if (fieldinfo.access_flags().is_static()) {
      group = _static_fields;
    } else {
      _has_nonstatic_fields = true;
      if (fieldinfo.field_flags().is_contended()) {
        int g = fieldinfo.contended_group();
        if (g == 0) {
          // Anonymous @Contended field: it gets its own group.
          // NOTE(review): 'true' converts to contended group id 1 here —
          // confirm this cannot collide with a named group that
          // get_or_create_contended_group(1) would later look up.
          group = new FieldGroup(true);
          _contended_groups.append(group);
        } else {
          group = get_or_create_contended_group(g);
        }
      } else {
        group = _root_group;
      }
    }
    assert(group != nullptr, "invariant");
    BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
    switch(type) {
      case T_BYTE:
      case T_CHAR:
      case T_DOUBLE:
      case T_FLOAT:
      case T_INT:
      case T_LONG:
      case T_SHORT:
      case T_BOOLEAN:
        group->add_primitive_field(idx, type);
        break;
      case T_OBJECT:
      case T_ARRAY:
      {
        // Decide whether this field is kept as a reference or flattened
        LayoutKind lk = field_layout_selection(fieldinfo, _inline_layout_info_array, true);
        if (fieldinfo.field_flags().is_null_free_inline_type() || lk != LayoutKind::REFERENCE
            || (!fieldinfo.field_flags().is_injected()
            && _inline_layout_info_array != nullptr && _inline_layout_info_array->adr_at(fieldinfo.index())->klass() != nullptr
            && !_inline_layout_info_array->adr_at(fieldinfo.index())->klass()->is_identity_class())) {
          _has_inline_type_fields = true;
          _has_flattening_information = true;
        }
        if (lk == LayoutKind::REFERENCE) {
          if (group != _static_fields) _nonstatic_oopmap_count++;
          group->add_oop_field(idx);
        } else {
          // Flattened: record the layout kind and propagate oop map needs
          _has_flattening_information = true;
          InlineKlass* vk = _inline_layout_info_array->adr_at(fieldinfo.index())->klass();
          int size, alignment;
          get_size_and_alignment(vk, lk, &size, &alignment);
          group->add_flat_field(idx, vk, lk, size, alignment);
          _inline_layout_info_array->adr_at(fieldinfo.index())->set_kind(lk);
          _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
          _field_info->adr_at(idx)->field_flags_addr()->update_flat(true);
          _field_info->adr_at(idx)->set_layout_kind(lk);
          // no need to update _must_be_atomic if vk->must_be_atomic() is true because current class is not an inline class
        }
        break;
      }
      default:
        fatal("Something wrong?");
    }
  }
  // Allocate biggest fields first within each group
  _root_group->sort_by_size();
  _static_fields->sort_by_size();
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      _contended_groups.at(i)->sort_by_size();
    }
  }
}
856
/* Field sorting for inline classes:
 * - because inline classes are immutable, the @Contended annotation is ignored
 *   when computing their layout (with only read operation, there's no false
 *   sharing issue)
 * - this method also records the alignment of the field with the most
 *   constraining alignment, this value is then used as the alignment
 *   constraint when flattening this inline type into another container
 * - field flattening decisions are taken in this method (those decisions are
 *   currently only based in the size of the fields to be flattened, the size
 *   of the resulting instance is not considered)
 */
void FieldLayoutBuilder::inline_class_field_sorting() {
  assert(_is_inline_type || _is_abstract_value, "Should only be used for inline classes");
  // Running maximum of non-static field alignments; becomes _payload_alignment
  int alignment = -1;
  int idx = 0;
  for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it, ++idx) {
    FieldGroup* group = nullptr;
    FieldInfo fieldinfo = *it;
    int field_alignment = 1;
    if (fieldinfo.access_flags().is_static()) {
      group = _static_fields;
    } else {
      _has_nonstatic_fields = true;
      _declared_non_static_fields_count++;
      group = _root_group;
    }
    assert(group != nullptr, "invariant");
    BasicType type = Signature::basic_type(fieldinfo.signature(_constant_pool));
    switch(type) {
      case T_BYTE:
      case T_CHAR:
      case T_DOUBLE:
      case T_FLOAT:
      case T_INT:
      case T_LONG:
      case T_SHORT:
      case T_BOOLEAN:
        if (group != _static_fields) {
          field_alignment = type2aelembytes(type); // alignment == size for primitive types
        }
        group->add_primitive_field(fieldinfo.index(), type);
        break;
      case T_OBJECT:
      case T_ARRAY:
      {
        bool use_atomic_flat = _must_be_atomic; // flatten atomic fields only if the container is itself atomic
        LayoutKind lk = field_layout_selection(fieldinfo, _inline_layout_info_array, use_atomic_flat);
        if (fieldinfo.field_flags().is_null_free_inline_type() || lk != LayoutKind::REFERENCE
            || (!fieldinfo.field_flags().is_injected()
            && _inline_layout_info_array != nullptr && _inline_layout_info_array->adr_at(fieldinfo.index())->klass() != nullptr
            && !_inline_layout_info_array->adr_at(fieldinfo.index())->klass()->is_identity_class())) {
          _has_inline_type_fields = true;
          _has_flattening_information = true;
        }
        if (lk == LayoutKind::REFERENCE) {
          if (group != _static_fields) {
            _nonstatic_oopmap_count++;
            field_alignment = type2aelembytes(type); // alignment == size for oops
          }
          group->add_oop_field(idx);
        } else {
          _has_flattening_information = true;
          InlineKlass* vk = _inline_layout_info_array->adr_at(fieldinfo.index())->klass();
          if (!vk->is_naturally_atomic()) _has_non_naturally_atomic_fields = true;
          // Note: this inner 'alignment' shadows the outer accumulator; its
          // value is carried out through field_alignment below.
          int size, alignment;
          get_size_and_alignment(vk, lk, &size, &alignment);
          group->add_flat_field(idx, vk, lk, size, alignment);
          _inline_layout_info_array->adr_at(fieldinfo.index())->set_kind(lk);
          _nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
          field_alignment = alignment;
          _field_info->adr_at(idx)->field_flags_addr()->update_flat(true);
          _field_info->adr_at(idx)->set_layout_kind(lk);
        }
        break;
      }
      default:
        fatal("Unexpected BasicType");
    }
    if (!fieldinfo.access_flags().is_static() && field_alignment > alignment) alignment = field_alignment;
  }
  _payload_alignment = alignment;
  assert(_has_nonstatic_fields || _is_abstract_value, "Concrete value types do not support zero instance size yet");
}
940
941 void FieldLayoutBuilder::insert_contended_padding(LayoutRawBlock* slot) {
942 if (ContendedPaddingWidth > 0) {
943 LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, ContendedPaddingWidth);
944 _layout->insert(slot, padding);
945 }
946 }
947
/* Computation of regular classes layout is an evolution of the previous default layout
 * (FieldAllocationStyle 1):
 *   - primitive fields (both primitive types and flat inline types) are allocated
 *     first, from the biggest to the smallest
 *   - then oop fields are allocated (to increase chances to have contiguous oops and
 *     a simpler oopmap).
 */
void FieldLayoutBuilder::compute_regular_layout() {
  bool need_tail_padding = false;
  prologue();
  regular_field_sorting();
  if (_is_contended) {
    _layout->set_start(_layout->last_block());
    // insertion is currently easy because the current strategy doesn't try to fill holes
    // in super classes layouts => the _start block is by consequence the _last_block
    insert_contended_padding(_layout->start());
    need_tail_padding = true;
  }
  _layout->add(_root_group->big_primitive_fields());
  _layout->add(_root_group->small_primitive_fields());
  _layout->add(_root_group->oop_fields());

  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      LayoutRawBlock* start = _layout->last_block();
      insert_contended_padding(start);
      // NOTE(review): big_primitive_fields is added without the 'start' bound,
      // unlike the small primitive and oop lists below — confirm this is
      // intended and cannot place a contended field before the padding.
      _layout->add(cg->big_primitive_fields());
      _layout->add(cg->small_primitive_fields(), start);
      _layout->add(cg->oop_fields(), start);
      need_tail_padding = true;
    }
  }

  if (need_tail_padding) {
    insert_contended_padding(_layout->last_block());
  }

  // Warning: InstanceMirrorKlass expects static oops to be allocated first
  _static_layout->add_contiguously(_static_fields->oop_fields());
  _static_layout->add(_static_fields->big_primitive_fields());
  _static_layout->add(_static_fields->small_primitive_fields());

  epilogue();
}
993
/* Computation of inline class layouts uses a slightly different strategy than
 * for regular classes. Regular classes have their oop fields allocated at the
 * end of the layout to increase GC performance. Unfortunately, this strategy
 * increases the number of empty slots inside an instance. Because the purpose
 * of inline classes is to be embedded into other containers, it is critical
 * to keep their size as small as possible. For this reason, the allocation
 * strategy is:
 * - big primitive fields (primitive types and flat inline types smaller
 *   than an oop) are allocated first (from the biggest to the smallest)
 * - then oop fields
 * - then small primitive fields (from the biggest to the smallest)
 */
// Computes the instance layout of an inline (value) class, along with the
// characteristics of its flat layouts (non-atomic, atomic, nullable) used
// when the value is embedded in other containers.
void FieldLayoutBuilder::compute_inline_class_layout() {

  // Test if the concrete inline class is an empty class (no instance fields)
  // and insert a dummy field if needed
  if (!_is_abstract_value) {
    bool declares_non_static_fields = false;
    for (GrowableArrayIterator<FieldInfo> it = _field_info->begin(); it != _field_info->end(); ++it) {
      FieldInfo fieldinfo = *it;
      if (!fieldinfo.access_flags().is_static()) {
        declares_non_static_fields = true;
        break;
      }
    }
    if (!declares_non_static_fields) {
      // No declared instance fields: check the whole superclass chain for
      // inherited instance fields before deciding the class is truly empty.
      bool has_inherited_fields = false;
      const InstanceKlass* super = _super_klass;
      while(super != nullptr) {
        if (super->has_nonstatic_fields()) {
          has_inherited_fields = true;
          break;
        }
        super = super->super() == nullptr ? nullptr : InstanceKlass::cast(super->super());
      }

      if (!has_inherited_fields) {
        // Inject ".empty" dummy field (a synthetic byte) so the payload has a
        // non-zero size.
        _is_empty_inline_class = true;
        FieldInfo::FieldFlags fflags(0);
        fflags.update_injected(true);
        AccessFlags aflags;
        FieldInfo fi(aflags,
                     (u2)vmSymbols::as_int(VM_SYMBOL_ENUM_NAME(empty_marker_name)),
                     (u2)vmSymbols::as_int(VM_SYMBOL_ENUM_NAME(byte_signature)),
                     0,
                     fflags);
        int idx = _field_info->append(fi);
        _field_info->adr_at(idx)->set_index(idx);
      }
    }
  }

  prologue();
  inline_class_field_sorting();

  assert(_layout->start()->block_kind() == LayoutRawBlock::RESERVED, "Unexpected");

  if (_layout->super_has_fields() && !_is_abstract_value) { // non-static field layout
    if (!_has_nonstatic_fields) {
      assert(_is_abstract_value, "Concrete value types have at least one field");
      // Nothing to do
    } else {
      // decide which alignment to use, then set first allowed field offset

      assert(_layout->super_alignment() >= _payload_alignment, "Incompatible alignment");
      assert(_layout->super_alignment() % _payload_alignment == 0, "Incompatible alignment");

      if (_payload_alignment < _layout->super_alignment()) {
        // Relax the super's alignment down to the largest of the declared
        // fields' alignment and the minimum the super still requires.
        int new_alignment = _payload_alignment > _layout->super_min_align_required() ? _payload_alignment : _layout->super_min_align_required();
        assert(new_alignment % _payload_alignment == 0, "Must be");
        assert(new_alignment % _layout->super_min_align_required() == 0, "Must be");
        _payload_alignment = new_alignment;
      }
      _layout->set_start(_layout->first_field_block());
    }
  } else {
    if (_is_abstract_value && _has_nonstatic_fields) {
      // Abstract value classes with fields force 8-byte alignment so any
      // concrete subclass layout can be compatible.
      _payload_alignment = type2aelembytes(BasicType::T_LONG);
    }
    assert(_layout->start()->next_block()->block_kind() == LayoutRawBlock::EMPTY || !UseCompressedClassPointers, "Unexpected");
    LayoutRawBlock* first_empty = _layout->start()->next_block();
    if (first_empty->offset() % _payload_alignment != 0) {
      // Pad up to the payload alignment before the first field can be placed.
      LayoutRawBlock* padding = new LayoutRawBlock(LayoutRawBlock::PADDING, _payload_alignment - (first_empty->offset() % _payload_alignment));
      _layout->insert(first_empty, padding);
      if (first_empty->size() == 0) {
        _layout->remove(first_empty);
      }
      _layout->set_start(padding);
    }
  }

  // Inline class allocation order: big primitives, oops, small primitives
  // (see the strategy comment above this method).
  _layout->add(_root_group->big_primitive_fields());
  _layout->add(_root_group->oop_fields());
  _layout->add(_root_group->small_primitive_fields());

  LayoutRawBlock* first_field = _layout->first_field_block();
  if (first_field != nullptr) {
    _payload_offset = _layout->first_field_block()->offset();
    _payload_size_in_bytes = _layout->last_block()->offset() - _layout->first_field_block()->offset();
  } else {
    // Concrete value classes always have at least the injected ".empty" field,
    // so only abstract value classes can reach this branch.
    assert(_is_abstract_value, "Concrete inline types must have at least one field");
    _payload_offset = _layout->blocks()->size();
    _payload_size_in_bytes = 0;
  }

  // Determining if the value class is naturally atomic:
  if ((!_layout->super_has_fields() && _declared_non_static_fields_count <= 1 && !_has_non_naturally_atomic_fields)
      || (_layout->super_has_fields() && _super_klass->is_naturally_atomic() && _declared_non_static_fields_count == 0)) {
    _is_naturally_atomic = true;
  }

  // At this point, the characteristics of the raw layout (used in standalone instances) are known.
  // From this, additional layouts will be computed: atomic and nullable layouts
  // Once those additional layouts are computed, the raw layout might need some adjustments

  bool vm_uses_flattening = UseFieldFlattening || UseArrayFlattening;

  if (!_is_abstract_value && vm_uses_flattening) { // Flat layouts are only for concrete value classes
    // Validation of the non atomic layout
    if (UseNonAtomicValueFlattening && !AlwaysAtomicAccesses && (!_must_be_atomic || _is_naturally_atomic)) {
      _non_atomic_layout_size_in_bytes = _payload_size_in_bytes;
      _non_atomic_layout_alignment = _payload_alignment;
    }

    // Next step is to compute the characteristics for a layout enabling atomic updates
    if (UseAtomicValueFlattening) {
      // Atomic accesses require a power-of-two size no larger than the
      // platform's maximum atomic operation size.
      int atomic_size = _payload_size_in_bytes == 0 ? 0 : round_up_power_of_2(_payload_size_in_bytes);
      if (atomic_size <= (int)MAX_ATOMIC_OP_SIZE) {
        _atomic_layout_size_in_bytes = atomic_size;
      }
    }

    // Next step is the nullable layout: the layout must include a null marker and must also be atomic
    if (UseNullableValueFlattening) {
      // Looking if there's an empty slot inside the layout that could be used to store a null marker
      // FIXME: could it be possible to re-use the .empty field as a null marker for empty values?
      LayoutRawBlock* b = _layout->first_field_block();
      assert(b != nullptr, "A concrete value class must have at least one (possible dummy) field");
      int null_marker_offset = -1;
      if (_is_empty_inline_class) {
        // Reusing the dummy field as a field marker
        assert(_field_info->adr_at(b->field_index())->name(_constant_pool) == vmSymbols::empty_marker_name(), "b must be the dummy field");
        null_marker_offset = b->offset();
      } else {
        // Scan from the first field block for an EMPTY hole in the layout.
        while (b != _layout->last_block()) {
          if (b->block_kind() == LayoutRawBlock::EMPTY) {
            break;
          }
          b = b->next_block();
        }
        if (b != _layout->last_block()) {
          // found an empty slot, register its offset from the beginning of the payload
          null_marker_offset = b->offset();
          LayoutRawBlock* marker = new LayoutRawBlock(LayoutRawBlock::NULL_MARKER, 1);
          _layout->add_field_at_offset(marker, b->offset());
        }
        if (null_marker_offset == -1) { // no empty slot available to store the null marker, need to inject one
          int last_offset = _layout->last_block()->offset();
          LayoutRawBlock* marker = new LayoutRawBlock(LayoutRawBlock::NULL_MARKER, 1);
          _layout->insert_field_block(_layout->last_block(), marker);
          assert(marker->offset() == last_offset, "Null marker should have been inserted at the end");
          null_marker_offset = marker->offset();
        }
      }

      // Now that the null marker is there, the size of the nullable layout must be computed (remember, must be atomic too)
      int new_raw_size = _layout->last_block()->offset() - _layout->first_field_block()->offset();
      int nullable_size = round_up_power_of_2(new_raw_size);
      if (nullable_size <= (int)MAX_ATOMIC_OP_SIZE) {
        _nullable_layout_size_in_bytes = nullable_size;
        _null_marker_offset = null_marker_offset;
      } else {
        // If the nullable layout is rejected, the NULL_MARKER block should be removed
        // from the layout, otherwise it will appear anyway if the layout is printed
        if (!_is_empty_inline_class) { // empty values don't have a dedicated NULL_MARKER block
          _layout->remove_null_marker();
        }
        _null_marker_offset = -1;
      }
    }
    // If the inline class has an atomic or nullable (which is also atomic) layout,
    // we want the raw layout to have the same alignment as those atomic layouts so access codes
    // could remain simple (single instruction without intermediate copy). This might require
    // shifting all fields in the raw layout, but this operation is possible only if the class
    // doesn't have inherited fields (offsets of inherited fields cannot be changed). If a
    // field shift is needed but not possible, all atomic layouts are disabled and only reference
    // and loosely consistent are supported.
    int required_alignment = _payload_alignment;
    if (has_atomic_layout() && _payload_alignment < atomic_layout_size_in_bytes()) {
      required_alignment = atomic_layout_size_in_bytes();
    }
    if (has_nullable_atomic_layout() && _payload_alignment < nullable_layout_size_in_bytes()) {
      required_alignment = nullable_layout_size_in_bytes();
    }
    int shift = first_field->offset() % required_alignment;
    if (shift != 0) {
      if (required_alignment > _payload_alignment && !_layout->has_inherited_fields()) {
        // Shifting is possible: move all fields and recompute the offsets
        // that depend on field positions.
        assert(_layout->first_field_block() != nullptr, "A concrete value class must have at least one (possible dummy) field");
        _layout->shift_fields(shift);
        _payload_offset = _layout->first_field_block()->offset();
        if (has_nullable_atomic_layout()) {
          assert(!_is_empty_inline_class, "Should not get here with empty values");
          _null_marker_offset = _layout->find_null_marker()->offset();
        }
        _payload_alignment = required_alignment;
      } else {
        // Shift needed but not possible: give up all atomic flat layouts.
        _atomic_layout_size_in_bytes = -1;
        if (has_nullable_atomic_layout() && !_is_empty_inline_class) { // empty values don't have a dedicated NULL_MARKER block
          _layout->remove_null_marker();
        }
        _nullable_layout_size_in_bytes = -1;
        _null_marker_offset = -1;
      }
    } else {
      _payload_alignment = required_alignment;
    }

    // If the inline class has a nullable layout, the layout used in heap allocated standalone
    // instances must also be the nullable layout, in order to be able to set the null marker to
    // non-null before copying the payload to other containers.
    if (has_nullable_atomic_layout() && payload_layout_size_in_bytes() < nullable_layout_size_in_bytes()) {
      _payload_size_in_bytes = nullable_layout_size_in_bytes();
    }
  }
  // Warning:: InstanceMirrorKlass expects static oops to be allocated first
  _static_layout->add_contiguously(_static_fields->oop_fields());
  _static_layout->add(_static_fields->big_primitive_fields());
  _static_layout->add(_static_fields->small_primitive_fields());

  epilogue();
}
1226
1227 void FieldLayoutBuilder::add_flat_field_oopmap(OopMapBlocksBuilder* nonstatic_oop_maps,
1228 InlineKlass* vklass, int offset) {
1229 int diff = offset - vklass->payload_offset();
1230 const OopMapBlock* map = vklass->start_of_nonstatic_oop_maps();
1231 const OopMapBlock* last_map = map + vklass->nonstatic_oop_map_count();
1232 while (map < last_map) {
1233 nonstatic_oop_maps->add(map->offset() + diff, map->count());
1234 map++;
1235 }
1236 }
1237
1238 void FieldLayoutBuilder::register_embedded_oops_from_list(OopMapBlocksBuilder* nonstatic_oop_maps, GrowableArray<LayoutRawBlock*>* list) {
1239 if (list == nullptr) return;
1240 for (int i = 0; i < list->length(); i++) {
1241 LayoutRawBlock* f = list->at(i);
1242 if (f->block_kind() == LayoutRawBlock::FLAT) {
1243 InlineKlass* vk = f->inline_klass();
1244 assert(vk != nullptr, "Should have been initialized");
1245 if (vk->contains_oops()) {
1246 add_flat_field_oopmap(nonstatic_oop_maps, vk, f->offset());
1247 }
1248 }
1249 }
1250 }
1251
1252 void FieldLayoutBuilder::register_embedded_oops(OopMapBlocksBuilder* nonstatic_oop_maps, FieldGroup* group) {
1253 if (group->oop_fields() != nullptr) {
1254 for (int i = 0; i < group->oop_fields()->length(); i++) {
1255 LayoutRawBlock* b = group->oop_fields()->at(i);
1256 nonstatic_oop_maps->add(b->offset(), 1);
1257 }
1258 }
1259 register_embedded_oops_from_list(nonstatic_oop_maps, group->big_primitive_fields());
1260 register_embedded_oops_from_list(nonstatic_oop_maps, group->small_primitive_fields());
1261 }
1262
// Finalizes the layout computation: builds the oop maps, computes instance
// and static field sizes, publishes the results into _info for InstanceKlass
// creation, verifies offsets in debug builds, and optionally prints the layout.
void FieldLayoutBuilder::epilogue() {
  // Computing oopmaps
  OopMapBlocksBuilder* nonstatic_oop_maps =
      new OopMapBlocksBuilder(_nonstatic_oopmap_count);
  int super_oop_map_count = (_super_klass == nullptr) ? 0 :_super_klass->nonstatic_oop_map_count();
  if (super_oop_map_count > 0) {
    // Inherited oop maps come first, before this class's own entries.
    nonstatic_oop_maps->initialize_inherited_blocks(_super_klass->start_of_nonstatic_oop_maps(),
                                                    _super_klass->nonstatic_oop_map_count());
  }
  register_embedded_oops(nonstatic_oop_maps, _root_group);
  if (!_contended_groups.is_empty()) {
    for (int i = 0; i < _contended_groups.length(); i++) {
      FieldGroup* cg = _contended_groups.at(i);
      if (cg->oop_count() > 0) {
        assert(cg->oop_fields() != nullptr && cg->oop_fields()->at(0) != nullptr, "oop_count > 0 but no oop fields found");
        register_embedded_oops(nonstatic_oop_maps, cg);
      }
    }
  }
  // Merge adjacent/overlapping map entries into the minimal set.
  nonstatic_oop_maps->compact();

  int instance_end = align_up(_layout->last_block()->offset(), wordSize);
  int static_fields_end = align_up(_static_layout->last_block()->offset(), wordSize);
  // Static field size is measured in words from the start of the static
  // fields area inside the class's java mirror.
  int static_fields_size = (static_fields_end -
                            InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
  int nonstatic_field_end = align_up(_layout->last_block()->offset(), heapOopSize);

  // Pass back information needed for InstanceKlass creation

  _info->oop_map_blocks = nonstatic_oop_maps;
  _info->_instance_size = align_object_size(instance_end / wordSize);
  _info->_static_field_size = static_fields_size;
  _info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
  _info->_has_nonstatic_fields = _has_nonstatic_fields;
  _info->_has_inline_fields = _has_inline_type_fields;
  _info->_is_naturally_atomic = _is_naturally_atomic;
  if (_is_inline_type) {
    // Flat layout characteristics are only meaningful for inline types.
    _info->_must_be_atomic = _must_be_atomic;
    _info->_payload_alignment = _payload_alignment;
    _info->_payload_offset = _payload_offset;
    _info->_payload_size_in_bytes = _payload_size_in_bytes;
    _info->_non_atomic_size_in_bytes = _non_atomic_layout_size_in_bytes;
    _info->_non_atomic_alignment = _non_atomic_layout_alignment;
    _info->_atomic_layout_size_in_bytes = _atomic_layout_size_in_bytes;
    _info->_nullable_layout_size_in_bytes = _nullable_layout_size_in_bytes;
    _info->_null_marker_offset = _null_marker_offset;
    _info->_null_reset_value_offset = _static_layout->null_reset_value_offset();
    _info->_is_empty_inline_klass = _is_empty_inline_class;
  }

  // This may be too restrictive, since if all the fields fit in 64
  // bits we could make the decision to align instances of this class
  // to 64-bit boundaries, and load and store them as single words.
  // And on machines which supported larger atomics we could similarly
  // allow larger values to be atomic, if properly aligned.

#ifdef ASSERT
  // Tests verifying integrity of field layouts are using the output of -XX:+PrintFieldLayout
  // which prints the details of LayoutRawBlocks used to compute the layout.
  // The code below checks that offsets in the _field_info meta-data match offsets
  // in the LayoutRawBlocks
  LayoutRawBlock* b = _layout->blocks();
  while(b != _layout->last_block()) {
    if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
      if (_field_info->adr_at(b->field_index())->offset() != (u4)b->offset()) {
        tty->print_cr("Offset from field info = %d, offset from block = %d", (int)_field_info->adr_at(b->field_index())->offset(), b->offset());
      }
      assert(_field_info->adr_at(b->field_index())->offset() == (u4)b->offset()," Must match");
    }
    b = b->next_block();
  }
  // Same consistency check for the static field layout.
  b = _static_layout->blocks();
  while(b != _static_layout->last_block()) {
    if (b->block_kind() == LayoutRawBlock::REGULAR || b->block_kind() == LayoutRawBlock::FLAT) {
      assert(_field_info->adr_at(b->field_index())->offset() == (u4)b->offset()," Must match");
    }
    b = b->next_block();
  }
#endif // ASSERT

  // Emit the format header only once per VM run.
  static bool first_layout_print = true;


  if (PrintFieldLayout || (PrintInlineLayout && _has_flattening_information)) {
    ResourceMark rm;
    stringStream st;
    if (first_layout_print) {
      st.print_cr("Field layout log format: @offset size/alignment [name] [signature] [comment]");
      st.print_cr("Heap oop size = %d", heapOopSize);
      first_layout_print = false;
    }
    if (_super_klass != nullptr) {
      st.print_cr("Layout of class %s@%p extends %s@%p", _classname->as_C_string(),
                  _loader_data, _super_klass->name()->as_C_string(), _super_klass->class_loader_data());
    } else {
      st.print_cr("Layout of class %s@%p", _classname->as_C_string(), _loader_data);
    }
    st.print_cr("Instance fields:");
    _layout->print(&st, false, _super_klass, _inline_layout_info_array);
    st.print_cr("Static fields:");
    _static_layout->print(&st, true, nullptr, _inline_layout_info_array);
    st.print_cr("Instance size = %d bytes", _info->_instance_size * wordSize);
    if (_is_inline_type) {
      st.print_cr("First field offset = %d", _payload_offset);
      st.print_cr("Payload layout: %d/%d", _payload_size_in_bytes, _payload_alignment);
      if (has_non_atomic_flat_layout()) {
        st.print_cr("Non atomic flat layout: %d/%d", _non_atomic_layout_size_in_bytes, _non_atomic_layout_alignment);
      } else {
        st.print_cr("Non atomic flat layout: -/-");
      }
      if (has_atomic_layout()) {
        st.print_cr("Atomic flat layout: %d/%d", _atomic_layout_size_in_bytes, _atomic_layout_size_in_bytes);
      } else {
        st.print_cr("Atomic flat layout: -/-");
      }
      if (has_nullable_atomic_layout()) {
        st.print_cr("Nullable flat layout: %d/%d", _nullable_layout_size_in_bytes, _nullable_layout_size_in_bytes);
      } else {
        st.print_cr("Nullable flat layout: -/-");
      }
      if (_null_marker_offset != -1) {
        st.print_cr("Null marker offset = %d", _null_marker_offset);
      }
    }
    st.print_cr("---");
    // Print output all together.
    tty->print_raw(st.as_string());
  }
}
1392
1393 void FieldLayoutBuilder::build_layout() {
1394 if (_is_inline_type || _is_abstract_value) {
1395 compute_inline_class_layout();
1396 } else {
1397 compute_regular_layout();
1398 }
1399 }
|