2 * Copyright (C) 2014 The Android Open Source Project
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
8 * http://www.apache.org/licenses/LICENSE-2.0
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
17 #ifndef ART_RUNTIME_STACK_MAP_H_
18 #define ART_RUNTIME_STACK_MAP_H_
20 #include "arch/code_offset.h"
21 #include "base/bit_vector.h"
22 #include "base/bit_utils.h"
23 #include "bit_memory_region.h"
25 #include "memory_region.h"
26 #include "method_info.h"
31 class VariableIndentationOutputStream;
33 // Size of a frame slot, in bytes. This constant is a signed value,
34 // to please the compiler in arithmetic operations involving int32_t
36 static constexpr ssize_t kFrameSlotSize = 4;
38 // Size of Dex virtual registers.
// (Presumably in bytes, matching kFrameSlotSize — confirm at use sites.)
39 static constexpr size_t kVRegSize = 4;
43 class StackMapEncoding;
44 struct CodeInfoEncoding;
47 * Classes in the following file are wrappers on stack map information backed
48 * by a MemoryRegion. As such they read and write to the region, they don't have
52 // Dex register location container used by DexRegisterMap and StackMapStream.
53 class DexRegisterLocation {
56 * The location kind used to populate the Dex register information in a
57 * StackMapStream can either be:
58 * - kStack: vreg stored on the stack, value holds the stack offset;
59 * - kInRegister: vreg stored in low 32 bits of a core physical register,
60 * value holds the register number;
61 * - kInRegisterHigh: vreg stored in high 32 bits of a core physical register,
62 * value holds the register number;
63 * - kInFpuRegister: vreg stored in low 32 bits of an FPU register,
64 * value holds the register number;
65 * - kInFpuRegisterHigh: vreg stored in high 32 bits of an FPU register,
66 * value holds the register number;
67 * - kConstant: value holds the constant;
69 * In addition, DexRegisterMap also uses these values:
70 * - kInStackLargeOffset: value holds a "large" stack offset (greater than
71 * or equal to 128 bytes);
72 * - kConstantLargeValue: value holds a "large" constant (lower than 0, or
73 * greater than or equal to 32);
74 * - kNone: the register has no location, meaning it has not been set.
76 enum class Kind : uint8_t {
77 // Short location kinds, for entries fitting on one byte (3 bits
78 // for the kind, 5 bits for the value) in a DexRegisterMap.
79 kInStack = 0, // 0b000
80 kInRegister = 1, // 0b001
81 kInRegisterHigh = 2, // 0b010
82 kInFpuRegister = 3, // 0b011
83 kInFpuRegisterHigh = 4, // 0b100
84 kConstant = 5, // 0b101
86 // Large location kinds, requiring a 5-byte encoding (1 byte for the
87 // kind, 4 bytes for the value).
89 // Stack location at a large offset, meaning that the offset value
90 // divided by the stack frame slot size (4 bytes) cannot fit on a
91 // 5-bit unsigned integer (i.e., this offset value is greater than
92 // or equal to 2^5 * 4 = 128 bytes).
93 kInStackLargeOffset = 6, // 0b110
95 // Large constant, that cannot fit on a 5-bit signed integer (i.e.,
96 // lower than 0, or greater than or equal to 2^5 = 32).
97 kConstantLargeValue = 7, // 0b111
99 // Entries with no location are not stored and do not need own marker.
// kNone deliberately lives outside the 3-bit encodable range [0, 7].
100 kNone = static_cast<uint8_t>(-1),
102 kLastLocationKind = kConstantLargeValue
107 "art::DexRegisterLocation::Kind has a size different from one byte.");
// Returns true for the one-byte ("short") kinds; the large kinds use the
// 5-byte encoding (see the Kind comments above).
109 static bool IsShortLocationKind(Kind kind) {
112 case Kind::kInRegister:
113 case Kind::kInRegisterHigh:
114 case Kind::kInFpuRegister:
115 case Kind::kInFpuRegisterHigh:
116 case Kind::kConstant:
119 case Kind::kInStackLargeOffset:
120 case Kind::kConstantLargeValue:
124 LOG(FATAL) << "Unexpected location kind";
129 // Convert `kind` to a "surface" kind, i.e. one that doesn't include
130 // any value with a "large" qualifier.
131 // TODO: Introduce another enum type for the surface kind?
132 static Kind ConvertToSurfaceKind(Kind kind) {
135 case Kind::kInRegister:
136 case Kind::kInRegisterHigh:
137 case Kind::kInFpuRegister:
138 case Kind::kInFpuRegisterHigh:
139 case Kind::kConstant:
// The two "large" kinds collapse onto their short counterparts.
142 case Kind::kInStackLargeOffset:
143 return Kind::kInStack;
145 case Kind::kConstantLargeValue:
146 return Kind::kConstant;
154 // Required by art::StackMapStream::LocationCatalogEntriesIndices.
// Default-constructs a kNone location with value 0 (same as None()).
155 DexRegisterLocation() : kind_(Kind::kNone), value_(0) {}
157 DexRegisterLocation(Kind kind, int32_t value) : kind_(kind), value_(value) {}
159 static DexRegisterLocation None() {
160 return DexRegisterLocation(Kind::kNone, 0);
163 // Get the "surface" kind of the location, i.e., the one that doesn't
164 // include any value with a "large" qualifier.
165 Kind GetKind() const {
166 return ConvertToSurfaceKind(kind_);
169 // Get the value of the location.
170 int32_t GetValue() const { return value_; }
172 // Get the actual kind of the location.
173 Kind GetInternalKind() const { return kind_; }
// Value equality on the (internal kind, value) pair — large vs. short
// kinds compare as different even if their surface kinds match.
175 bool operator==(DexRegisterLocation other) const {
176 return kind_ == other.kind_ && value_ == other.value_;
179 bool operator!=(DexRegisterLocation other) const {
180 return !(*this == other);
187 friend class DexRegisterLocationHashFn;
190 std::ostream& operator<<(std::ostream& stream, const DexRegisterLocation::Kind& kind);
193 * Store information on unique Dex register locations used in a method.
194 * The information is of the form:
196 * [DexRegisterLocation+].
198 * DexRegisterLocations are either 1- or 5-byte wide (see art::DexRegisterLocation::Kind).
200 class DexRegisterLocationCatalog {
202 explicit DexRegisterLocationCatalog(MemoryRegion region) : region_(region) {}
204 // Short (compressed) location, fitting on one byte.
205 typedef uint8_t ShortLocation;
// Serialize `dex_register_location` at byte `offset` in the backing region,
// choosing the 1-byte (short) or 5-byte (large) wire format.
207 void SetRegisterInfo(size_t offset, const DexRegisterLocation& dex_register_location) {
208 DexRegisterLocation::Kind kind = ComputeCompressedKind(dex_register_location);
209 int32_t value = dex_register_location.GetValue();
210 if (DexRegisterLocation::IsShortLocationKind(kind)) {
211 // Short location. Compress the kind and the value as a single byte.
212 if (kind == DexRegisterLocation::Kind::kInStack) {
213 // Instead of storing stack offsets expressed in bytes for
214 // short stack locations, store slot offsets. A stack offset
215 // is a multiple of 4 (kFrameSlotSize). This means that by
216 // dividing it by 4, we can fit values from the [0, 128)
217 // interval in a short stack location, and not just values
218 // from the [0, 32) interval.
219 DCHECK_EQ(value % kFrameSlotSize, 0);
220 value /= kFrameSlotSize;
222 DCHECK(IsShortValue(value)) << value;
223 region_.StoreUnaligned<ShortLocation>(offset, MakeShortLocation(kind, value));
225 // Large location. Write the location on one byte and the value
227 DCHECK(!IsShortValue(value)) << value;
228 if (kind == DexRegisterLocation::Kind::kInStackLargeOffset) {
229 // Also divide large stack offsets by 4 for the sake of consistency.
230 DCHECK_EQ(value % kFrameSlotSize, 0);
231 value /= kFrameSlotSize;
233 // Data can be unaligned as the written Dex register locations can
234 // either be 1-byte or 5-byte wide. Use
235 // art::MemoryRegion::StoreUnaligned instead of
236 // art::MemoryRegion::Store to prevent unaligned word accesses on ARM.
237 region_.StoreUnaligned<DexRegisterLocation::Kind>(offset, kind);
238 region_.StoreUnaligned<int32_t>(offset + sizeof(DexRegisterLocation::Kind), value);
242 // Find the offset of the location catalog entry number `location_catalog_entry_index`.
// O(index) linear scan: entries are variable-width (1 or 5 bytes), so there
// is no random access into the catalog.
243 size_t FindLocationOffset(size_t location_catalog_entry_index) const {
244 size_t offset = kFixedSize;
245 // Skip the first `location_catalog_entry_index` entries.
246 for (uint16_t i = 0; i < location_catalog_entry_index; ++i) {
247 // Read the next byte and inspect its first 3 bits to decide
248 // whether it is a short or a large location.
249 DexRegisterLocation::Kind kind = ExtractKindAtOffset(offset);
250 if (DexRegisterLocation::IsShortLocationKind(kind)) {
251 // Short location. Skip the current byte.
252 offset += SingleShortEntrySize();
254 // Large location. Skip the 5 next bytes.
255 offset += SingleLargeEntrySize();
261 // Get the internal kind of entry at `location_catalog_entry_index`.
262 DexRegisterLocation::Kind GetLocationInternalKind(size_t location_catalog_entry_index) const {
263 if (location_catalog_entry_index == kNoLocationEntryIndex) {
264 return DexRegisterLocation::Kind::kNone;
266 return ExtractKindAtOffset(FindLocationOffset(location_catalog_entry_index));
269 // Get the (surface) kind and value of entry at `location_catalog_entry_index`.
// Inverse of SetRegisterInfo: re-expands slot offsets back into byte offsets.
270 DexRegisterLocation GetDexRegisterLocation(size_t location_catalog_entry_index) const {
271 if (location_catalog_entry_index == kNoLocationEntryIndex) {
272 return DexRegisterLocation::None();
274 size_t offset = FindLocationOffset(location_catalog_entry_index);
275 // Read the first byte and inspect its first 3 bits to get the location.
276 ShortLocation first_byte = region_.LoadUnaligned<ShortLocation>(offset);
277 DexRegisterLocation::Kind kind = ExtractKindFromShortLocation(first_byte);
278 if (DexRegisterLocation::IsShortLocationKind(kind)) {
279 // Short location. Extract the value from the remaining 5 bits.
280 int32_t value = ExtractValueFromShortLocation(first_byte);
281 if (kind == DexRegisterLocation::Kind::kInStack) {
282 // Convert the stack slot (short) offset to a byte offset value.
283 value *= kFrameSlotSize;
285 return DexRegisterLocation(kind, value);
287 // Large location. Read the four next bytes to get the value.
288 int32_t value = region_.LoadUnaligned<int32_t>(offset + sizeof(DexRegisterLocation::Kind));
289 if (kind == DexRegisterLocation::Kind::kInStackLargeOffset) {
290 // Convert the stack slot (large) offset to a byte offset value.
291 value *= kFrameSlotSize;
293 return DexRegisterLocation(kind, value);
297 // Compute the compressed kind of `location`.
// Picks the short kind when the value fits in 5 bits, otherwise the
// corresponding "large" kind. Input must be a surface kind (the large
// kinds and kNone are rejected with LOG(FATAL)).
298 static DexRegisterLocation::Kind ComputeCompressedKind(const DexRegisterLocation& location) {
299 DexRegisterLocation::Kind kind = location.GetInternalKind();
301 case DexRegisterLocation::Kind::kInStack:
302 return IsShortStackOffsetValue(location.GetValue())
303 ? DexRegisterLocation::Kind::kInStack
304 : DexRegisterLocation::Kind::kInStackLargeOffset;
306 case DexRegisterLocation::Kind::kInRegister:
307 case DexRegisterLocation::Kind::kInRegisterHigh:
308 DCHECK_GE(location.GetValue(), 0);
309 DCHECK_LT(location.GetValue(), 1 << kValueBits);
312 case DexRegisterLocation::Kind::kInFpuRegister:
313 case DexRegisterLocation::Kind::kInFpuRegisterHigh:
314 DCHECK_GE(location.GetValue(), 0);
315 DCHECK_LT(location.GetValue(), 1 << kValueBits);
318 case DexRegisterLocation::Kind::kConstant:
319 return IsShortConstantValue(location.GetValue())
320 ? DexRegisterLocation::Kind::kConstant
321 : DexRegisterLocation::Kind::kConstantLargeValue;
323 case DexRegisterLocation::Kind::kConstantLargeValue:
324 case DexRegisterLocation::Kind::kInStackLargeOffset:
325 case DexRegisterLocation::Kind::kNone:
326 LOG(FATAL) << "Unexpected location kind " << kind;
331 // Can `location` be turned into a short location?
332 static bool CanBeEncodedAsShortLocation(const DexRegisterLocation& location) {
333 DexRegisterLocation::Kind kind = location.GetInternalKind();
335 case DexRegisterLocation::Kind::kInStack:
336 return IsShortStackOffsetValue(location.GetValue());
// Register numbers always fit in 5 bits (DCHECKed in ComputeCompressedKind).
338 case DexRegisterLocation::Kind::kInRegister:
339 case DexRegisterLocation::Kind::kInRegisterHigh:
340 case DexRegisterLocation::Kind::kInFpuRegister:
341 case DexRegisterLocation::Kind::kInFpuRegisterHigh:
344 case DexRegisterLocation::Kind::kConstant:
345 return IsShortConstantValue(location.GetValue());
347 case DexRegisterLocation::Kind::kConstantLargeValue:
348 case DexRegisterLocation::Kind::kInStackLargeOffset:
349 case DexRegisterLocation::Kind::kNone:
350 LOG(FATAL) << "Unexpected location kind " << kind;
// Serialized size in bytes (1 or 5) of `location`.
355 static size_t EntrySize(const DexRegisterLocation& location) {
356 return CanBeEncodedAsShortLocation(location) ? SingleShortEntrySize() : SingleLargeEntrySize();
359 static size_t SingleShortEntrySize() {
360 return sizeof(ShortLocation);
363 static size_t SingleLargeEntrySize() {
364 return sizeof(DexRegisterLocation::Kind) + sizeof(int32_t);
367 size_t Size() const {
368 return region_.size();
371 void Dump(VariableIndentationOutputStream* vios,
372 const CodeInfo& code_info);
374 // Special (invalid) Dex register location catalog entry index meaning
375 // that there is no location for a given Dex register (i.e., it is
376 // mapped to a DexRegisterLocation::Kind::kNone location).
377 static constexpr size_t kNoLocationEntryIndex = -1;
380 static constexpr int kFixedSize = 0;
382 // Width of the kind "field" in a short location, in bits.
383 static constexpr size_t kKindBits = 3;
384 // Width of the value "field" in a short location, in bits.
385 static constexpr size_t kValueBits = 5;
387 static constexpr uint8_t kKindMask = (1 << kKindBits) - 1;
388 static constexpr int32_t kValueMask = (1 << kValueBits) - 1;
389 static constexpr size_t kKindOffset = 0;
390 static constexpr size_t kValueOffset = kKindBits;
// True when the byte offset, expressed in 4-byte slots, fits in 5 bits.
392 static bool IsShortStackOffsetValue(int32_t value) {
393 DCHECK_EQ(value % kFrameSlotSize, 0);
394 return IsShortValue(value / kFrameSlotSize);
397 static bool IsShortConstantValue(int32_t value) {
398 return IsShortValue(value);
401 static bool IsShortValue(int32_t value) {
402 return IsUint<kValueBits>(value);
// Pack (kind, value) into one byte: kind in the low 3 bits, value in the
// high 5 bits.
405 static ShortLocation MakeShortLocation(DexRegisterLocation::Kind kind, int32_t value) {
406 uint8_t kind_integer_value = static_cast<uint8_t>(kind);
407 DCHECK(IsUint<kKindBits>(kind_integer_value)) << kind_integer_value;
408 DCHECK(IsShortValue(value)) << value;
409 return (kind_integer_value & kKindMask) << kKindOffset
410 | (value & kValueMask) << kValueOffset;
413 static DexRegisterLocation::Kind ExtractKindFromShortLocation(ShortLocation location) {
414 uint8_t kind = (location >> kKindOffset) & kKindMask;
415 DCHECK_LE(kind, static_cast<uint8_t>(DexRegisterLocation::Kind::kLastLocationKind));
416 // We do not encode kNone locations in the stack map.
417 DCHECK_NE(kind, static_cast<uint8_t>(DexRegisterLocation::Kind::kNone));
418 return static_cast<DexRegisterLocation::Kind>(kind);
421 static int32_t ExtractValueFromShortLocation(ShortLocation location) {
422 return (location >> kValueOffset) & kValueMask;
425 // Extract a location kind from the byte at position `offset`.
426 DexRegisterLocation::Kind ExtractKindAtOffset(size_t offset) const {
427 ShortLocation first_byte = region_.LoadUnaligned<ShortLocation>(offset);
428 return ExtractKindFromShortLocation(first_byte);
// Backing storage; this class does not own the bytes it reads/writes.
431 MemoryRegion region_;
433 friend class CodeInfo;
434 friend class StackMapStream;
437 /* Information on Dex register locations for a specific PC, mapping a
438 * stack map's Dex register to a location entry in a DexRegisterLocationCatalog.
439 * The information is of the form:
441 * [live_bit_mask, entries*]
443 * where entries are concatenated unsigned integer values encoded on a number
444 * of bits (fixed per DexRegisterMap instances of a CodeInfo object) depending
445 * on the number of entries in the Dex register location catalog
446 * (see DexRegisterMap::SingleEntrySizeInBits). The map is 1-byte aligned.
448 class DexRegisterMap {
450 explicit DexRegisterMap(MemoryRegion region) : region_(region) {}
// A map backed by a null region is "absent" (no Dex register info).
453 bool IsValid() const { return region_.pointer() != nullptr; }
455 // Get the surface kind of Dex register `dex_register_number`.
456 DexRegisterLocation::Kind GetLocationKind(uint16_t dex_register_number,
457 uint16_t number_of_dex_registers,
458 const CodeInfo& code_info,
459 const CodeInfoEncoding& enc) const {
460 return DexRegisterLocation::ConvertToSurfaceKind(
461 GetLocationInternalKind(dex_register_number, number_of_dex_registers, code_info, enc));
464 // Get the internal kind of Dex register `dex_register_number`.
465 DexRegisterLocation::Kind GetLocationInternalKind(uint16_t dex_register_number,
466 uint16_t number_of_dex_registers,
467 const CodeInfo& code_info,
468 const CodeInfoEncoding& enc) const;
470 // Get the Dex register location `dex_register_number`.
471 DexRegisterLocation GetDexRegisterLocation(uint16_t dex_register_number,
472 uint16_t number_of_dex_registers,
473 const CodeInfo& code_info,
474 const CodeInfoEncoding& enc) const;
// Convenience accessor; DCHECKs that the register actually lives on the stack.
476 int32_t GetStackOffsetInBytes(uint16_t dex_register_number,
477 uint16_t number_of_dex_registers,
478 const CodeInfo& code_info,
479 const CodeInfoEncoding& enc) const {
480 DexRegisterLocation location =
481 GetDexRegisterLocation(dex_register_number, number_of_dex_registers, code_info, enc);
482 DCHECK(location.GetKind() == DexRegisterLocation::Kind::kInStack);
483 // GetDexRegisterLocation returns the offset in bytes.
484 return location.GetValue();
// Convenience accessor; DCHECKs that the register holds a constant.
487 int32_t GetConstant(uint16_t dex_register_number,
488 uint16_t number_of_dex_registers,
489 const CodeInfo& code_info,
490 const CodeInfoEncoding& enc) const {
491 DexRegisterLocation location =
492 GetDexRegisterLocation(dex_register_number, number_of_dex_registers, code_info, enc);
493 DCHECK_EQ(location.GetKind(), DexRegisterLocation::Kind::kConstant);
494 return location.GetValue();
// Convenience accessor; DCHECKs that the register lives in a core or FPU
// machine register (either half).
497 int32_t GetMachineRegister(uint16_t dex_register_number,
498 uint16_t number_of_dex_registers,
499 const CodeInfo& code_info,
500 const CodeInfoEncoding& enc) const {
501 DexRegisterLocation location =
502 GetDexRegisterLocation(dex_register_number, number_of_dex_registers, code_info, enc);
503 DCHECK(location.GetInternalKind() == DexRegisterLocation::Kind::kInRegister ||
504 location.GetInternalKind() == DexRegisterLocation::Kind::kInRegisterHigh ||
505 location.GetInternalKind() == DexRegisterLocation::Kind::kInFpuRegister ||
506 location.GetInternalKind() == DexRegisterLocation::Kind::kInFpuRegisterHigh)
507 << location.GetInternalKind();
508 return location.GetValue();
511 // Get the index of the entry in the Dex register location catalog
512 // corresponding to `dex_register_number`.
513 size_t GetLocationCatalogEntryIndex(uint16_t dex_register_number,
514 uint16_t number_of_dex_registers,
515 size_t number_of_location_catalog_entries) const {
516 if (!IsDexRegisterLive(dex_register_number)) {
517 return DexRegisterLocationCatalog::kNoLocationEntryIndex;
520 if (number_of_location_catalog_entries == 1) {
521 // We do not allocate space for location maps in the case of a
522 // single-entry location catalog, as it is useless. The only valid
527 // The bit offset of the beginning of the map locations.
528 size_t map_locations_offset_in_bits =
529 GetLocationMappingDataOffset(number_of_dex_registers) * kBitsPerByte;
530 size_t index_in_dex_register_map = GetIndexInDexRegisterMap(dex_register_number);
531 DCHECK_LT(index_in_dex_register_map, GetNumberOfLiveDexRegisters(number_of_dex_registers));
532 // The bit size of an entry.
533 size_t map_entry_size_in_bits = SingleEntrySizeInBits(number_of_location_catalog_entries);
534 // The bit offset where `index_in_dex_register_map` is located.
535 size_t entry_offset_in_bits =
536 map_locations_offset_in_bits + index_in_dex_register_map * map_entry_size_in_bits;
537 size_t location_catalog_entry_index =
538 region_.LoadBits(entry_offset_in_bits, map_entry_size_in_bits);
539 DCHECK_LT(location_catalog_entry_index, number_of_location_catalog_entries);
540 return location_catalog_entry_index;
543 // Map entry at `index_in_dex_register_map` to `location_catalog_entry_index`.
544 void SetLocationCatalogEntryIndex(size_t index_in_dex_register_map,
545 size_t location_catalog_entry_index,
546 uint16_t number_of_dex_registers,
547 size_t number_of_location_catalog_entries) {
548 DCHECK_LT(index_in_dex_register_map, GetNumberOfLiveDexRegisters(number_of_dex_registers));
549 DCHECK_LT(location_catalog_entry_index, number_of_location_catalog_entries);
551 if (number_of_location_catalog_entries == 1) {
552 // We do not allocate space for location maps in the case of a
553 // single-entry location catalog, as it is useless.
557 // The bit offset of the beginning of the map locations.
558 size_t map_locations_offset_in_bits =
559 GetLocationMappingDataOffset(number_of_dex_registers) * kBitsPerByte;
560 // The bit size of an entry.
561 size_t map_entry_size_in_bits = SingleEntrySizeInBits(number_of_location_catalog_entries);
562 // The bit offset where `index_in_dex_register_map` is located.
563 size_t entry_offset_in_bits =
564 map_locations_offset_in_bits + index_in_dex_register_map * map_entry_size_in_bits;
565 region_.StoreBits(entry_offset_in_bits, location_catalog_entry_index, map_entry_size_in_bits);
// Write the liveness bit for each of the first `number_of_dex_registers`
// registers from `live_dex_registers_mask` into the map's live bit mask.
568 void SetLiveBitMask(uint16_t number_of_dex_registers,
569 const BitVector& live_dex_registers_mask) {
570 size_t live_bit_mask_offset_in_bits = GetLiveBitMaskOffset() * kBitsPerByte;
571 for (uint16_t i = 0; i < number_of_dex_registers; ++i) {
572 region_.StoreBit(live_bit_mask_offset_in_bits + i, live_dex_registers_mask.IsBitSet(i));
576 ALWAYS_INLINE bool IsDexRegisterLive(uint16_t dex_register_number) const {
577 size_t live_bit_mask_offset_in_bits = GetLiveBitMaskOffset() * kBitsPerByte;
578 return region_.LoadBit(live_bit_mask_offset_in_bits + dex_register_number);
// Population count over the live bit mask; O(number_of_dex_registers)
// bit loads — no cached count is kept.
581 size_t GetNumberOfLiveDexRegisters(uint16_t number_of_dex_registers) const {
582 size_t number_of_live_dex_registers = 0;
583 for (size_t i = 0; i < number_of_dex_registers; ++i) {
584 if (IsDexRegisterLive(i)) {
585 ++number_of_live_dex_registers;
588 return number_of_live_dex_registers;
591 static size_t GetLiveBitMaskOffset() {
595 // Compute the size of the live register bit mask (in bytes), for a
596 // method having `number_of_dex_registers` Dex registers.
597 static size_t GetLiveBitMaskSize(uint16_t number_of_dex_registers) {
598 return RoundUp(number_of_dex_registers, kBitsPerByte) / kBitsPerByte;
// The mapping data is laid out directly after the live bit mask.
601 static size_t GetLocationMappingDataOffset(uint16_t number_of_dex_registers) {
602 return GetLiveBitMaskOffset() + GetLiveBitMaskSize(number_of_dex_registers);
// Size in bytes of the mapping data: one SingleEntrySizeInBits()-wide
// entry per live register, rounded up to a whole byte.
605 size_t GetLocationMappingDataSize(uint16_t number_of_dex_registers,
606 size_t number_of_location_catalog_entries) const {
607 size_t location_mapping_data_size_in_bits =
608 GetNumberOfLiveDexRegisters(number_of_dex_registers)
609 * SingleEntrySizeInBits(number_of_location_catalog_entries);
610 return RoundUp(location_mapping_data_size_in_bits, kBitsPerByte) / kBitsPerByte;
613 // Return the size of a map entry in bits. Note that if
614 // `number_of_location_catalog_entries` equals 1, this function returns 0,
615 // which is fine, as there is no need to allocate a map for a
616 // single-entry location catalog; the only valid location catalog entry index
617 // for a live register in this case is 0 and there is no need to
619 static size_t SingleEntrySizeInBits(size_t number_of_location_catalog_entries) {
620 // Handle the case of 0, as we cannot pass 0 to art::WhichPowerOf2.
621 return number_of_location_catalog_entries == 0
623 : WhichPowerOf2(RoundUpToPowerOfTwo(number_of_location_catalog_entries));
626 // Return the size of the DexRegisterMap object, in bytes.
627 size_t Size() const {
628 return region_.size();
631 void Dump(VariableIndentationOutputStream* vios,
632 const CodeInfo& code_info, uint16_t number_of_dex_registers) const;
635 // Return the index in the Dex register map corresponding to the Dex
636 // register number `dex_register_number`.
// A live register's map index equals the number of live registers with a
// smaller Dex register number (counted by the loop in
// GetNumberOfLiveDexRegisters), so this is O(dex_register_number).
637 size_t GetIndexInDexRegisterMap(uint16_t dex_register_number) const {
638 if (!IsDexRegisterLive(dex_register_number)) {
639 return kInvalidIndexInDexRegisterMap;
641 return GetNumberOfLiveDexRegisters(dex_register_number);
644 // Special (invalid) Dex register map entry index meaning that there
645 // is no index in the map for a given Dex register (i.e., it must
646 // have been mapped to a DexRegisterLocation::Kind::kNone location).
647 static constexpr size_t kInvalidIndexInDexRegisterMap = -1;
649 static constexpr int kFixedSize = 0;
// Backing storage; this class does not own the bytes it reads/writes.
651 MemoryRegion region_;
653 friend class CodeInfo;
654 friend class StackMapStream;
657 // Represents bit range of bit-packed integer field.
658 // We reuse the idea from ULEB128p1 to support encoding of -1 (aka 0xFFFFFFFF).
659 // If min_value is set to -1, we implicitly subtract one from any loaded value,
660 // and add one to any stored value. This is generalized to any negative values.
661 // In other words, min_value acts as a base and the stored value is added to it.
662 struct FieldEncoding {
// Describes a bit-packed integer field occupying [start_offset, end_offset)
// in some bit region. `min_value` is a bias: Store() writes
// (value - min_value) and Load() adds it back, which lets a field with
// min_value == -1 represent -1 (e.g. a "no value" marker) in unsigned bits.
663 FieldEncoding(size_t start_offset, size_t end_offset, int32_t min_value = 0)
664 : start_offset_(start_offset), end_offset_(end_offset), min_value_(min_value) {
665 DCHECK_LE(start_offset_, end_offset_);
666 DCHECK_LE(BitSize(), 32u);
669 ALWAYS_INLINE size_t BitSize() const { return end_offset_ - start_offset_; }
// `Region` is any type providing LoadBits/StoreBits/size_in_bits
// (e.g. BitMemoryRegion).
671 template <typename Region>
672 ALWAYS_INLINE int32_t Load(const Region& region) const {
673 DCHECK_LE(end_offset_, region.size_in_bits());
674 return static_cast<int32_t>(region.LoadBits(start_offset_, BitSize())) + min_value_;
677 template <typename Region>
678 ALWAYS_INLINE void Store(Region region, int32_t value) const {
679 region.StoreBits(start_offset_, value - min_value_, BitSize());
// Round-trip check: the stored bits must reload to the same value.
680 DCHECK_EQ(Load(region), value);
684 size_t start_offset_;
689 class StackMapEncoding {
692 : dex_pc_bit_offset_(0),
693 dex_register_map_bit_offset_(0),
694 inline_info_bit_offset_(0),
695 register_mask_index_bit_offset_(0),
696 stack_mask_index_bit_offset_(0),
697 total_bit_size_(0) {}
699 // Set stack map bit layout based on given sizes.
700 // Returns the size of stack map in bits.
// Fields are packed in ascending bit-offset order: native_pc, dex_pc,
// dex_register_map, inline_info, register_mask_index, stack_mask_index.
// Each field gets exactly MinimumBitsToStore(max) bits, so a field whose
// max is 0 occupies zero bits.
701 size_t SetFromSizes(size_t native_pc_max,
703 size_t dex_register_map_size,
704 size_t number_of_inline_info,
705 size_t number_of_register_masks,
706 size_t number_of_stack_masks) {
708 DCHECK_EQ(kNativePcBitOffset, total_bit_size_);
709 total_bit_size_ += MinimumBitsToStore(native_pc_max);
711 dex_pc_bit_offset_ = total_bit_size_;
712 total_bit_size_ += MinimumBitsToStore(1 /* kNoDexPc */ + dex_pc_max);
714 // We also need +1 for kNoDexRegisterMap, but since the size is strictly
715 // greater than any offset we might try to encode, we already implicitly have it.
716 dex_register_map_bit_offset_ = total_bit_size_;
717 total_bit_size_ += MinimumBitsToStore(dex_register_map_size);
719 // We also need +1 for kNoInlineInfo, but since the inline_info_size is strictly
720 // greater than the offset we might try to encode, we already implicitly have it.
721 // If inline_info_size is zero, we can encode only kNoInlineInfo (in zero bits).
722 inline_info_bit_offset_ = total_bit_size_;
723 total_bit_size_ += MinimumBitsToStore(number_of_inline_info);
725 register_mask_index_bit_offset_ = total_bit_size_;
726 total_bit_size_ += MinimumBitsToStore(number_of_register_masks);
728 stack_mask_index_bit_offset_ = total_bit_size_;
729 total_bit_size_ += MinimumBitsToStore(number_of_stack_masks);
731 return total_bit_size_;
// Each field's FieldEncoding spans from its own bit offset to the next
// field's bit offset; min_value == -1 fields reserve a "no value" marker.
734 ALWAYS_INLINE FieldEncoding GetNativePcEncoding() const {
735 return FieldEncoding(kNativePcBitOffset, dex_pc_bit_offset_);
737 ALWAYS_INLINE FieldEncoding GetDexPcEncoding() const {
738 return FieldEncoding(dex_pc_bit_offset_, dex_register_map_bit_offset_, -1 /* min_value */);
740 ALWAYS_INLINE FieldEncoding GetDexRegisterMapEncoding() const {
741 return FieldEncoding(dex_register_map_bit_offset_, inline_info_bit_offset_, -1 /* min_value */);
743 ALWAYS_INLINE FieldEncoding GetInlineInfoEncoding() const {
744 return FieldEncoding(inline_info_bit_offset_,
745 register_mask_index_bit_offset_,
748 ALWAYS_INLINE FieldEncoding GetRegisterMaskIndexEncoding() const {
749 return FieldEncoding(register_mask_index_bit_offset_, stack_mask_index_bit_offset_);
751 ALWAYS_INLINE FieldEncoding GetStackMaskIndexEncoding() const {
752 return FieldEncoding(stack_mask_index_bit_offset_, total_bit_size_);
754 ALWAYS_INLINE size_t BitSize() const {
755 return total_bit_size_;
758 // Encode the encoding into the vector.
// Raw byte copy of this object; safe only because all members are uint8_t
// (see the alignment static_assert below).
759 template<typename Vector>
760 void Encode(Vector* dest) const {
761 static_assert(alignof(StackMapEncoding) == 1, "Should not require alignment");
762 const uint8_t* ptr = reinterpret_cast<const uint8_t*>(this);
763 dest->insert(dest->end(), ptr, ptr + sizeof(*this));
766 // Decode the encoding from a pointer, updates the pointer.
// NOTE(review): assumes the bytes at *ptr were produced by Encode() on a
// compatible build — confirm callers guarantee this.
767 void Decode(const uint8_t** ptr) {
768 *this = *reinterpret_cast<const StackMapEncoding*>(*ptr);
769 *ptr += sizeof(*this);
772 void Dump(VariableIndentationOutputStream* vios) const;
// Bit offsets of each field within a stack map; all fit in a byte because
// the total bit size is small (total_bit_size_ is itself a uint8_t).
775 static constexpr size_t kNativePcBitOffset = 0;
776 uint8_t dex_pc_bit_offset_;
777 uint8_t dex_register_map_bit_offset_;
778 uint8_t inline_info_bit_offset_;
779 uint8_t register_mask_index_bit_offset_;
780 uint8_t stack_mask_index_bit_offset_;
781 uint8_t total_bit_size_;
785 * A Stack Map holds compilation information for a specific PC necessary for:
786 * - Mapping it to a dex PC,
787 * - Knowing which stack entries are objects,
788 * - Knowing which registers hold objects,
789 * - Knowing the inlining information,
790 * - Knowing the values of dex registers.
792 * The information is of the form:
794 * [native_pc_offset, dex_pc, dex_register_map_offset, inlining_info_index, register_mask_index,
800 explicit StackMap(BitMemoryRegion region) : region_(region) {}
802 ALWAYS_INLINE bool IsValid() const { return region_.pointer() != nullptr; }
804 ALWAYS_INLINE uint32_t GetDexPc(const StackMapEncoding& encoding) const {
805 return encoding.GetDexPcEncoding().Load(region_);
808 ALWAYS_INLINE void SetDexPc(const StackMapEncoding& encoding, uint32_t dex_pc) {
809 encoding.GetDexPcEncoding().Store(region_, dex_pc);
812 ALWAYS_INLINE uint32_t GetNativePcOffset(const StackMapEncoding& encoding,
813 InstructionSet instruction_set) const {
815 CodeOffset::FromCompressedOffset(encoding.GetNativePcEncoding().Load(region_)));
816 return offset.Uint32Value(instruction_set);
819 ALWAYS_INLINE void SetNativePcCodeOffset(const StackMapEncoding& encoding,
820 CodeOffset native_pc_offset) {
821 encoding.GetNativePcEncoding().Store(region_, native_pc_offset.CompressedValue());
824 ALWAYS_INLINE uint32_t GetDexRegisterMapOffset(const StackMapEncoding& encoding) const {
825 return encoding.GetDexRegisterMapEncoding().Load(region_);
828 ALWAYS_INLINE void SetDexRegisterMapOffset(const StackMapEncoding& encoding, uint32_t offset) {
829 encoding.GetDexRegisterMapEncoding().Store(region_, offset);
832 ALWAYS_INLINE uint32_t GetInlineInfoIndex(const StackMapEncoding& encoding) const {
833 return encoding.GetInlineInfoEncoding().Load(region_);
836 ALWAYS_INLINE void SetInlineInfoIndex(const StackMapEncoding& encoding, uint32_t index) {
837 encoding.GetInlineInfoEncoding().Store(region_, index);
840 ALWAYS_INLINE uint32_t GetRegisterMaskIndex(const StackMapEncoding& encoding) const {
841 return encoding.GetRegisterMaskIndexEncoding().Load(region_);
844 ALWAYS_INLINE void SetRegisterMaskIndex(const StackMapEncoding& encoding, uint32_t mask) {
845 encoding.GetRegisterMaskIndexEncoding().Store(region_, mask);
848 ALWAYS_INLINE uint32_t GetStackMaskIndex(const StackMapEncoding& encoding) const {
849 return encoding.GetStackMaskIndexEncoding().Load(region_);
852 ALWAYS_INLINE void SetStackMaskIndex(const StackMapEncoding& encoding, uint32_t mask) {
853 encoding.GetStackMaskIndexEncoding().Store(region_, mask);
856 ALWAYS_INLINE bool HasDexRegisterMap(const StackMapEncoding& encoding) const {
857 return GetDexRegisterMapOffset(encoding) != kNoDexRegisterMap;
860 ALWAYS_INLINE bool HasInlineInfo(const StackMapEncoding& encoding) const {
861 return GetInlineInfoIndex(encoding) != kNoInlineInfo;
864 ALWAYS_INLINE bool Equals(const StackMap& other) const {
865 return region_.pointer() == other.region_.pointer() &&
866 region_.size() == other.region_.size() &&
867 region_.BitOffset() == other.region_.BitOffset();
  // Dump a human-readable description of this stack map to `vios`.
  // Declaration only; defined out of line.
  void Dump(VariableIndentationOutputStream* vios,
            const CodeInfo& code_info,
            const CodeInfoEncoding& encoding,
            const MethodInfo& method_info,
            uint32_t code_offset,
            uint16_t number_of_dex_registers,
            InstructionSet instruction_set,
            const std::string& header_suffix = "") const;
  // Special (invalid) offset for the DexRegisterMapOffset field meaning
  // that there is no Dex register map for this stack map.
  static constexpr uint32_t kNoDexRegisterMap = -1;
  // Special (invalid) offset for the InlineDescriptorOffset field meaning
  // that there is no inline info for this stack map.
  static constexpr uint32_t kNoInlineInfo = -1;
  // A stack map has no fixed-size (byte-aligned) prefix; all fields are bit-packed.
  static constexpr int kFixedSize = 0;
  // Bit-level storage backing this stack map entry.
  BitMemoryRegion region_;
  friend class StackMapStream;
// Describes the bit layout of one InlineInfo entry. Field bit offsets are
// computed from the maximum value each field must hold; each field is sized
// with MinimumBitsToStore of that maximum.
// Layout (low bit to high): [is_last, method_index, dex_pc, extra_data,
// dex_register_map_offset].
class InlineInfoEncoding {
  // Compute and record the bit offset of every field.
  void SetFromSizes(size_t method_index_idx_max,
                    size_t extra_data_max,
                    size_t dex_register_map_size) {
    total_bit_size_ = kMethodIndexBitOffset;
    total_bit_size_ += MinimumBitsToStore(method_index_idx_max);
    dex_pc_bit_offset_ = dchecked_integral_cast<uint8_t>(total_bit_size_);
    // Note: We're not encoding the dex pc if there is none. That's the case
    // for an intrinsified native method, such as String.charAt().
    if (dex_pc_max != DexFile::kDexNoIndex) {
      // +1 reserves room for the kNoDexPc sentinel.
      total_bit_size_ += MinimumBitsToStore(1 /* kNoDexPc */ + dex_pc_max);
    extra_data_bit_offset_ = dchecked_integral_cast<uint8_t>(total_bit_size_);
    total_bit_size_ += MinimumBitsToStore(extra_data_max);
    // We also need +1 for kNoDexRegisterMap, but since the size is strictly
    // greater than any offset we might try to encode, we already implicitly have it.
    dex_register_map_bit_offset_ = dchecked_integral_cast<uint8_t>(total_bit_size_);
    total_bit_size_ += MinimumBitsToStore(dex_register_map_size);
  // Accessors returning the [start, end) bit range of each field.
  ALWAYS_INLINE FieldEncoding GetMethodIndexIdxEncoding() const {
    return FieldEncoding(kMethodIndexBitOffset, dex_pc_bit_offset_);
  ALWAYS_INLINE FieldEncoding GetDexPcEncoding() const {
    return FieldEncoding(dex_pc_bit_offset_, extra_data_bit_offset_, -1 /* min_value */);
  ALWAYS_INLINE FieldEncoding GetExtraDataEncoding() const {
    return FieldEncoding(extra_data_bit_offset_, dex_register_map_bit_offset_);
  ALWAYS_INLINE FieldEncoding GetDexRegisterMapEncoding() const {
    return FieldEncoding(dex_register_map_bit_offset_, total_bit_size_, -1 /* min_value */);
  // Total width of one entry, in bits.
  ALWAYS_INLINE size_t BitSize() const {
    return total_bit_size_;
  void Dump(VariableIndentationOutputStream* vios) const;
  // Encode the encoding into the vector.
  template<typename Vector>
  void Encode(Vector* dest) const {
    // Serialized by raw memcpy of this POD; requires byte alignment.
    static_assert(alignof(InlineInfoEncoding) == 1, "Should not require alignment");
    const uint8_t* ptr = reinterpret_cast<const uint8_t*>(this);
    dest->insert(dest->end(), ptr, ptr + sizeof(*this));
  // Decode the encoding from a pointer, updates the pointer.
  void Decode(const uint8_t** ptr) {
    *this = *reinterpret_cast<const InlineInfoEncoding*>(*ptr);
    *ptr += sizeof(*this);
  // Bit 0 is the is_last flag; the method index field starts right after it.
  static constexpr uint8_t kIsLastBitOffset = 0;
  static constexpr uint8_t kMethodIndexBitOffset = 1;
  uint8_t dex_pc_bit_offset_;
  uint8_t extra_data_bit_offset_;
  uint8_t dex_register_map_bit_offset_;
  uint8_t total_bit_size_;
 * Inline information for a specific PC. The information is of the form:
 *
 *   [is_last,
 *    method_index (or ArtMethod high bits),
 *    dex_pc,
 *    extra_data (ArtMethod low bits or 1),
 *    dex_register_map_offset]+.
972 explicit InlineInfo(BitMemoryRegion region) : region_(region) {}
  // Inlining depth: scans entries until one has its is_last bit (bit 0) set.
  ALWAYS_INLINE uint32_t GetDepth(const InlineInfoEncoding& encoding) const {
    while (!GetRegionAtDepth(encoding, depth++).LoadBit(0)) { }  // Check is_last bit.
  // Record the depth by setting the is_last bit only on the final entry.
  ALWAYS_INLINE void SetDepth(const InlineInfoEncoding& encoding, uint32_t depth) {
    DCHECK_GT(depth, 0u);
    for (size_t d = 0; d < depth; ++d) {
      GetRegionAtDepth(encoding, d).StoreBit(0, d == depth - 1);  // Set is_last bit.
  // Index into the MethodInfo table for the method inlined at `depth`.
  // Invalid when the entry encodes an ArtMethod pointer directly.
  ALWAYS_INLINE uint32_t GetMethodIndexIdxAtDepth(const InlineInfoEncoding& encoding,
                                                  uint32_t depth) const {
    DCHECK(!EncodesArtMethodAtDepth(encoding, depth));
    return encoding.GetMethodIndexIdxEncoding().Load(GetRegionAtDepth(encoding, depth));
  // Store the MethodInfo table index for the entry at `depth`.
  ALWAYS_INLINE void SetMethodIndexIdxAtDepth(const InlineInfoEncoding& encoding,
    encoding.GetMethodIndexIdxEncoding().Store(GetRegionAtDepth(encoding, depth), index);
  // Resolve the entry's method-index index through the MethodInfo table to
  // obtain the actual dex method index.
  ALWAYS_INLINE uint32_t GetMethodIndexAtDepth(const InlineInfoEncoding& encoding,
                                               const MethodInfo& method_info,
                                               uint32_t depth) const {
    return method_info.GetMethodIndex(GetMethodIndexIdxAtDepth(encoding, depth));
  // Dex pc of the inlined frame at `depth`.
  ALWAYS_INLINE uint32_t GetDexPcAtDepth(const InlineInfoEncoding& encoding,
                                         uint32_t depth) const {
    return encoding.GetDexPcEncoding().Load(GetRegionAtDepth(encoding, depth));
  // Store the dex pc for the entry at `depth`.
  ALWAYS_INLINE void SetDexPcAtDepth(const InlineInfoEncoding& encoding,
    encoding.GetDexPcEncoding().Store(GetRegionAtDepth(encoding, depth), dex_pc);
  // The low bit of extra_data is the discriminator: 0 means the entry stores
  // an ArtMethod pointer (split across fields), 1 means it stores a method index.
  ALWAYS_INLINE bool EncodesArtMethodAtDepth(const InlineInfoEncoding& encoding,
                                             uint32_t depth) const {
    return (encoding.GetExtraDataEncoding().Load(GetRegionAtDepth(encoding, depth)) & 1) == 0;
  // Store the raw extra_data field (ArtMethod low bits, or 1).
  ALWAYS_INLINE void SetExtraDataAtDepth(const InlineInfoEncoding& encoding,
                                         uint32_t extra_data) {
    encoding.GetExtraDataEncoding().Store(GetRegionAtDepth(encoding, depth), extra_data);
  // Reassemble an ArtMethod* stored across two fields: extra_data holds the
  // low 32 bits and the method_index field holds the high 32 bits (zero on
  // 32-bit targets, in which case the low bits alone form the pointer).
  ALWAYS_INLINE ArtMethod* GetArtMethodAtDepth(const InlineInfoEncoding& encoding,
                                               uint32_t depth) const {
    uint32_t low_bits = encoding.GetExtraDataEncoding().Load(GetRegionAtDepth(encoding, depth));
    uint32_t high_bits = encoding.GetMethodIndexIdxEncoding().Load(
        GetRegionAtDepth(encoding, depth));
    if (high_bits == 0) {
      return reinterpret_cast<ArtMethod*>(low_bits);
    uint64_t address = high_bits;
    address = address << 32;
    return reinterpret_cast<ArtMethod*>(address | low_bits);
  // Dex register map offset for the inlined frame at `depth`,
  // or StackMap::kNoDexRegisterMap if that frame has none.
  ALWAYS_INLINE uint32_t GetDexRegisterMapOffsetAtDepth(const InlineInfoEncoding& encoding,
                                                        uint32_t depth) const {
    return encoding.GetDexRegisterMapEncoding().Load(GetRegionAtDepth(encoding, depth));
  // Store the Dex register map offset for the entry at `depth`.
  ALWAYS_INLINE void SetDexRegisterMapOffsetAtDepth(const InlineInfoEncoding& encoding,
    encoding.GetDexRegisterMapEncoding().Store(GetRegionAtDepth(encoding, depth), offset);
  // Whether the inlined frame at `depth` has a Dex register map.
  ALWAYS_INLINE bool HasDexRegisterMapAtDepth(const InlineInfoEncoding& encoding,
                                              uint32_t depth) const {
    return GetDexRegisterMapOffsetAtDepth(encoding, depth) != StackMap::kNoDexRegisterMap;
  // Dump all inline entries to `vios`. Declaration only; defined out of line.
  void Dump(VariableIndentationOutputStream* vios,
            const CodeInfo& info,
            const MethodInfo& method_info,
            uint16_t* number_of_dex_registers) const;
  // Bit region of the entry at `depth`: entries are laid out back-to-back,
  // each encoding.BitSize() bits wide.
  ALWAYS_INLINE BitMemoryRegion GetRegionAtDepth(const InlineInfoEncoding& encoding,
                                                 uint32_t depth) const {
    size_t entry_size = encoding.BitSize();
    DCHECK_GT(entry_size, 0u);
    return region_.Subregion(depth * entry_size, entry_size);
  // Backing storage for one or more consecutive inline info entries.
  BitMemoryRegion region_;
// Bit sized region encoding, may be more than 255 bits.
class BitRegionEncoding {
  // Width of the region in bits (serialized).
  uint32_t num_bits = 0;
  ALWAYS_INLINE size_t BitSize() const {
  // Serialize as ULEB128 (num_bits may exceed a single byte).
  template<typename Vector>
  void Encode(Vector* dest) const {
    EncodeUnsignedLeb128(dest, num_bits);  // Use leb in case num_bits is greater than 255.
  // Deserialize, advancing *ptr past the consumed bytes.
  void Decode(const uint8_t** ptr) {
    num_bits = DecodeUnsignedLeb128(ptr);
1093 // A table of bit sized encodings.
1094 template <typename Encoding>
1095 struct BitEncodingTable {
1096 static constexpr size_t kInvalidOffset = static_cast<size_t>(-1);
1097 // How the encoding is laid out (serialized).
1100 // Number of entries in the table (serialized).
1103 // Bit offset for the base of the table (computed).
1104 size_t bit_offset = kInvalidOffset;
1106 template<typename Vector>
1107 void Encode(Vector* dest) const {
1108 EncodeUnsignedLeb128(dest, num_entries);
1109 encoding.Encode(dest);
1112 ALWAYS_INLINE void Decode(const uint8_t** ptr) {
1113 num_entries = DecodeUnsignedLeb128(ptr);
1114 encoding.Decode(ptr);
1117 // Set the bit offset in the table and adds the space used by the table to offset.
1118 void UpdateBitOffset(size_t* offset) {
1119 DCHECK(offset != nullptr);
1120 bit_offset = *offset;
1121 *offset += encoding.BitSize() * num_entries;
1124 // Return the bit region for the map at index i.
1125 ALWAYS_INLINE BitMemoryRegion BitRegion(MemoryRegion region, size_t index) const {
1126 DCHECK_NE(bit_offset, kInvalidOffset) << "Invalid table offset";
1127 DCHECK_LT(index, num_entries);
1128 const size_t map_size = encoding.BitSize();
1129 return BitMemoryRegion(region, bit_offset + index * map_size, map_size);
1133 // A byte sized table of possible variable sized encodings.
1134 struct ByteSizedTable {
1135 static constexpr size_t kInvalidOffset = static_cast<size_t>(-1);
1137 // Number of entries in the table (serialized).
1138 size_t num_entries = 0;
1140 // Number of bytes of the table (serialized).
1143 // Bit offset for the base of the table (computed).
1144 size_t byte_offset = kInvalidOffset;
1146 template<typename Vector>
1147 void Encode(Vector* dest) const {
1148 EncodeUnsignedLeb128(dest, num_entries);
1149 EncodeUnsignedLeb128(dest, num_bytes);
1152 ALWAYS_INLINE void Decode(const uint8_t** ptr) {
1153 num_entries = DecodeUnsignedLeb128(ptr);
1154 num_bytes = DecodeUnsignedLeb128(ptr);
1157 // Set the bit offset of the table. Adds the total bit size of the table to offset.
1158 void UpdateBitOffset(size_t* offset) {
1159 DCHECK(offset != nullptr);
1160 DCHECK_ALIGNED(*offset, kBitsPerByte);
1161 byte_offset = *offset / kBitsPerByte;
1162 *offset += num_bytes * kBitsPerByte;
// Format is [native pc, invoke type, method index].
// Describes the bit layout of one InvokeInfo entry; field widths are sized
// from the maximum value each field must hold.
class InvokeInfoEncoding {
  // Compute and record the bit offset of every field.
  void SetFromSizes(size_t native_pc_max,
                    size_t invoke_type_max,
                    size_t method_index_max) {
    total_bit_size_ = 0;
    DCHECK_EQ(kNativePcBitOffset, total_bit_size_);
    total_bit_size_ += MinimumBitsToStore(native_pc_max);
    invoke_type_bit_offset_ = total_bit_size_;
    total_bit_size_ += MinimumBitsToStore(invoke_type_max);
    method_index_bit_offset_ = total_bit_size_;
    total_bit_size_ += MinimumBitsToStore(method_index_max);
  // Accessors returning the [start, end) bit range of each field.
  ALWAYS_INLINE FieldEncoding GetNativePcEncoding() const {
    return FieldEncoding(kNativePcBitOffset, invoke_type_bit_offset_);
  ALWAYS_INLINE FieldEncoding GetInvokeTypeEncoding() const {
    return FieldEncoding(invoke_type_bit_offset_, method_index_bit_offset_);
  ALWAYS_INLINE FieldEncoding GetMethodIndexEncoding() const {
    return FieldEncoding(method_index_bit_offset_, total_bit_size_);
  // Total width of one entry, in bits.
  ALWAYS_INLINE size_t BitSize() const {
    return total_bit_size_;
  // Serialize by raw memcpy of this POD; requires byte alignment.
  template<typename Vector>
  void Encode(Vector* dest) const {
    static_assert(alignof(InvokeInfoEncoding) == 1, "Should not require alignment");
    const uint8_t* ptr = reinterpret_cast<const uint8_t*>(this);
    dest->insert(dest->end(), ptr, ptr + sizeof(*this));
  // Deserialize, advancing *ptr past the consumed bytes.
  void Decode(const uint8_t** ptr) {
    *this = *reinterpret_cast<const InvokeInfoEncoding*>(*ptr);
    *ptr += sizeof(*this);
  static constexpr uint8_t kNativePcBitOffset = 0;
  uint8_t invoke_type_bit_offset_;
  uint8_t method_index_bit_offset_;
  uint8_t total_bit_size_;
1218 explicit InvokeInfo(BitMemoryRegion region) : region_(region) {}
  // Native PC offset, decompressed for the given instruction set.
  ALWAYS_INLINE uint32_t GetNativePcOffset(const InvokeInfoEncoding& encoding,
                                           InstructionSet instruction_set) const {
        CodeOffset::FromCompressedOffset(encoding.GetNativePcEncoding().Load(region_)));
    return offset.Uint32Value(instruction_set);
  // Store the native PC in its compressed (CodeOffset) form.
  ALWAYS_INLINE void SetNativePcCodeOffset(const InvokeInfoEncoding& encoding,
                                           CodeOffset native_pc_offset) {
    encoding.GetNativePcEncoding().Store(region_, native_pc_offset.CompressedValue());
  // Invoke type of the call this entry describes.
  ALWAYS_INLINE uint32_t GetInvokeType(const InvokeInfoEncoding& encoding) const {
    return encoding.GetInvokeTypeEncoding().Load(region_);
  // Store the invoke type field.
  ALWAYS_INLINE void SetInvokeType(const InvokeInfoEncoding& encoding, uint32_t invoke_type) {
    encoding.GetInvokeTypeEncoding().Store(region_, invoke_type);
  // Index into the MethodInfo table for the callee.
  ALWAYS_INLINE uint32_t GetMethodIndexIdx(const InvokeInfoEncoding& encoding) const {
    return encoding.GetMethodIndexEncoding().Load(region_);
  // Store the MethodInfo table index.
  ALWAYS_INLINE void SetMethodIndexIdx(const InvokeInfoEncoding& encoding,
                                       uint32_t method_index_idx) {
    encoding.GetMethodIndexEncoding().Store(region_, method_index_idx);
  // Resolve the stored index through the MethodInfo table to get the actual
  // dex method index of the callee.
  ALWAYS_INLINE uint32_t GetMethodIndex(const InvokeInfoEncoding& encoding,
                                        MethodInfo method_info) const {
    return method_info.GetMethodIndex(GetMethodIndexIdx(encoding));
  // An InvokeInfo backed by a null region is the "not found" sentinel.
  bool IsValid() const { return region_.pointer() != nullptr; }
  BitMemoryRegion region_;
// Most of the fields are encoded as ULEB128 to save space.
// Header describing where each table of a CodeInfo blob lives and how its
// entries are bit-packed. Serialized at the front of the blob.
struct CodeInfoEncoding {
  // NOTE(review): declared uint32_t but initialized via static_cast<size_t>(-1);
  // the value narrows to 0xFFFFFFFF either way, but static_cast<uint32_t>(-1)
  // would state the intent directly.
  static constexpr uint32_t kInvalidSize = static_cast<size_t>(-1);
  // Byte sized tables go first to avoid unnecessary alignment bits.
  ByteSizedTable dex_register_map;
  ByteSizedTable location_catalog;
  BitEncodingTable<StackMapEncoding> stack_map;
  BitEncodingTable<BitRegionEncoding> register_mask;
  BitEncodingTable<BitRegionEncoding> stack_mask;
  BitEncodingTable<InvokeInfoEncoding> invoke_info;
  BitEncodingTable<InlineInfoEncoding> inline_info;
  CodeInfoEncoding() {}
  // Decode a serialized header from `data` and compute all table offsets.
  explicit CodeInfoEncoding(const void* data) {
    const uint8_t* ptr = reinterpret_cast<const uint8_t*>(data);
    dex_register_map.Decode(&ptr);
    location_catalog.Decode(&ptr);
    stack_map.Decode(&ptr);
    register_mask.Decode(&ptr);
    stack_mask.Decode(&ptr);
    invoke_info.Decode(&ptr);
    // The inline_info table is only serialized when stack maps reference it.
    if (stack_map.encoding.GetInlineInfoEncoding().BitSize() > 0) {
      inline_info.Decode(&ptr);
    inline_info = BitEncodingTable<InlineInfoEncoding>();
    dchecked_integral_cast<uint32_t>(ptr - reinterpret_cast<const uint8_t*>(data));
    ComputeTableOffsets();
  // Compress is not const since it calculates cache_header_size. This is used by PrepareForFillIn.
  template<typename Vector>
  void Compress(Vector* dest) {
    dex_register_map.Encode(dest);
    location_catalog.Encode(dest);
    stack_map.Encode(dest);
    register_mask.Encode(dest);
    stack_mask.Encode(dest);
    invoke_info.Encode(dest);
    // Mirror of the Decode path: inline_info only serialized when referenced.
    if (stack_map.encoding.GetInlineInfoEncoding().BitSize() > 0) {
      inline_info.Encode(dest);
    cache_header_size = dest->size();
  // Compute each table's offset: byte-aligned tables first, then bit-packed
  // tables back-to-back after them.
  ALWAYS_INLINE void ComputeTableOffsets() {
    size_t bit_offset = HeaderSize() * kBitsPerByte;
    // The byte tables must be aligned so they must go first.
    dex_register_map.UpdateBitOffset(&bit_offset);
    location_catalog.UpdateBitOffset(&bit_offset);
    // Other tables don't require alignment.
    stack_map.UpdateBitOffset(&bit_offset);
    register_mask.UpdateBitOffset(&bit_offset);
    stack_mask.UpdateBitOffset(&bit_offset);
    invoke_info.UpdateBitOffset(&bit_offset);
    inline_info.UpdateBitOffset(&bit_offset);
    cache_non_header_size = RoundUp(bit_offset, kBitsPerByte) / kBitsPerByte - HeaderSize();
  ALWAYS_INLINE size_t HeaderSize() const {
    DCHECK_NE(cache_header_size, kInvalidSize) << "Uninitialized";
    return cache_header_size;
  ALWAYS_INLINE size_t NonHeaderSize() const {
    DCHECK_NE(cache_non_header_size, kInvalidSize) << "Uninitialized";
    return cache_non_header_size;
  // Computed fields (not serialized).
  // Header size in bytes, cached to avoid needing to re-decode the encoding in HeaderSize.
  uint32_t cache_header_size = kInvalidSize;
  // Non header size in bytes, cached to avoid needing to re-decode the encoding in NonHeaderSize.
  uint32_t cache_non_header_size = kInvalidSize;
1341 * Wrapper around all compiler information collected for a method.
1342 * The information is of the form:
1344 * [CodeInfoEncoding, DexRegisterMap+, DexLocationCatalog+, StackMap+, RegisterMask+, StackMask+,
1347 * where CodeInfoEncoding is of the form:
1349 * [ByteSizedTable(dex_register_map), ByteSizedTable(location_catalog),
1350 * BitEncodingTable<StackMapEncoding>, BitEncodingTable<BitRegionEncoding>,
1351 * BitEncodingTable<BitRegionEncoding>, BitEncodingTable<InlineInfoEncoding>]
1355 explicit CodeInfo(MemoryRegion region) : region_(region) {
  // Construct from a raw pointer: decode the header first to learn the total
  // (header + non-header) size, then wrap exactly that many bytes.
  explicit CodeInfo(const void* data) {
    CodeInfoEncoding encoding = CodeInfoEncoding(data);
    region_ = MemoryRegion(const_cast<void*>(data),
                           encoding.HeaderSize() + encoding.NonHeaderSize());
  // Decode the encoding header from the start of region_ and sanity-check it.
  CodeInfoEncoding ExtractEncoding() const {
    CodeInfoEncoding encoding(region_.begin());
    AssertValidStackMap(encoding);
  // True when stack maps carry an inline info field at all.
  bool HasInlineInfo(const CodeInfoEncoding& encoding) const {
    return encoding.stack_map.encoding.GetInlineInfoEncoding().BitSize() > 0;
  // View over the location catalog bytes within this CodeInfo.
  DexRegisterLocationCatalog GetDexRegisterLocationCatalog(const CodeInfoEncoding& encoding) const {
    return DexRegisterLocationCatalog(region_.Subregion(encoding.location_catalog.byte_offset,
                                                        encoding.location_catalog.num_bytes));
  // Width in bits of each stack mask entry.
  ALWAYS_INLINE size_t GetNumberOfStackMaskBits(const CodeInfoEncoding& encoding) const {
    return encoding.stack_mask.encoding.BitSize();
  // Stack map at `index` in the stack map table.
  ALWAYS_INLINE StackMap GetStackMapAt(size_t index, const CodeInfoEncoding& encoding) const {
    return StackMap(encoding.stack_map.BitRegion(region_, index));
  // Stack mask at `index` in the (deduplicated) stack mask table.
  BitMemoryRegion GetStackMask(size_t index, const CodeInfoEncoding& encoding) const {
    return encoding.stack_mask.BitRegion(region_, index);
  // Stack mask referenced by `stack_map` (via its stack-mask table index).
  BitMemoryRegion GetStackMaskOf(const CodeInfoEncoding& encoding,
                                 const StackMap& stack_map) const {
    return GetStackMask(stack_map.GetStackMaskIndex(encoding.stack_map.encoding), encoding);
  // Register mask at `index` in the (deduplicated) register mask table.
  BitMemoryRegion GetRegisterMask(size_t index, const CodeInfoEncoding& encoding) const {
    return encoding.register_mask.BitRegion(region_, index);
  // Register mask referenced by `stack_map`, loaded as a plain uint32_t.
  uint32_t GetRegisterMaskOf(const CodeInfoEncoding& encoding, const StackMap& stack_map) const {
    size_t index = stack_map.GetRegisterMaskIndex(encoding.stack_map.encoding);
    return GetRegisterMask(index, encoding).LoadBits(0u, encoding.register_mask.encoding.BitSize());
  // Number of entries in the Dex register location catalog.
  uint32_t GetNumberOfLocationCatalogEntries(const CodeInfoEncoding& encoding) const {
    return encoding.location_catalog.num_entries;
  // Size in bytes of the Dex register location catalog.
  uint32_t GetDexRegisterLocationCatalogSize(const CodeInfoEncoding& encoding) const {
    return encoding.location_catalog.num_bytes;
  // Number of stack maps in this CodeInfo.
  uint32_t GetNumberOfStackMaps(const CodeInfoEncoding& encoding) const {
    return encoding.stack_map.num_entries;
  // Get the size of all the stack maps of this CodeInfo object, in bits. Not byte aligned.
  ALWAYS_INLINE size_t GetStackMapsSizeInBits(const CodeInfoEncoding& encoding) const {
    return encoding.stack_map.encoding.BitSize() * GetNumberOfStackMaps(encoding);
  // Invoke info at `index` in the invoke info table.
  InvokeInfo GetInvokeInfo(const CodeInfoEncoding& encoding, size_t index) const {
    return InvokeInfo(encoding.invoke_info.BitRegion(region_, index));
  // Dex register map of `stack_map`, or an empty map when it has none.
  // The map's byte size depends on the number of live registers, so it is
  // computed on the fly via ComputeDexRegisterMapSizeOf.
  DexRegisterMap GetDexRegisterMapOf(StackMap stack_map,
                                     const CodeInfoEncoding& encoding,
                                     size_t number_of_dex_registers) const {
    if (!stack_map.HasDexRegisterMap(encoding.stack_map.encoding)) {
      return DexRegisterMap();
    // Stack map offsets are relative to the start of the dex register map table.
    const uint32_t offset = encoding.dex_register_map.byte_offset +
                            stack_map.GetDexRegisterMapOffset(encoding.stack_map.encoding);
    size_t size = ComputeDexRegisterMapSizeOf(encoding, offset, number_of_dex_registers);
    return DexRegisterMap(region_.Subregion(offset, size));
  // Total byte size of the Dex register maps of all stack maps.
  // O(number_of_stack_maps); sizes are recomputed per map.
  size_t GetDexRegisterMapsSize(const CodeInfoEncoding& encoding,
                                uint32_t number_of_dex_registers) const {
    for (size_t i = 0, e = GetNumberOfStackMaps(encoding); i < e; ++i) {
      StackMap stack_map = GetStackMapAt(i, encoding);
      DexRegisterMap map(GetDexRegisterMapOf(stack_map, encoding, number_of_dex_registers));
      total += map.Size();
1449 // Return the `DexRegisterMap` pointed by `inline_info` at depth `depth`.
1450 DexRegisterMap GetDexRegisterMapAtDepth(uint8_t depth,
1451 InlineInfo inline_info,
1452 const CodeInfoEncoding& encoding,
1453 uint32_t number_of_dex_registers) const {
1454 if (!inline_info.HasDexRegisterMapAtDepth(encoding.inline_info.encoding, depth)) {
1455 return DexRegisterMap();
1457 uint32_t offset = encoding.dex_register_map.byte_offset +
1458 inline_info.GetDexRegisterMapOffsetAtDepth(encoding.inline_info.encoding, depth);
1459 size_t size = ComputeDexRegisterMapSizeOf(encoding, offset, number_of_dex_registers);
1460 return DexRegisterMap(region_.Subregion(offset, size));
  // Inline info at `index`, spanning from that entry to the end of the region.
  InlineInfo GetInlineInfo(size_t index, const CodeInfoEncoding& encoding) const {
    // Since we do not know the depth, we just return the whole remaining map. The caller may
    // access the inline info for arbitrary depths. To return the precise inline info we would need
    // to count the depth before returning.
    // TODO: Clean this up.
    const size_t bit_offset = encoding.inline_info.bit_offset +
                              index * encoding.inline_info.encoding.BitSize();
    return InlineInfo(BitMemoryRegion(region_, bit_offset, region_.size_in_bits() - bit_offset));
  // Inline info referenced by `stack_map`; the stack map must have one.
  InlineInfo GetInlineInfoOf(StackMap stack_map, const CodeInfoEncoding& encoding) const {
    DCHECK(stack_map.HasInlineInfo(encoding.stack_map.encoding));
    uint32_t index = stack_map.GetInlineInfoIndex(encoding.stack_map.encoding);
    return GetInlineInfo(index, encoding);
  // Linear forward search for the first stack map with the given dex pc.
  StackMap GetStackMapForDexPc(uint32_t dex_pc, const CodeInfoEncoding& encoding) const {
    for (size_t i = 0, e = GetNumberOfStackMaps(encoding); i < e; ++i) {
      StackMap stack_map = GetStackMapAt(i, encoding);
      if (stack_map.GetDexPc(encoding.stack_map.encoding) == dex_pc) {
  // Searches the stack map list backwards because catch stack maps are stored
  // at the end.
  StackMap GetCatchStackMapForDexPc(uint32_t dex_pc, const CodeInfoEncoding& encoding) const {
    for (size_t i = GetNumberOfStackMaps(encoding); i > 0; --i) {
      StackMap stack_map = GetStackMapAt(i - 1, encoding);
      if (stack_map.GetDexPc(encoding.stack_map.encoding) == dex_pc) {
  // Find the stack map usable for OSR entry at `dex_pc`: two consecutive
  // stack maps with the same dex pc AND the same native pc mark an OSR point.
  StackMap GetOsrStackMapForDexPc(uint32_t dex_pc, const CodeInfoEncoding& encoding) const {
    size_t e = GetNumberOfStackMaps(encoding);
    // There cannot be OSR stack map if there is no stack map.
    // Walk over all stack maps. If two consecutive stack maps are identical, then we
    // have found a stack map suitable for OSR.
    const StackMapEncoding& stack_map_encoding = encoding.stack_map.encoding;
    for (size_t i = 0; i < e - 1; ++i) {
      StackMap stack_map = GetStackMapAt(i, encoding);
      if (stack_map.GetDexPc(stack_map_encoding) == dex_pc) {
        StackMap other = GetStackMapAt(i + 1, encoding);
        if (other.GetDexPc(stack_map_encoding) == dex_pc &&
            other.GetNativePcOffset(stack_map_encoding, kRuntimeISA) ==
                stack_map.GetNativePcOffset(stack_map_encoding, kRuntimeISA)) {
          // Both maps must agree on the dex register map, and OSR maps carry
          // no inline info.
          DCHECK_EQ(other.GetDexRegisterMapOffset(stack_map_encoding),
                    stack_map.GetDexRegisterMapOffset(stack_map_encoding));
          DCHECK(!stack_map.HasInlineInfo(stack_map_encoding));
          // Make sure there are not three identical stack maps following each other.
              stack_map.GetNativePcOffset(stack_map_encoding, kRuntimeISA),
              GetStackMapAt(i + 2, encoding).GetNativePcOffset(stack_map_encoding, kRuntimeISA));
  // Linear search for the stack map at the given native pc offset.
  StackMap GetStackMapForNativePcOffset(uint32_t native_pc_offset,
                                        const CodeInfoEncoding& encoding) const {
    // TODO: Safepoint stack maps are sorted by native_pc_offset but catch stack
    // maps are not. If we knew that the method does not have try/catch,
    // we could do binary search.
    for (size_t i = 0, e = GetNumberOfStackMaps(encoding); i < e; ++i) {
      StackMap stack_map = GetStackMapAt(i, encoding);
      if (stack_map.GetNativePcOffset(encoding.stack_map.encoding, kRuntimeISA) ==
  // Linear search for the invoke info at the given native pc offset; an
  // InvokeInfo over an empty region (IsValid() == false) means "not found".
  InvokeInfo GetInvokeInfoForNativePcOffset(uint32_t native_pc_offset,
                                            const CodeInfoEncoding& encoding) {
    for (size_t index = 0; index < encoding.invoke_info.num_entries; index++) {
      InvokeInfo item = GetInvokeInfo(encoding, index);
      if (item.GetNativePcOffset(encoding.invoke_info.encoding, kRuntimeISA) == native_pc_offset) {
    return InvokeInfo(BitMemoryRegion());
  // Dump this CodeInfo object on `os`. `code_offset` is the (absolute)
  // native PC of the compiled method and `number_of_dex_registers` the
  // number of Dex virtual registers used in this method. If
  // `dump_stack_maps` is true, also dump the stack maps and the
  // associated Dex register maps. Declaration only; defined out of line.
  void Dump(VariableIndentationOutputStream* vios,
            uint32_t code_offset,
            uint16_t number_of_dex_registers,
            bool dump_stack_maps,
            InstructionSet instruction_set,
            const MethodInfo& method_info) const;
1572 // Check that the code info has valid stack map and abort if it does not.
1573 void AssertValidStackMap(const CodeInfoEncoding& encoding) const {
1574 if (region_.size() != 0 && region_.size_in_bits() < GetStackMapsSizeInBits(encoding)) {
1575 LOG(FATAL) << region_.size() << "\n"
1576 << encoding.HeaderSize() << "\n"
1577 << encoding.NonHeaderSize() << "\n"
1578 << encoding.location_catalog.num_entries << "\n"
1579 << encoding.stack_map.num_entries << "\n"
1580 << encoding.stack_map.encoding.BitSize();
1585 // Compute the size of the Dex register map associated to the stack map at
1586 // `dex_register_map_offset_in_code_info`.
1587 size_t ComputeDexRegisterMapSizeOf(const CodeInfoEncoding& encoding,
1588 uint32_t dex_register_map_offset_in_code_info,
1589 uint16_t number_of_dex_registers) const {
1590 // Offset where the actual mapping data starts within art::DexRegisterMap.
1591 size_t location_mapping_data_offset_in_dex_register_map =
1592 DexRegisterMap::GetLocationMappingDataOffset(number_of_dex_registers);
1593 // Create a temporary art::DexRegisterMap to be able to call
1594 // art::DexRegisterMap::GetNumberOfLiveDexRegisters and
1595 DexRegisterMap dex_register_map_without_locations(
1596 MemoryRegion(region_.Subregion(dex_register_map_offset_in_code_info,
1597 location_mapping_data_offset_in_dex_register_map)));
1598 size_t number_of_live_dex_registers =
1599 dex_register_map_without_locations.GetNumberOfLiveDexRegisters(number_of_dex_registers);
1600 size_t location_mapping_data_size_in_bits =
1601 DexRegisterMap::SingleEntrySizeInBits(GetNumberOfLocationCatalogEntries(encoding))
1602 * number_of_live_dex_registers;
1603 size_t location_mapping_data_size_in_bytes =
1604 RoundUp(location_mapping_data_size_in_bits, kBitsPerByte) / kBitsPerByte;
1605 size_t dex_register_map_size =
1606 location_mapping_data_offset_in_dex_register_map + location_mapping_data_size_in_bytes;
1607 return dex_register_map_size;
  // Compute the size of a Dex register location catalog starting at offset `origin`
  // in `region_` and containing `number_of_dex_locations` entries.
  size_t ComputeDexRegisterLocationCatalogSize(uint32_t origin,
                                               uint32_t number_of_dex_locations) const {
    // TODO: Ideally, we would like to use art::DexRegisterLocationCatalog::Size or
    // art::DexRegisterLocationCatalog::FindLocationOffset, but the
    // DexRegisterLocationCatalog is not yet built. Try to factor common code.
    size_t offset = origin + DexRegisterLocationCatalog::kFixedSize;
    // Skip over all `number_of_dex_locations` entries, accumulating each
    // entry's (variable) size into `offset`.
    for (uint16_t i = 0; i < number_of_dex_locations; ++i) {
      // Read the first next byte and inspect its first 3 bits to decide
      // whether it is a short or a large location.
      DexRegisterLocationCatalog::ShortLocation first_byte =
          region_.LoadUnaligned<DexRegisterLocationCatalog::ShortLocation>(offset);
      DexRegisterLocation::Kind kind =
          DexRegisterLocationCatalog::ExtractKindFromShortLocation(first_byte);
      if (DexRegisterLocation::IsShortLocationKind(kind)) {
        // Short location. Skip the current byte.
        offset += DexRegisterLocationCatalog::SingleShortEntrySize();
        // Large location. Skip the 5 next bytes.
        offset += DexRegisterLocationCatalog::SingleLargeEntrySize();
    size_t size = offset - origin;
  // Byte-level storage backing the whole CodeInfo blob (header + tables).
  MemoryRegion region_;
  friend class StackMapStream;
1643 #undef ELEMENT_BYTE_OFFSET_AFTER
1644 #undef ELEMENT_BIT_OFFSET_AFTER
1648 #endif // ART_RUNTIME_STACK_MAP_H_