/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "image_writer.h"

#include <unordered_set>

#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/logging.h"
#include "base/unix_file/fd_file.h"
#include "class_linker-inl.h"
#include "compiled_method.h"
#include "dex_file-inl.h"
#include "driver/compiler_driver.h"
#include "elf_utils.h"
#include "elf_writer.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/accounting/heap_bitmap.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "intern_table.h"
#include "linear_alloc.h"
#include "lock_word.h"
#include "mirror/abstract_method.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/method.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/string-inl.h"
#include "oat_file_manager.h"
#include "scoped_thread_state_change.h"
#include "handle_scope-inl.h"
#include "utils/dex_cache_arrays_layout-inl.h"

using ::art::mirror::Class;
using ::art::mirror::DexCache;
using ::art::mirror::Object;
using ::art::mirror::ObjectArray;
using ::art::mirror::String;
// Separate objects into multiple bins to optimize dirty memory use.
static constexpr bool kBinObjects = true;

// Return true if an object is already in an image space.
bool ImageWriter::IsInBootImage(const void* obj) const {
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  if (!compile_app_image_) {
    DCHECK(heap->GetBootImageSpaces().empty());
  for (gc::space::ImageSpace* boot_image_space : heap->GetBootImageSpaces()) {
    const uint8_t* image_begin = boot_image_space->Begin();
    // Real image end including ArtMethods and ArtField sections.
    const uint8_t* image_end = image_begin + boot_image_space->GetImageHeader().GetImageSize();
    if (image_begin <= obj && obj < image_end) {

bool ImageWriter::IsInBootOatFile(const void* ptr) const {
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  if (!compile_app_image_) {
    DCHECK(heap->GetBootImageSpaces().empty());
  for (gc::space::ImageSpace* boot_image_space : heap->GetBootImageSpaces()) {
    const ImageHeader& image_header = boot_image_space->GetImageHeader();
    if (image_header.GetOatFileBegin() <= ptr && ptr < image_header.GetOatFileEnd()) {

static void CheckNoDexObjectsCallback(Object* obj, void* arg ATTRIBUTE_UNUSED)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  Class* klass = obj->GetClass();
  CHECK_NE(PrettyClass(klass), "com.android.dex.Dex");

static void CheckNoDexObjects() {
  ScopedObjectAccess soa(Thread::Current());
  Runtime::Current()->GetHeap()->VisitObjects(CheckNoDexObjectsCallback, nullptr);
bool ImageWriter::PrepareImageAddressSpace() {
  target_ptr_size_ = InstructionSetPointerSize(compiler_driver_.GetInstructionSet());
  gc::Heap* const heap = Runtime::Current()->GetHeap();
    ScopedObjectAccess soa(Thread::Current());
    PruneNonImageClasses();  // Remove junk.
    if (!compile_app_image_) {
      // Avoid for app image since this may increase RAM and image size.
      ComputeLazyFieldsForImageClasses();  // Add useful information.
  heap->CollectGarbage(false);  // Remove garbage.

  // Dex caches must not have their dex fields set in the image. These are memory buffers of mapped
  // dex files.
  // We may open them in the unstarted-runtime code for class metadata. Their fields should all be
  // reset in PruneNonImageClasses and the objects reclaimed in the GC. Make sure that's actually
  // the case.
    ScopedObjectAccess soa(Thread::Current());
    CheckNonImageClassesRemoved();

    ScopedObjectAccess soa(Thread::Current());
    CalculateNewObjectOffsets();

  // This needs to happen after CalculateNewObjectOffsets since it relies on intern_table_bytes_ and
  // bin size sums being calculated.
  if (!AllocMemory()) {
bool ImageWriter::Write(int image_fd,
                        const std::vector<const char*>& image_filenames,
                        const std::vector<const char*>& oat_filenames) {
  // If image_fd or oat_fd are not kInvalidFd then we may have empty strings in image_filenames or
  // oat_filenames.
  CHECK(!image_filenames.empty());
  if (image_fd != kInvalidFd) {
    CHECK_EQ(image_filenames.size(), 1u);
  CHECK(!oat_filenames.empty());
  CHECK_EQ(image_filenames.size(), oat_filenames.size());

    ScopedObjectAccess soa(Thread::Current());
    for (size_t i = 0; i < oat_filenames.size(); ++i) {
      CopyAndFixupNativeData(i);

    // TODO: heap validation can't handle these fix up passes.
    ScopedObjectAccess soa(Thread::Current());
    Runtime::Current()->GetHeap()->DisableObjectValidation();
    CopyAndFixupObjects();

  for (size_t i = 0; i < image_filenames.size(); ++i) {
    const char* image_filename = image_filenames[i];
    ImageInfo& image_info = GetImageInfo(i);
    std::unique_ptr<File> image_file;
    if (image_fd != kInvalidFd) {
      if (strlen(image_filename) == 0u) {
        image_file.reset(new File(image_fd, unix_file::kCheckSafeUsage));
        // Empty the file in case it already exists.
        if (image_file != nullptr) {
          TEMP_FAILURE_RETRY(image_file->SetLength(0));
          TEMP_FAILURE_RETRY(image_file->Flush());
        LOG(ERROR) << "image fd " << image_fd << " name " << image_filename;
      image_file.reset(OS::CreateEmptyFile(image_filename));
    if (image_file == nullptr) {
      LOG(ERROR) << "Failed to open image file " << image_filename;
    if (!compile_app_image_ && fchmod(image_file->Fd(), 0644) != 0) {
      PLOG(ERROR) << "Failed to make image file world readable: " << image_filename;

    std::unique_ptr<char[]> compressed_data;
    // Image data size excludes the bitmap and the header.
    ImageHeader* const image_header = reinterpret_cast<ImageHeader*>(image_info.image_->Begin());
    const size_t image_data_size = image_header->GetImageSize() - sizeof(ImageHeader);
    char* image_data = reinterpret_cast<char*>(image_info.image_->Begin()) + sizeof(ImageHeader);

    const char* image_data_to_write;
    const uint64_t compress_start_time = NanoTime();
    CHECK_EQ(image_header->storage_mode_, image_storage_mode_);
    switch (image_storage_mode_) {
      case ImageHeader::kStorageModeLZ4: {
        const size_t compressed_max_size = LZ4_compressBound(image_data_size);
        compressed_data.reset(new char[compressed_max_size]);
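        // Note: the legacy LZ4_compress/LZ4_compressHC APIs return the number of bytes written to
        // the destination buffer, or 0 if compression fails.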
        data_size = LZ4_compress(
            reinterpret_cast<char*>(image_info.image_->Begin()) + sizeof(ImageHeader),
      case ImageHeader::kStorageModeLZ4HC: {
        // Bound is the same as non-HC.
        const size_t compressed_max_size = LZ4_compressBound(image_data_size);
        compressed_data.reset(new char[compressed_max_size]);
        data_size = LZ4_compressHC(
            reinterpret_cast<char*>(image_info.image_->Begin()) + sizeof(ImageHeader),
      case ImageHeader::kStorageModeUncompressed: {
        data_size = image_data_size;
        image_data_to_write = image_data;
      default:
        LOG(FATAL) << "Unsupported";

    if (compressed_data != nullptr) {
      image_data_to_write = &compressed_data[0];
      VLOG(compiler) << "Compressed from " << image_data_size << " to " << data_size << " in "
                     << PrettyDuration(NanoTime() - compress_start_time);
    // Write out the image + fields + methods.
    const bool is_compressed = compressed_data != nullptr;
    if (!image_file->PwriteFully(image_data_to_write, data_size, sizeof(ImageHeader))) {
      PLOG(ERROR) << "Failed to write image file data " << image_filename;

    // Write out the image bitmap at the page aligned start of the image end, also uncompressed for
    // convenience.
    const ImageSection& bitmap_section = image_header->GetImageSection(
        ImageHeader::kSectionImageBitmap);
    // Align up since data size may be unaligned if the image is compressed.
    size_t bitmap_position_in_file = RoundUp(sizeof(ImageHeader) + data_size, kPageSize);
    if (!is_compressed) {
      CHECK_EQ(bitmap_position_in_file, bitmap_section.Offset());
    if (!image_file->PwriteFully(reinterpret_cast<char*>(image_info.image_bitmap_->Begin()),
                                 bitmap_section.Size(),
                                 bitmap_position_in_file)) {
      PLOG(ERROR) << "Failed to write image file " << image_filename;

    int err = image_file->Flush();
      PLOG(ERROR) << "Failed to flush image file " << image_filename << " with result " << err;

    // Write header last in case the compiler gets killed in the middle of image writing.
    // We do not want to have a corrupted image with a valid header.
    // The header is uncompressed since it contains whether the image is compressed or not.
    image_header->data_size_ = data_size;
    if (!image_file->PwriteFully(reinterpret_cast<char*>(image_info.image_->Begin()),
      PLOG(ERROR) << "Failed to write image file header " << image_filename;

    CHECK_EQ(bitmap_position_in_file + bitmap_section.Size(),
             static_cast<size_t>(image_file->GetLength()));
    if (image_file->FlushCloseOrErase() != 0) {
      PLOG(ERROR) << "Failed to flush and close image file " << image_filename;
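
// Stash the final image offset for |object| in its lock word as a forwarding address.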
void ImageWriter::SetImageOffset(mirror::Object* object, size_t offset) {
  DCHECK(object != nullptr);
  DCHECK_NE(offset, 0U);
  // The object is already deflated from when we set the bin slot. Just overwrite the lock word.
  object->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageOffsetAssigned(object));
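
// Overwrite a previously assigned forwarding address with a new image offset.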
void ImageWriter::UpdateImageOffset(mirror::Object* obj, uintptr_t offset) {
  DCHECK(IsImageOffsetAssigned(obj)) << obj << " " << offset;
  obj->SetLockWord(LockWord::FromForwardingAddress(offset), false);
  DCHECK_EQ(obj->GetLockWord(false).ReadBarrierState(), 0u);

void ImageWriter::AssignImageOffset(mirror::Object* object, ImageWriter::BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK_NE(image_objects_offset_begin_, 0u);
  size_t oat_index = GetOatIndex(object);
  ImageInfo& image_info = GetImageInfo(oat_index);
  size_t bin_slot_offset = image_info.bin_slot_offsets_[bin_slot.GetBin()];
  size_t new_offset = bin_slot_offset + bin_slot.GetIndex();
  DCHECK_ALIGNED(new_offset, kObjectAlignment);
  SetImageOffset(object, new_offset);
  DCHECK_LT(new_offset, image_info.image_end_);

bool ImageWriter::IsImageOffsetAssigned(mirror::Object* object) const {
  // Will also return true if the bin slot was assigned since we are reusing the lock word.
  DCHECK(object != nullptr);
  return object->GetLockWord(false).GetState() == LockWord::kForwardingAddress;

size_t ImageWriter::GetImageOffset(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageOffsetAssigned(object));
  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();
  size_t oat_index = GetOatIndex(object);
  const ImageInfo& image_info = GetImageInfo(oat_index);
  DCHECK_LT(offset, image_info.image_end_);
void ImageWriter::SetImageBinSlot(mirror::Object* object, BinSlot bin_slot) {
  DCHECK(object != nullptr);
  DCHECK(!IsImageOffsetAssigned(object));
  DCHECK(!IsImageBinSlotAssigned(object));

  // Before we stomp over the lock word, save the hash code for later.
  Monitor::Deflate(Thread::Current(), object);
  LockWord lw(object->GetLockWord(false));
  switch (lw.GetState()) {
    case LockWord::kFatLocked: {
      LOG(FATAL) << "Fat locked object " << object << " found during object copy";
    case LockWord::kThinLocked: {
      LOG(FATAL) << "Thin locked object " << object << " found during object copy";
    case LockWord::kUnlocked:
      // No hash, don't need to save it.
    case LockWord::kHashCode:
      DCHECK(saved_hashcode_map_.find(object) == saved_hashcode_map_.end());
      saved_hashcode_map_.emplace(object, lw.GetHashCode());
    default:
      LOG(FATAL) << "Unreachable.";
  object->SetLockWord(LockWord::FromForwardingAddress(bin_slot.Uint32Value()), false);
  DCHECK_EQ(object->GetLockWord(false).ReadBarrierState(), 0u);
  DCHECK(IsImageBinSlotAssigned(object));
void ImageWriter::PrepareDexCacheArraySlots() {
  // Prepare dex cache array starts based on the ordering specified in the CompilerDriver.
  // Set the slot size early to avoid DCHECK() failures in IsImageBinSlotAssigned()
  // when AssignImageBinSlot() assigns their indexes out of order.
  for (const DexFile* dex_file : compiler_driver_.GetDexFilesForOatFile()) {
    auto it = dex_file_oat_index_map_.find(dex_file);
    DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
    ImageInfo& image_info = GetImageInfo(it->second);
    image_info.dex_cache_array_starts_.Put(dex_file, image_info.bin_slot_sizes_[kBinDexCacheArray]);
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    image_info.bin_slot_sizes_[kBinDexCacheArray] += layout.Size();

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Thread* const self = Thread::Current();
  ReaderMutexLock mu(self, *class_linker->DexLock());
  for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
    mirror::DexCache* dex_cache =
        down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
    if (dex_cache == nullptr || IsInBootImage(dex_cache)) {
    const DexFile* dex_file = dex_cache->GetDexFile();
    DexCacheArraysLayout layout(target_ptr_size_, dex_file);
    DCHECK(layout.Valid());
    size_t oat_index = GetOatIndexForDexCache(dex_cache);
    ImageInfo& image_info = GetImageInfo(oat_index);
    uint32_t start = image_info.dex_cache_array_starts_.Get(dex_file);
    DCHECK_EQ(dex_file->NumTypeIds() != 0u, dex_cache->GetResolvedTypes() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedTypes(),
                               start + layout.TypesOffset(),
    DCHECK_EQ(dex_file->NumMethodIds() != 0u, dex_cache->GetResolvedMethods() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedMethods(),
                               start + layout.MethodsOffset(),
    DCHECK_EQ(dex_file->NumFieldIds() != 0u, dex_cache->GetResolvedFields() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetResolvedFields(),
                               start + layout.FieldsOffset(),
    DCHECK_EQ(dex_file->NumStringIds() != 0u, dex_cache->GetStrings() != nullptr);
    AddDexCacheArrayRelocation(dex_cache->GetStrings(), start + layout.StringsOffset(), dex_cache);
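
// Record where a native dex cache array will be placed in the image so it can be relocated later.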
void ImageWriter::AddDexCacheArrayRelocation(void* array, size_t offset, DexCache* dex_cache) {
  if (array != nullptr) {
    DCHECK(!IsInBootImage(array));
    size_t oat_index = GetOatIndexForDexCache(dex_cache);
    native_object_relocations_.emplace(array,
        NativeObjectRelocation { oat_index, offset, kNativeObjectRelocationTypeDexCacheArray });
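
// Track a method pointer array (such as a vtable) and check that every method it references
// belongs to a class that will be kept in the image.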
void ImageWriter::AddMethodPointerArray(mirror::PointerArray* arr) {
  DCHECK(arr != nullptr);
  for (size_t i = 0, len = arr->GetLength(); i < len; i++) {
    ArtMethod* method = arr->GetElementPtrSize<ArtMethod*>(i, target_ptr_size_);
    if (method != nullptr && !method->IsRuntimeMethod()) {
      mirror::Class* klass = method->GetDeclaringClass();
      CHECK(klass == nullptr || KeepClass(klass))
          << PrettyClass(klass) << " should be a kept class";
  // kBinArtMethodClean picked arbitrarily, just required to differentiate between ArtFields and
  // ArtMethods.
  pointer_arrays_.emplace(arr, kBinArtMethodClean);
void ImageWriter::AssignImageBinSlot(mirror::Object* object) {
  DCHECK(object != nullptr);
  size_t object_size = object->SizeOf();

  // The magic happens here. We segregate objects into different bins based
  // on how likely they are to get dirty at runtime.
  //
  // Likely-to-dirty objects get packed together into the same bin so that
  // at runtime their page dirtiness ratio (how many dirty objects a page has) is
  // maximized.
  //
  // This means more pages will stay either clean or shared dirty (with zygote) and
  // the app will use less of its own (private) memory.
  Bin bin = kBinRegular;
  size_t current_offset = 0u;

  // Changing the bin of an object is purely a memory-use tuning.
  // It has no effect on runtime correctness.
  //
  // Memory analysis has determined that the following types of objects get dirtied
  // the most:
  //
  // * Dex cache arrays are stored in a special bin. The arrays for each dex cache have
  //   a fixed layout which helps improve generated code (using PC-relative addressing),
  //   so we pre-calculate their offsets separately in PrepareDexCacheArraySlots().
  //   Since these arrays are huge, most pages do not overlap other objects and it's not
  //   really important where they are for the clean/dirty separation. Due to their
  //   special PC-relative addressing, we arbitrarily keep them at the end.
  // * Classes which are verified [their clinit runs only at runtime]
  //   - classes in general [because their static fields get overwritten]
  //   - initialized classes with all-final statics are unlikely to be ever dirty,
  //     so bin them separately
  // * Art Methods that are:
  //   - native [their native entry point is not looked up until runtime]
  //   - have declaring classes that aren't initialized
  //     [their interpreter/quick entry points are trampolines until the class
  //     becomes initialized]
  //
  // We also assume the following objects get dirtied either never or extremely rarely:
  //  * Strings (they are immutable)
  //  * Art methods that aren't native and have initialized declaring classes
  //
  // We assume that "regular" bin objects are highly unlikely to become dirtied,
  // so packing them together will not result in a noticeably tighter dirty-to-clean ratio.
  if (object->IsClass()) {
    bin = kBinClassVerified;
    mirror::Class* klass = object->AsClass();

    // Add non-embedded vtable to the pointer array table if there is one.
    auto* vtable = klass->GetVTable();
    if (vtable != nullptr) {
      AddMethodPointerArray(vtable);
    auto* iftable = klass->GetIfTable();
    if (iftable != nullptr) {
      for (int32_t i = 0; i < klass->GetIfTableCount(); ++i) {
        if (iftable->GetMethodArrayCount(i) > 0) {
          AddMethodPointerArray(iftable->GetMethodArray(i));

    if (klass->GetStatus() == Class::kStatusInitialized) {
      bin = kBinClassInitialized;

      // If the class's static fields are all final, put it into a separate bin
      // since it's very likely it will stay clean.
      uint32_t num_static_fields = klass->NumStaticFields();
      if (num_static_fields == 0) {
        bin = kBinClassInitializedFinalStatics;
        // Maybe all the statics are final?
        bool all_final = true;
        for (uint32_t i = 0; i < num_static_fields; ++i) {
          ArtField* field = klass->GetStaticField(i);
          if (!field->IsFinal()) {
          bin = kBinClassInitializedFinalStatics;
  } else if (object->GetClass<kVerifyNone>()->IsStringClass()) {
    bin = kBinString;  // Strings are almost always immutable (except for object header).
  }  // else bin = kBinRegular

  size_t oat_index = GetOatIndex(object);
  ImageInfo& image_info = GetImageInfo(oat_index);

  size_t offset_delta = RoundUp(object_size, kObjectAlignment);  // 64-bit alignment
  current_offset = image_info.bin_slot_sizes_[bin];  // How many bytes the current bin is at (aligned).
  // Move the current bin size up to accommodate the object we just assigned a bin slot.
  image_info.bin_slot_sizes_[bin] += offset_delta;

  BinSlot new_bin_slot(bin, current_offset);
  SetImageBinSlot(object, new_bin_slot);

  ++image_info.bin_slot_count_[bin];

  // Grow the image closer to the end by the object we just assigned.
  image_info.image_end_ += offset_delta;
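
// A method is considered "dirty" if its entry points are likely to be patched at runtime,
// e.g. because its declaring class is not yet initialized.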
bool ImageWriter::WillMethodBeDirty(ArtMethod* m) const {
  mirror::Class* declaring_class = m->GetDeclaringClass();
  // Initialized is highly unlikely to dirty since there are no entry points to mutate.
  return declaring_class == nullptr || declaring_class->GetStatus() != Class::kStatusInitialized;

bool ImageWriter::IsImageBinSlotAssigned(mirror::Object* object) const {
  DCHECK(object != nullptr);

  // We always stash the bin slot into a lockword, in the 'forwarding address' state.
  // If it's in some other state, then we haven't yet assigned an image bin slot.
  if (object->GetLockWord(false).GetState() != LockWord::kForwardingAddress) {
  } else if (kIsDebugBuild) {
    LockWord lock_word = object->GetLockWord(false);
    size_t offset = lock_word.ForwardingAddress();
    BinSlot bin_slot(offset);
    size_t oat_index = GetOatIndex(object);
    const ImageInfo& image_info = GetImageInfo(oat_index);
    DCHECK_LT(bin_slot.GetIndex(), image_info.bin_slot_sizes_[bin_slot.GetBin()])
        << "bin slot offset should not exceed the size of that bin";

ImageWriter::BinSlot ImageWriter::GetImageBinSlot(mirror::Object* object) const {
  DCHECK(object != nullptr);
  DCHECK(IsImageBinSlotAssigned(object));

  LockWord lock_word = object->GetLockWord(false);
  size_t offset = lock_word.ForwardingAddress();  // TODO: ForwardingAddress should be uint32_t
  DCHECK_LE(offset, std::numeric_limits<uint32_t>::max());

  BinSlot bin_slot(static_cast<uint32_t>(offset));
  size_t oat_index = GetOatIndex(object);
  const ImageInfo& image_info = GetImageInfo(oat_index);
  DCHECK_LT(bin_slot.GetIndex(), image_info.bin_slot_sizes_[bin_slot.GetBin()]);
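
// Allocate the anonymous memory for each image being written, plus a live bitmap covering the
// mirror object section.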
bool ImageWriter::AllocMemory() {
  for (ImageInfo& image_info : image_infos_) {
    ImageSection unused_sections[ImageHeader::kSectionCount];
    const size_t length = RoundUp(
        image_info.CreateImageSections(target_ptr_size_, unused_sections),
    std::string error_msg;
    image_info.image_.reset(MemMap::MapAnonymous("image writer image",
                                                 PROT_READ | PROT_WRITE,
    if (UNLIKELY(image_info.image_.get() == nullptr)) {
      LOG(ERROR) << "Failed to allocate memory for image file generation: " << error_msg;

    // Create the image bitmap, only needs to cover mirror object section which is up to image_end_.
    CHECK_LE(image_info.image_end_, length);
    image_info.image_bitmap_.reset(gc::accounting::ContinuousSpaceBitmap::Create(
        "image bitmap", image_info.image_->Begin(), RoundUp(image_info.image_end_, kPageSize)));
    if (image_info.image_bitmap_.get() == nullptr) {
      LOG(ERROR) << "Failed to allocate memory for image bitmap";

class ComputeLazyFieldsForClassesVisitor : public ClassVisitor {
  bool operator()(Class* c) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    StackHandleScope<1> hs(Thread::Current());
    mirror::Class::ComputeName(hs.NewHandle(c));

void ImageWriter::ComputeLazyFieldsForImageClasses() {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ComputeLazyFieldsForClassesVisitor visitor;
  class_linker->VisitClassesWithoutClassesLock(&visitor);
static bool IsBootClassLoaderClass(mirror::Class* klass) SHARED_REQUIRES(Locks::mutator_lock_) {
  return klass->GetClassLoader() == nullptr;

bool ImageWriter::IsBootClassLoaderNonImageClass(mirror::Class* klass) {
  return IsBootClassLoaderClass(klass) && !IsInBootImage(klass);

bool ImageWriter::PruneAppImageClass(mirror::Class* klass) {
  bool early_exit = false;
  std::unordered_set<mirror::Class*> visited;
  return PruneAppImageClassInternal(klass, &early_exit, &visited);

bool ImageWriter::PruneAppImageClassInternal(
    mirror::Class* klass,
    bool* early_exit,
    std::unordered_set<mirror::Class*>* visited) {
  DCHECK(early_exit != nullptr);
  DCHECK(visited != nullptr);
  DCHECK(compile_app_image_);
  if (klass == nullptr || IsInBootImage(klass)) {
  auto found = prune_class_memo_.find(klass);
  if (found != prune_class_memo_.end()) {
    // Already computed, return the found value.
    return found->second;
  // Circular dependencies, return false but do not store the result in the memoization table.
  if (visited->find(klass) != visited->end()) {
  visited->emplace(klass);
  bool result = IsBootClassLoaderClass(klass);
  // Prune if not an image class, this handles any broken sets of image classes such as having a
  // class in the set but not its superclass.
  result = result || !compiler_driver_.IsImageClass(klass->GetDescriptor(&temp));
  bool my_early_exit = false;  // Only for ourselves, ignore caller.
  // Remove classes that failed to verify since we don't want to have java.lang.VerifyError in the
  // image.
  if (klass->GetStatus() == mirror::Class::kStatusError) {
    CHECK(klass->GetVerifyError() == nullptr) << PrettyClass(klass);
  // Check interfaces since these won't be visited through VisitReferences.
  mirror::IfTable* if_table = klass->GetIfTable();
  for (size_t i = 0, num_interfaces = klass->GetIfTableCount(); i < num_interfaces; ++i) {
    result = result || PruneAppImageClassInternal(if_table->GetInterface(i),
  if (klass->IsObjectArrayClass()) {
    result = result || PruneAppImageClassInternal(klass->GetComponentType(),
  // Check static fields and their classes.
  size_t num_static_fields = klass->NumReferenceStaticFields();
  if (num_static_fields != 0 && klass->IsResolved()) {
    // Presumably GC can happen while we are cross-compiling; it should not cause performance
    // problems to do the pointer size logic.
    MemberOffset field_offset = klass->GetFirstReferenceStaticFieldOffset(
        Runtime::Current()->GetClassLinker()->GetImagePointerSize());
    for (size_t i = 0u; i < num_static_fields; ++i) {
      mirror::Object* ref = klass->GetFieldObject<mirror::Object>(field_offset);
      if (ref != nullptr) {
        if (ref->IsClass()) {
          result = result || PruneAppImageClassInternal(ref->AsClass(),
          result = result || PruneAppImageClassInternal(ref->GetClass(),
      field_offset = MemberOffset(field_offset.Uint32Value() +
                                  sizeof(mirror::HeapReference<mirror::Object>));
  result = result || PruneAppImageClassInternal(klass->GetSuperClass(),
  // Erase the element we stored earlier since we are exiting the function.
  auto it = visited->find(klass);
  DCHECK(it != visited->end());
  // Only store the result if it is true or none of the calls early exited due to circular
  // dependencies. If visited is empty then we are the root caller; in this case the cycle was in
  // a child call and we can remember the result.
  if (result == true || !my_early_exit || visited->empty()) {
    prune_class_memo_[klass] = result;
  *early_exit |= my_early_exit;
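
// Return true if |klass| should be kept in the image being written.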
bool ImageWriter::KeepClass(Class* klass) {
  if (klass == nullptr) {
  if (compile_app_image_ && Runtime::Current()->GetHeap()->ObjectIsInBootImageSpace(klass)) {
    // Already in boot image, return true.
  if (!compiler_driver_.IsImageClass(klass->GetDescriptor(&temp))) {
  if (compile_app_image_) {
    // For app images, we need to prune boot loader classes that are not in the boot image since
    // these may have already been loaded when the app image is loaded.
    // Keep classes in the boot image space since we don't want to re-resolve these.
    return !PruneAppImageClass(klass);

class NonImageClassesVisitor : public ClassVisitor {
  explicit NonImageClassesVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}

  bool operator()(Class* klass) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    if (!image_writer_->KeepClass(klass)) {
      classes_to_prune_.insert(klass);

  std::unordered_set<mirror::Class*> classes_to_prune_;
  ImageWriter* const image_writer_;
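
// Remove classes that will not be kept in the image (and dex cache entries referring to them)
// from the runtime's data structures before layout.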
void ImageWriter::PruneNonImageClasses() {
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();

  // Make a list of classes we would like to prune.
  NonImageClassesVisitor visitor(this);
  class_linker->VisitClasses(&visitor);

  // Remove the undesired classes from the class roots.
  VLOG(compiler) << "Pruning " << visitor.classes_to_prune_.size() << " classes";
  for (mirror::Class* klass : visitor.classes_to_prune_) {
    const char* name = klass->GetDescriptor(&temp);
    VLOG(compiler) << "Pruning class " << name;
    if (!compile_app_image_) {
      DCHECK(IsBootClassLoaderClass(klass));
    bool result = class_linker->RemoveClass(name, klass->GetClassLoader());

  // Clear references to removed classes from the DexCaches.
  ArtMethod* resolution_method = runtime->GetResolutionMethod();

  ScopedAssertNoThreadSuspension sa(self, __FUNCTION__);
  ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);  // For ClassInClassTable
  ReaderMutexLock mu2(self, *class_linker->DexLock());
  for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
    if (self->IsJWeakCleared(data.weak_root)) {
    mirror::DexCache* dex_cache = self->DecodeJObject(data.weak_root)->AsDexCache();
    for (size_t i = 0; i < dex_cache->NumResolvedTypes(); i++) {
      Class* klass = dex_cache->GetResolvedType(i);
      if (klass != nullptr && !KeepClass(klass)) {
        dex_cache->SetResolvedType(i, nullptr);
    ArtMethod** resolved_methods = dex_cache->GetResolvedMethods();
    for (size_t i = 0, num = dex_cache->NumResolvedMethods(); i != num; ++i) {
      ArtMethod* method =
          mirror::DexCache::GetElementPtrSize(resolved_methods, i, target_ptr_size_);
      DCHECK(method != nullptr) << "Expected resolution method instead of null method";
      mirror::Class* declaring_class = method->GetDeclaringClass();
      // Copied methods may be held live by a class which was not an image class but have a
      // declaring class which is an image class. Set it to the resolution method to be safe and
      // prevent dangling pointers.
      if (method->IsCopied() || !KeepClass(declaring_class)) {
        mirror::DexCache::SetElementPtrSize(resolved_methods,
        // Check that the class is still in the classes table.
        DCHECK(class_linker->ClassInClassTable(declaring_class)) << "Class "
            << PrettyClass(declaring_class) << " not in class linker table";
    ArtField** resolved_fields = dex_cache->GetResolvedFields();
    for (size_t i = 0; i < dex_cache->NumResolvedFields(); i++) {
      ArtField* field = mirror::DexCache::GetElementPtrSize(resolved_fields, i, target_ptr_size_);
      if (field != nullptr && !KeepClass(field->GetDeclaringClass())) {
        dex_cache->SetResolvedField(i, nullptr, target_ptr_size_);
    // Clean the dex field. It might have been populated during the initialization phase, but
    // contains data only valid during a real run.
    dex_cache->SetFieldObject<false>(mirror::DexCache::DexOffset(), nullptr);

  // Drop the array class cache in the ClassLinker, as these are roots holding those classes live.
  class_linker->DropFindArrayClassCache();

  // Clear to save RAM.
  prune_class_memo_.clear();
void ImageWriter::CheckNonImageClassesRemoved() {
  if (compiler_driver_.GetImageClasses() != nullptr) {
    gc::Heap* heap = Runtime::Current()->GetHeap();
    heap->VisitObjects(CheckNonImageClassesRemovedCallback, this);

void ImageWriter::CheckNonImageClassesRemovedCallback(Object* obj, void* arg) {
  ImageWriter* image_writer = reinterpret_cast<ImageWriter*>(arg);
  if (obj->IsClass() && !image_writer->IsInBootImage(obj)) {
    Class* klass = obj->AsClass();
    if (!image_writer->KeepClass(klass)) {
      image_writer->DumpImageClasses();
      CHECK(image_writer->KeepClass(klass)) << klass->GetDescriptor(&temp)
                                            << " " << PrettyDescriptor(klass);

void ImageWriter::DumpImageClasses() {
  auto image_classes = compiler_driver_.GetImageClasses();
  CHECK(image_classes != nullptr);
  for (const std::string& image_class : *image_classes) {
    LOG(INFO) << " " << image_class;
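
// Look up |string| in the per-image intern tables (and, for app images, the boot image);
// returns null if no matching strong intern is found.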
mirror::String* ImageWriter::FindInternedString(mirror::String* string) {
  Thread* const self = Thread::Current();
  for (const ImageInfo& image_info : image_infos_) {
    mirror::String* const found = image_info.intern_table_->LookupStrong(self, string);
    DCHECK(image_info.intern_table_->LookupWeak(self, string) == nullptr)
        << string->ToModifiedUtf8();
    if (found != nullptr) {
  if (compile_app_image_) {
    Runtime* const runtime = Runtime::Current();
    mirror::String* found = runtime->GetInternTable()->LookupStrong(self, string);
    // If we found it in the runtime intern table it could either be in the boot image or interned
    // during app image compilation. If it was in the boot image return that, otherwise return null
    // since it belongs to another image space.
    if (found != nullptr && runtime->GetHeap()->ObjectIsInBootImageSpace(found)) {
    DCHECK(runtime->GetInternTable()->LookupWeak(self, string) == nullptr)
        << string->ToModifiedUtf8();
void ImageWriter::CalculateObjectBinSlots(Object* obj) {
  DCHECK(obj != nullptr);
  // If it is a string, we want to intern it if it's not already interned.
  if (obj->GetClass()->IsStringClass()) {
    size_t oat_index = GetOatIndex(obj);
    ImageInfo& image_info = GetImageInfo(oat_index);

    // We must be an interned string that was forward referenced and already assigned.
    if (IsImageBinSlotAssigned(obj)) {
      DCHECK_EQ(obj, FindInternedString(obj->AsString()));
    // Need to check if the string is already interned in another image info so that the intern
    // tables of two different images don't contain the same string.
    mirror::String* interned = FindInternedString(obj->AsString());
    if (interned == nullptr) {
      // Not in another image space, insert to our table.
      interned = image_info.intern_table_->InternStrongImageString(obj->AsString());
    if (obj != interned) {
      if (!IsImageBinSlotAssigned(interned)) {
        // Interned obj is after us; allocate its location early.
        AssignImageBinSlot(interned);
      // Point those looking for this object to the interned version.
      SetImageBinSlot(obj, GetImageBinSlot(interned));
    // else (obj == interned), nothing to do but fall through to the normal case.

  AssignImageBinSlot(obj);
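
// Build the image roots array (dex caches and class roots) for the image that corresponds to
// |oat_index|.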
ObjectArray<Object>* ImageWriter::CreateImageRoots(size_t oat_index) const {
  Runtime* runtime = Runtime::Current();
  ClassLinker* class_linker = runtime->GetClassLinker();
  Thread* self = Thread::Current();
  StackHandleScope<3> hs(self);
  Handle<Class> object_array_class(hs.NewHandle(
      class_linker->FindSystemClass(self, "[Ljava/lang/Object;")));

  std::unordered_set<const DexFile*> image_dex_files;
  for (auto& pair : dex_file_oat_index_map_) {
    const DexFile* image_dex_file = pair.first;
    size_t image_oat_index = pair.second;
    if (oat_index == image_oat_index) {
      image_dex_files.insert(image_dex_file);

  // Build an Object[] of all the DexCaches used in the source_space_.
  // Since we can't hold the dex lock when allocating the dex_caches
  // ObjectArray, we lock the dex lock twice, first to get the number
  // of dex caches and then lock it again to copy the dex caches.
  // We check that the number of dex caches does not change.
  size_t dex_cache_count = 0;
    ReaderMutexLock mu(self, *class_linker->DexLock());
    // Count number of dex caches not in the boot image.
    for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
      mirror::DexCache* dex_cache =
          down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
      const DexFile* dex_file = dex_cache->GetDexFile();
      if (!IsInBootImage(dex_cache)) {
        dex_cache_count += image_dex_files.find(dex_file) != image_dex_files.end() ? 1u : 0u;
  Handle<ObjectArray<Object>> dex_caches(
      hs.NewHandle(ObjectArray<Object>::Alloc(self, object_array_class.Get(), dex_cache_count)));
  CHECK(dex_caches.Get() != nullptr) << "Failed to allocate a dex cache array.";
    ReaderMutexLock mu(self, *class_linker->DexLock());
    size_t non_image_dex_caches = 0;
    // Re-count number of non image dex caches.
    for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
      mirror::DexCache* dex_cache =
          down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
      const DexFile* dex_file = dex_cache->GetDexFile();
      if (!IsInBootImage(dex_cache)) {
        non_image_dex_caches += image_dex_files.find(dex_file) != image_dex_files.end() ? 1u : 0u;
    CHECK_EQ(dex_cache_count, non_image_dex_caches)
        << "The number of non-image dex caches changed.";
    for (const ClassLinker::DexCacheData& data : class_linker->GetDexCachesData()) {
      mirror::DexCache* dex_cache =
          down_cast<mirror::DexCache*>(self->DecodeJObject(data.weak_root));
      const DexFile* dex_file = dex_cache->GetDexFile();
      if (!IsInBootImage(dex_cache) && image_dex_files.find(dex_file) != image_dex_files.end()) {
        dex_caches->Set<false>(i, dex_cache);

  // Build an Object[] of the roots needed to restore the runtime.
  auto image_roots(hs.NewHandle(
      ObjectArray<Object>::Alloc(self, object_array_class.Get(), ImageHeader::kImageRootsMax)));
  image_roots->Set<false>(ImageHeader::kDexCaches, dex_caches.Get());
  image_roots->Set<false>(ImageHeader::kClassRoots, class_linker->GetClassRoots());
  for (int i = 0; i < ImageHeader::kImageRootsMax; i++) {
    CHECK(image_roots->Get(i) != nullptr);
  return image_roots.Get();
// Walk instance fields of the given Class. Separate function to allow recursion on the super
// class.
void ImageWriter::WalkInstanceFields(mirror::Object* obj, mirror::Class* klass) {
  // Visit fields of parent classes first.
  StackHandleScope<1> hs(Thread::Current());
  Handle<mirror::Class> h_class(hs.NewHandle(klass));
  mirror::Class* super = h_class->GetSuperClass();
  if (super != nullptr) {
    WalkInstanceFields(obj, super);
  size_t num_reference_fields = h_class->NumReferenceInstanceFields();
  MemberOffset field_offset = h_class->GetFirstReferenceInstanceFieldOffset();
  for (size_t i = 0; i < num_reference_fields; ++i) {
    mirror::Object* value = obj->GetFieldObject<mirror::Object>(field_offset);
    if (value != nullptr) {
      WalkFieldsInOrder(value);
    field_offset = MemberOffset(field_offset.Uint32Value() +
                                sizeof(mirror::HeapReference<mirror::Object>));
// For an unvisited object, visit it then all its children found via fields.
void ImageWriter::WalkFieldsInOrder(mirror::Object* obj) {
  if (IsInBootImage(obj)) {
    // Object is in the image, don't need to fix it up.
  // Use our own visitor routine (instead of GC visitor) to get better locality between
  // an object and its fields.
  if (!IsImageBinSlotAssigned(obj)) {
    // Walk instance fields of all objects.
    StackHandleScope<2> hs(Thread::Current());
    Handle<mirror::Object> h_obj(hs.NewHandle(obj));
    Handle<mirror::Class> klass(hs.NewHandle(obj->GetClass()));
    // Visit the object itself.
    CalculateObjectBinSlots(h_obj.Get());
    WalkInstanceFields(h_obj.Get(), klass.Get());
    // Walk static fields of a Class.
    if (h_obj->IsClass()) {
      size_t num_reference_static_fields = klass->NumReferenceStaticFields();
      MemberOffset field_offset = klass->GetFirstReferenceStaticFieldOffset(target_ptr_size_);
      for (size_t i = 0; i < num_reference_static_fields; ++i) {
        mirror::Object* value = h_obj->GetFieldObject<mirror::Object>(field_offset);
        if (value != nullptr) {
          WalkFieldsInOrder(value);
        field_offset = MemberOffset(field_offset.Uint32Value() +
                                    sizeof(mirror::HeapReference<mirror::Object>));

      // Visit and assign offsets for fields and field arrays.
      auto* as_klass = h_obj->AsClass();
      mirror::DexCache* dex_cache = as_klass->GetDexCache();
      DCHECK_NE(klass->GetStatus(), mirror::Class::kStatusError);
      if (compile_app_image_) {
        // Extra sanity, no boot loader classes should be left!
        CHECK(!IsBootClassLoaderClass(as_klass)) << PrettyClass(as_klass);
      LengthPrefixedArray<ArtField>* fields[] = {
          as_klass->GetSFieldsPtr(), as_klass->GetIFieldsPtr(),
      size_t oat_index = GetOatIndexForDexCache(dex_cache);
      ImageInfo& image_info = GetImageInfo(oat_index);
        // Note: This table is only accessed from the image writer, so the lock is technically
        // unnecessary.
        WriterMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
        // Insert in the class table for this image.
        image_info.class_table_->Insert(as_klass);
      for (LengthPrefixedArray<ArtField>* cur_fields : fields) {
        // Total array length including header.
        if (cur_fields != nullptr) {
          const size_t header_size = LengthPrefixedArray<ArtField>::ComputeSize(0);
          // Forward the entire array at once.
          auto it = native_object_relocations_.find(cur_fields);
          CHECK(it == native_object_relocations_.end()) << "Field array " << cur_fields
                                                        << " already forwarded";
          size_t& offset = image_info.bin_slot_sizes_[kBinArtField];
          DCHECK(!IsInBootImage(cur_fields));
          native_object_relocations_.emplace(
              NativeObjectRelocation {
                  oat_index, offset, kNativeObjectRelocationTypeArtFieldArray
          offset += header_size;
          // Forward individual fields so that we can quickly find where they belong.
          for (size_t i = 0, count = cur_fields->size(); i < count; ++i) {
            // Need to forward arrays separately from fields.
            ArtField* field = &cur_fields->At(i);
            auto it2 = native_object_relocations_.find(field);
            CHECK(it2 == native_object_relocations_.end()) << "Field at index=" << i
                << " already assigned " << PrettyField(field) << " static=" << field->IsStatic();
            DCHECK(!IsInBootImage(field));
            native_object_relocations_.emplace(
                NativeObjectRelocation { oat_index, offset, kNativeObjectRelocationTypeArtField });
            offset += sizeof(ArtField);
      // Visit and assign offsets for methods.
      size_t num_methods = as_klass->NumMethods();
      if (num_methods != 0) {
        bool any_dirty = false;
        for (auto& m : as_klass->GetMethods(target_ptr_size_)) {
          if (WillMethodBeDirty(&m)) {
        NativeObjectRelocationType type = any_dirty
            ? kNativeObjectRelocationTypeArtMethodDirty
            : kNativeObjectRelocationTypeArtMethodClean;
        Bin bin_type = BinTypeForNativeRelocationType(type);
        // Forward the entire array at once, but header first.
        const size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
        const size_t method_size = ArtMethod::Size(target_ptr_size_);
        const size_t header_size = LengthPrefixedArray<ArtMethod>::ComputeSize(0,
        LengthPrefixedArray<ArtMethod>* array = as_klass->GetMethodsPtr();
        auto it = native_object_relocations_.find(array);
        CHECK(it == native_object_relocations_.end())
            << "Method array " << array << " already forwarded";
        size_t& offset = image_info.bin_slot_sizes_[bin_type];
        DCHECK(!IsInBootImage(array));
        native_object_relocations_.emplace(array,
            NativeObjectRelocation {
                any_dirty ? kNativeObjectRelocationTypeArtMethodArrayDirty
                          : kNativeObjectRelocationTypeArtMethodArrayClean });
        offset += header_size;
        for (auto& m : as_klass->GetMethods(target_ptr_size_)) {
          AssignMethodOffset(&m, type, oat_index);
        (any_dirty ? dirty_methods_ : clean_methods_) += num_methods;
    } else if (h_obj->IsObjectArray()) {
      // Walk elements of an object array.
      int32_t length = h_obj->AsObjectArray<mirror::Object>()->GetLength();
      for (int32_t i = 0; i < length; i++) {
        mirror::ObjectArray<mirror::Object>* obj_array = h_obj->AsObjectArray<mirror::Object>();
        mirror::Object* value = obj_array->Get(i);
        if (value != nullptr) {
          WalkFieldsInOrder(value);
    } else if (h_obj->IsClassLoader()) {
      // Register the class loader if it has a class table.
      // The fake boot class loader should not get registered and we should end up with only one
      // class loader.
      mirror::ClassLoader* class_loader = h_obj->AsClassLoader();
      if (class_loader->GetClassTable() != nullptr) {
        class_loaders_.insert(class_loader);
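
// Reserve space for one ArtMethod in the appropriate bin of the image for |oat_index| and record
// the relocation.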
void ImageWriter::AssignMethodOffset(ArtMethod* method,
                                     NativeObjectRelocationType type,
                                     size_t oat_index) {
  DCHECK(!IsInBootImage(method));
  auto it = native_object_relocations_.find(method);
  CHECK(it == native_object_relocations_.end()) << "Method " << method << " already assigned "
                                                << PrettyMethod(method);
  ImageInfo& image_info = GetImageInfo(oat_index);
  size_t& offset = image_info.bin_slot_sizes_[BinTypeForNativeRelocationType(type)];
  native_object_relocations_.emplace(method, NativeObjectRelocation { oat_index, offset, type });
  offset += ArtMethod::Size(target_ptr_size_);

void ImageWriter::WalkFieldsCallback(mirror::Object* obj, void* arg) {
  ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
  DCHECK(writer != nullptr);
  writer->WalkFieldsInOrder(obj);

void ImageWriter::UnbinObjectsIntoOffsetCallback(mirror::Object* obj, void* arg) {
  ImageWriter* writer = reinterpret_cast<ImageWriter*>(arg);
  DCHECK(writer != nullptr);
  if (!writer->IsInBootImage(obj)) {
    writer->UnbinObjectsIntoOffset(obj);

void ImageWriter::UnbinObjectsIntoOffset(mirror::Object* obj) {
  DCHECK(!IsInBootImage(obj));
  CHECK(obj != nullptr);

  // We know the bin slot, and the total bin sizes for all objects by now,
  // so calculate the object's final image offset.
  DCHECK(IsImageBinSlotAssigned(obj));
  BinSlot bin_slot = GetImageBinSlot(obj);
  // Change the lock word from a bin slot into an offset.
  AssignImageOffset(obj, bin_slot);
void ImageWriter::CalculateNewObjectOffsets() {
  Thread* const self = Thread::Current();
  StackHandleScopeCollection handles(self);
  std::vector<Handle<ObjectArray<Object>>> image_roots;
  for (size_t i = 0, size = oat_filenames_.size(); i != size; ++i) {
    image_roots.push_back(handles.NewHandle(CreateImageRoots(i)));

  auto* runtime = Runtime::Current();
  auto* heap = runtime->GetHeap();

  // Leave space for the header, but do not write it yet; we need to
  // know where image_roots is going to end up.
  image_objects_offset_begin_ = RoundUp(sizeof(ImageHeader), kObjectAlignment);  // 64-bit alignment

  // Clear any pre-existing monitors which may have been in the monitor words, assign bin slots.
  heap->VisitObjects(WalkFieldsCallback, this);
  // Write the image runtime methods.
  image_methods_[ImageHeader::kResolutionMethod] = runtime->GetResolutionMethod();
  image_methods_[ImageHeader::kImtConflictMethod] = runtime->GetImtConflictMethod();
  image_methods_[ImageHeader::kImtUnimplementedMethod] = runtime->GetImtUnimplementedMethod();
  image_methods_[ImageHeader::kCalleeSaveMethod] = runtime->GetCalleeSaveMethod(Runtime::kSaveAll);
  image_methods_[ImageHeader::kRefsOnlySaveMethod] =
      runtime->GetCalleeSaveMethod(Runtime::kRefsOnly);
  image_methods_[ImageHeader::kRefsAndArgsSaveMethod] =
      runtime->GetCalleeSaveMethod(Runtime::kRefsAndArgs);

  // Add room for fake length prefixed array for holding the image methods.
  const auto image_method_type = kNativeObjectRelocationTypeArtMethodArrayClean;
  auto it = native_object_relocations_.find(&image_method_array_);
  CHECK(it == native_object_relocations_.end());
  ImageInfo& default_image_info = GetImageInfo(GetDefaultOatIndex());
  size_t& offset =
      default_image_info.bin_slot_sizes_[BinTypeForNativeRelocationType(image_method_type)];
  if (!compile_app_image_) {
    native_object_relocations_.emplace(&image_method_array_,
        NativeObjectRelocation { GetDefaultOatIndex(), offset, image_method_type });
  size_t method_alignment = ArtMethod::Alignment(target_ptr_size_);
  const size_t array_size = LengthPrefixedArray<ArtMethod>::ComputeSize(
      0, ArtMethod::Size(target_ptr_size_), method_alignment);
  CHECK_ALIGNED_PARAM(array_size, method_alignment);
  offset += array_size;
  for (auto* m : image_methods_) {
    CHECK(m != nullptr);
    CHECK(m->IsRuntimeMethod());
    DCHECK_EQ(compile_app_image_, IsInBootImage(m)) << "Trampolines should be in boot image";
    if (!IsInBootImage(m)) {
      AssignMethodOffset(m, kNativeObjectRelocationTypeArtMethodClean, GetDefaultOatIndex());

  // Calculate size of the dex cache arrays slot and prepare offsets.
  PrepareDexCacheArraySlots();
  // Calculate the sizes of the intern tables and class tables.
  for (ImageInfo& image_info : image_infos_) {
    // Calculate how big the intern table will be after being serialized.
    InternTable* const intern_table = image_info.intern_table_.get();
    CHECK_EQ(intern_table->WeakSize(), 0u) << " should have strong interned all the strings";
    image_info.intern_table_bytes_ = intern_table->WriteToMemory(nullptr);
    // Calculate the size of the class table.
    ReaderMutexLock mu(self, *Locks::classlinker_classes_lock_);
    image_info.class_table_bytes_ += image_info.class_table_->WriteToMemory(nullptr);

  // Calculate bin slot offsets.
  for (ImageInfo& image_info : image_infos_) {
    size_t bin_offset = image_objects_offset_begin_;
    for (size_t i = 0; i != kBinSize; ++i) {
      image_info.bin_slot_offsets_[i] = bin_offset;
      bin_offset += image_info.bin_slot_sizes_[i];
      if (i == kBinArtField) {
        static_assert(kBinArtField + 1 == kBinArtMethodClean, "Methods follow fields.");
        static_assert(alignof(ArtField) == 4u, "ArtField alignment is 4.");
        DCHECK_ALIGNED(bin_offset, 4u);
        DCHECK(method_alignment == 4u || method_alignment == 8u);
        bin_offset = RoundUp(bin_offset, method_alignment);
    // NOTE: There may be additional padding between the bin slots and the intern table.
    DCHECK_EQ(image_info.image_end_,
              GetBinSizeSum(image_info, kBinMirrorCount) + image_objects_offset_begin_);

  // Calculate image offsets.
  size_t image_offset = 0;
  for (ImageInfo& image_info : image_infos_) {
    image_info.image_begin_ = global_image_begin_ + image_offset;
    image_info.image_offset_ = image_offset;
    ImageSection unused_sections[ImageHeader::kSectionCount];
    image_info.image_size_ = RoundUp(
        image_info.CreateImageSections(target_ptr_size_, unused_sections),
    // There should be no gaps until the next image.
    image_offset += image_info.image_size_;

  // Transform each object's bin slot into an offset which will be used to do the final copy.
  heap->VisitObjects(UnbinObjectsIntoOffsetCallback, this);

  // DCHECK_EQ(image_end_, GetBinSizeSum(kBinMirrorCount) + image_objects_offset_begin_);

  for (ImageInfo& image_info : image_infos_) {
    image_info.image_roots_address_ = PointerToLowMemUInt32(GetImageAddress(image_roots[i].Get()));

  // Update the native relocations by adding their bin sums.
  for (auto& pair : native_object_relocations_) {
    NativeObjectRelocation& relocation = pair.second;
    Bin bin_type = BinTypeForNativeRelocationType(relocation.type);
    ImageInfo& image_info = GetImageInfo(relocation.oat_index);
    relocation.offset += image_info.bin_slot_offsets_[bin_type];

  // Note that image_info.image_end_ is left at end of used mirror object section.
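
// Lay out the non-bitmap image sections (objects, fields, methods, dex cache arrays, interned
// strings, class table) and return the end position of the last section.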
size_t ImageWriter::ImageInfo::CreateImageSections(size_t target_ptr_size,
                                                   ImageSection* out_sections) const {
  DCHECK(out_sections != nullptr);
  auto* objects_section = &out_sections[ImageHeader::kSectionObjects];
  *objects_section = ImageSection(0u, image_end_);
  size_t cur_pos = objects_section->End();
  // Add field section.
  auto* field_section = &out_sections[ImageHeader::kSectionArtFields];
  *field_section = ImageSection(cur_pos, bin_slot_sizes_[kBinArtField]);
  CHECK_EQ(bin_slot_offsets_[kBinArtField], field_section->Offset());
  cur_pos = field_section->End();
  // Round up to the alignment required by the method section.
  cur_pos = RoundUp(cur_pos, ArtMethod::Alignment(target_ptr_size));
  // Add method section.
  auto* methods_section = &out_sections[ImageHeader::kSectionArtMethods];
  *methods_section = ImageSection(cur_pos,
                                  bin_slot_sizes_[kBinArtMethodClean] +
                                  bin_slot_sizes_[kBinArtMethodDirty]);
  CHECK_EQ(bin_slot_offsets_[kBinArtMethodClean], methods_section->Offset());
  cur_pos = methods_section->End();
  // Add dex cache arrays section.
  auto* dex_cache_arrays_section = &out_sections[ImageHeader::kSectionDexCacheArrays];
  *dex_cache_arrays_section = ImageSection(cur_pos, bin_slot_sizes_[kBinDexCacheArray]);
  CHECK_EQ(bin_slot_offsets_[kBinDexCacheArray], dex_cache_arrays_section->Offset());
  cur_pos = dex_cache_arrays_section->End();
  // Round up to the alignment the string table expects. See HashSet::WriteToMemory.
  cur_pos = RoundUp(cur_pos, sizeof(uint64_t));
  // Calculate the size of the interned strings.
  auto* interned_strings_section = &out_sections[ImageHeader::kSectionInternedStrings];
  *interned_strings_section = ImageSection(cur_pos, intern_table_bytes_);
  cur_pos = interned_strings_section->End();
  // Round up to the alignment the class table expects. See HashSet::WriteToMemory.
  cur_pos = RoundUp(cur_pos, sizeof(uint64_t));
  // Calculate the size of the class table section.
  auto* class_table_section = &out_sections[ImageHeader::kSectionClassTable];
  *class_table_section = ImageSection(cur_pos, class_table_bytes_);
  cur_pos = class_table_section->End();
  // Image end goes right before the start of the image bitmap.
void ImageWriter::CreateHeader(size_t oat_index) {
  ImageInfo& image_info = GetImageInfo(oat_index);
  const uint8_t* oat_file_begin = image_info.oat_file_begin_;
  const uint8_t* oat_file_end = oat_file_begin + image_info.oat_loaded_size_;
  const uint8_t* oat_data_end = image_info.oat_data_begin_ + image_info.oat_size_;

  // Create the image sections.
  ImageSection sections[ImageHeader::kSectionCount];
  const size_t image_end = image_info.CreateImageSections(target_ptr_size_, sections);

  // Finally bitmap section.
  const size_t bitmap_bytes = image_info.image_bitmap_->Size();
  auto* bitmap_section = &sections[ImageHeader::kSectionImageBitmap];
  *bitmap_section = ImageSection(RoundUp(image_end, kPageSize), RoundUp(bitmap_bytes, kPageSize));
  if (VLOG_IS_ON(compiler)) {
    LOG(INFO) << "Creating header for " << oat_filenames_[oat_index];
    for (const ImageSection& section : sections) {
      LOG(INFO) << static_cast<ImageHeader::ImageSections>(idx) << " " << section;
    LOG(INFO) << "Methods: clean=" << clean_methods_ << " dirty=" << dirty_methods_;
    LOG(INFO) << "Image roots address=" << std::hex << image_info.image_roots_address_ << std::dec;
    LOG(INFO) << "Image begin=" << std::hex << reinterpret_cast<uintptr_t>(global_image_begin_)
              << " Image offset=" << image_info.image_offset_ << std::dec;
    LOG(INFO) << "Oat file begin=" << std::hex << reinterpret_cast<uintptr_t>(oat_file_begin)
              << " Oat data begin=" << reinterpret_cast<uintptr_t>(image_info.oat_data_begin_)
              << " Oat data end=" << reinterpret_cast<uintptr_t>(oat_data_end)
              << " Oat file end=" << reinterpret_cast<uintptr_t>(oat_file_end);

  // Store boot image info for app image so that we can relocate.
  uint32_t boot_image_begin = 0;
  uint32_t boot_image_end = 0;
  uint32_t boot_oat_begin = 0;
  uint32_t boot_oat_end = 0;
  gc::Heap* const heap = Runtime::Current()->GetHeap();
  heap->GetBootImagesSize(&boot_image_begin, &boot_image_end, &boot_oat_begin, &boot_oat_end);

  // Create the header, leave 0 for data size since we will fill this in as we are writing the
  // image.
  new (image_info.image_->Begin()) ImageHeader(PointerToLowMemUInt32(image_info.image_begin_),
                                               image_info.image_roots_address_,
                                               image_info.oat_checksum_,
                                               PointerToLowMemUInt32(oat_file_begin),
                                               PointerToLowMemUInt32(image_info.oat_data_begin_),
                                               PointerToLowMemUInt32(oat_data_end),
                                               PointerToLowMemUInt32(oat_file_end),
                                               boot_image_end - boot_image_begin,
                                               boot_oat_end - boot_oat_begin,
                                               /*is_pic*/compile_app_image_,
                                               image_storage_mode_,
ArtMethod* ImageWriter::GetImageMethodAddress(ArtMethod* method) {
  auto it = native_object_relocations_.find(method);
  CHECK(it != native_object_relocations_.end()) << PrettyMethod(method) << " @ " << method;
  size_t oat_index = GetOatIndex(method->GetDexCache());
  ImageInfo& image_info = GetImageInfo(oat_index);
  CHECK_GE(it->second.offset, image_info.image_end_) << "ArtMethods should be after Objects";
  return reinterpret_cast<ArtMethod*>(image_info.image_begin_ + it->second.offset);

class FixupRootVisitor : public RootVisitor {
  explicit FixupRootVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {

  void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED)
      OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    for (size_t i = 0; i < count; ++i) {
      *roots[i] = image_writer_->GetImageAddress(*roots[i]);

  void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                  const RootInfo& info ATTRIBUTE_UNUSED)
      OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    for (size_t i = 0; i < count; ++i) {
      roots[i]->Assign(image_writer_->GetImageAddress(roots[i]->AsMirrorPtr()));

  ImageWriter* const image_writer_;
1511 void ImageWriter::CopyAndFixupNativeData(size_t oat_index) {
1512 ImageInfo& image_info = GetImageInfo(oat_index);
1513 // Copy ArtFields and methods to their locations and update the array for convenience.
1514 for (auto& pair : native_object_relocations_) {
1515 NativeObjectRelocation& relocation = pair.second;
1516 // Only work with fields and methods that are in the current oat file.
1517 if (relocation.oat_index != oat_index) {
1518 continue;
1520 auto* dest = image_info.image_->Begin() + relocation.offset;
1521 DCHECK_GE(dest, image_info.image_->Begin() + image_info.image_end_);
1522 DCHECK(!IsInBootImage(pair.first));
1523 switch (relocation.type) {
1524 case kNativeObjectRelocationTypeArtField: {
1525 memcpy(dest, pair.first, sizeof(ArtField));
1526 reinterpret_cast<ArtField*>(dest)->SetDeclaringClass(
1527 GetImageAddress(reinterpret_cast<ArtField*>(pair.first)->GetDeclaringClass()));
1530 case kNativeObjectRelocationTypeArtMethodClean:
1531 case kNativeObjectRelocationTypeArtMethodDirty: {
1532 CopyAndFixupMethod(reinterpret_cast<ArtMethod*>(pair.first),
1533 reinterpret_cast<ArtMethod*>(dest),
1534 image_info);
1537 // For arrays, copy just the header since the elements will get copied by their corresponding
1538 // relocations.
1539 case kNativeObjectRelocationTypeArtFieldArray: {
1540 memcpy(dest, pair.first, LengthPrefixedArray<ArtField>::ComputeSize(0));
1543 case kNativeObjectRelocationTypeArtMethodArrayClean:
1544 case kNativeObjectRelocationTypeArtMethodArrayDirty: {
1545 size_t size = ArtMethod::Size(target_ptr_size_);
1546 size_t alignment = ArtMethod::Alignment(target_ptr_size_);
1547 memcpy(dest, pair.first, LengthPrefixedArray<ArtMethod>::ComputeSize(0, size, alignment));
1548 // Clear padding to avoid non-deterministic data in the image (and placate valgrind).
1549 reinterpret_cast<LengthPrefixedArray<ArtMethod>*>(dest)->ClearPadding(size, alignment);
1552 case kNativeObjectRelocationTypeDexCacheArray:
1553 // Nothing to copy here, everything is done in FixupDexCache().
1557 // Fixup the image method roots.
1558 auto* image_header = reinterpret_cast<ImageHeader*>(image_info.image_->Begin());
1559 const ImageSection& methods_section = image_header->GetMethodsSection();
1560 for (size_t i = 0; i < ImageHeader::kImageMethodsCount; ++i) {
1561 ArtMethod* method = image_methods_[i];
1562 CHECK(method != nullptr);
1563 // Only place runtime methods in the image of the default oat file.
1564 if (method->IsRuntimeMethod() && oat_index != GetDefaultOatIndex()) {
1565 continue;
1567 if (!IsInBootImage(method)) {
1568 auto it = native_object_relocations_.find(method);
1569 CHECK(it != native_object_relocations_.end()) << "No forwarding for " << PrettyMethod(method);
1570 NativeObjectRelocation& relocation = it->second;
1571 CHECK(methods_section.Contains(relocation.offset)) << relocation.offset << " not in "
1572 << methods_section;
1573 CHECK(relocation.IsArtMethodRelocation()) << relocation.type;
1574 method = reinterpret_cast<ArtMethod*>(global_image_begin_ + it->second.offset);
1576 image_header->SetImageMethod(static_cast<ImageHeader::ImageMethod>(i), method);
1578 FixupRootVisitor root_visitor(this);
1580 // Write the intern table into the image.
1581 if (image_info.intern_table_bytes_ > 0) {
1582 const ImageSection& intern_table_section = image_header->GetImageSection(
1583 ImageHeader::kSectionInternedStrings);
1584 InternTable* const intern_table = image_info.intern_table_.get();
1585 uint8_t* const intern_table_memory_ptr =
1586 image_info.image_->Begin() + intern_table_section.Offset();
1587 const size_t intern_table_bytes = intern_table->WriteToMemory(intern_table_memory_ptr);
1588 CHECK_EQ(intern_table_bytes, image_info.intern_table_bytes_);
1589 // Fixup the pointers in the newly written intern table to contain image addresses.
1590 InternTable temp_intern_table;
1591 // Note that we require that ReadFromMemory does not make an internal copy of the elements so
1592 // that VisitRoots() will update the memory directly rather than the copies.
1593 // This also relies on visit roots not doing any verification which could fail after we update
1594 // the roots to be the image addresses.
1595 temp_intern_table.AddTableFromMemory(intern_table_memory_ptr);
1596 CHECK_EQ(temp_intern_table.Size(), intern_table->Size());
1597 temp_intern_table.VisitRoots(&root_visitor, kVisitRootFlagAllRoots);
1599 // Write the class table(s) into the image. class_table_bytes_ may be 0 if there are multiple
1600 // class loaders. Writing multiple class tables into the image is currently unsupported.
1601 if (image_info.class_table_bytes_ > 0u) {
1602 const ImageSection& class_table_section = image_header->GetImageSection(
1603 ImageHeader::kSectionClassTable);
1604 uint8_t* const class_table_memory_ptr =
1605 image_info.image_->Begin() + class_table_section.Offset();
1606 ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
1608 ClassTable* table = image_info.class_table_.get();
1609 CHECK(table != nullptr);
1610 const size_t class_table_bytes = table->WriteToMemory(class_table_memory_ptr);
1611 CHECK_EQ(class_table_bytes, image_info.class_table_bytes_);
1612 // Fixup the pointers in the newly written class table to contain image addresses. See
1613 // above comment for intern tables.
1614 ClassTable temp_class_table;
1615 temp_class_table.ReadFromMemory(class_table_memory_ptr);
1616 CHECK_EQ(temp_class_table.NumZygoteClasses(), table->NumNonZygoteClasses() +
1617 table->NumZygoteClasses());
1618 BufferedRootVisitor<kDefaultBufferedRootCount> buffered_visitor(&root_visitor,
1619 RootInfo(kRootUnknown));
1620 temp_class_table.VisitRoots(buffered_visitor);
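// As with the intern table above, temp_class_table reads the just-written memory in place, so
// visiting its roots through FixupRootVisitor rewrites the entries in the image buffer to their
// final image addresses rather than updating a separate copy.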
1624 void ImageWriter::CopyAndFixupObjects() {
1625 gc::Heap* heap = Runtime::Current()->GetHeap();
1626 heap->VisitObjects(CopyAndFixupObjectsCallback, this);
1627 // Fix up the objects that previously had hash codes.
1628 for (const auto& hash_pair : saved_hashcode_map_) {
1629 Object* obj = hash_pair.first;
1630 DCHECK_EQ(obj->GetLockWord<kVerifyNone>(false).ReadBarrierState(), 0U);
1631 obj->SetLockWord<kVerifyNone>(LockWord::FromHashCode(hash_pair.second, 0U), false);
1633 saved_hashcode_map_.clear();
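// During layout the lock words of these objects were used to hold their BinSlot (forwarding)
// information, so any pre-existing identity hash codes were stashed in saved_hashcode_map_;
// the loop above writes them back before the map is dropped.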
1636 void ImageWriter::CopyAndFixupObjectsCallback(Object* obj, void* arg) {
1637 DCHECK(obj != nullptr);
1638 DCHECK(arg != nullptr);
1639 reinterpret_cast<ImageWriter*>(arg)->CopyAndFixupObject(obj);
1642 void ImageWriter::FixupPointerArray(mirror::Object* dst, mirror::PointerArray* arr,
1643 mirror::Class* klass, Bin array_type) {
1644 CHECK(klass->IsArrayClass());
1645 CHECK(arr->IsIntArray() || arr->IsLongArray()) << PrettyClass(klass) << " " << arr;
1646 // Fixup int and long pointers for the ArtMethod or ArtField arrays.
1647 const size_t num_elements = arr->GetLength();
1648 dst->SetClass(GetImageAddress(arr->GetClass()));
1649 auto* dest_array = down_cast<mirror::PointerArray*>(dst);
1650 for (size_t i = 0, count = num_elements; i < count; ++i) {
1651 void* elem = arr->GetElementPtrSize<void*>(i, target_ptr_size_);
1652 if (elem != nullptr && !IsInBootImage(elem)) {
1653 auto it = native_object_relocations_.find(elem);
1654 if (UNLIKELY(it == native_object_relocations_.end())) {
1655 if (array_type == kBinArtMethodClean || array_type == kBinArtMethodDirty) {
1656 auto* method = reinterpret_cast<ArtMethod*>(elem);
1657 LOG(FATAL) << "No relocation entry for ArtMethod " << PrettyMethod(method) << " @ "
1658 << method << " idx=" << i << "/" << num_elements << " with declaring class "
1659 << PrettyClass(method->GetDeclaringClass());
1661 CHECK_EQ(array_type, kBinArtField);
1662 auto* field = reinterpret_cast<ArtField*>(elem);
1663 LOG(FATAL) << "No relocation entry for ArtField " << PrettyField(field) << " @ "
1664 << field << " idx=" << i << "/" << num_elements << " with declaring class "
1665 << PrettyClass(field->GetDeclaringClass());
1669 ImageInfo& image_info = GetImageInfo(it->second.oat_index);
1670 elem = image_info.image_begin_ + it->second.offset;
1673 dest_array->SetElementPtrSize<false, true>(i, elem, target_ptr_size_);
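// In short: elements that already live in the boot image keep their pointers, while everything
// else is redirected to image_begin_ plus its relocation offset, i.e. the address the native
// object will have once this image is mapped.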
1677 void ImageWriter::CopyAndFixupObject(Object* obj) {
1678 if (IsInBootImage(obj)) {
1679 return;
1681 size_t offset = GetImageOffset(obj);
1682 size_t oat_index = GetOatIndex(obj);
1683 ImageInfo& image_info = GetImageInfo(oat_index);
1684 auto* dst = reinterpret_cast<Object*>(image_info.image_->Begin() + offset);
1685 DCHECK_LT(offset, image_info.image_end_);
1686 const auto* src = reinterpret_cast<const uint8_t*>(obj);
1688 image_info.image_bitmap_->Set(dst); // Mark the obj as live.
1690 const size_t n = obj->SizeOf();
1691 DCHECK_LE(offset + n, image_info.image_->Size());
1692 memcpy(dst, src, n);
1694 // Write in a hash code for objects which have inflated monitors or a hash code in their monitor
1695 // word.
1696 const auto it = saved_hashcode_map_.find(obj);
1697 dst->SetLockWord(it != saved_hashcode_map_.end() ?
1698 LockWord::FromHashCode(it->second, 0u) : LockWord::Default(), false);
1699 FixupObject(obj, dst);
1702 // Rewrite all the references in the copied object to point to their image address equivalent
1703 class FixupVisitor {
1705 FixupVisitor(ImageWriter* image_writer, Object* copy) : image_writer_(image_writer), copy_(copy) {
1708 // Ignore class roots since we don't have a way to map them to the destination. These are handled
1709 // with other logic.
1710 void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
1711 const {}
1712 void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
1715 void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
1716 REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
1717 Object* ref = obj->GetFieldObject<Object, kVerifyNone>(offset);
1718 // Use SetFieldObjectWithoutWriteBarrier to avoid card marking since we are writing to the
1719 // image.
1720 copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
1721 offset,
1722 image_writer_->GetImageAddress(ref));
1725 // java.lang.ref.Reference visitor.
1726 void operator()(mirror::Class* klass ATTRIBUTE_UNUSED, mirror::Reference* ref) const
1727 SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
1728 copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
1729 mirror::Reference::ReferentOffset(),
1730 image_writer_->GetImageAddress(ref->GetReferent()));
1734 ImageWriter* const image_writer_;
1735 mirror::Object* const copy_;
1738 class FixupClassVisitor FINAL : public FixupVisitor {
1740 FixupClassVisitor(ImageWriter* image_writer, Object* copy) : FixupVisitor(image_writer, copy) {
1743 void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
1744 REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
1745 DCHECK(obj->IsClass());
1746 FixupVisitor::operator()(obj, offset, /*is_static*/false);
1749 void operator()(mirror::Class* klass ATTRIBUTE_UNUSED,
1750 mirror::Reference* ref ATTRIBUTE_UNUSED) const
1751 SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
1752 LOG(FATAL) << "Reference not expected here.";
1756 uintptr_t ImageWriter::NativeOffsetInImage(void* obj) {
1757 DCHECK(obj != nullptr);
1758 DCHECK(!IsInBootImage(obj));
1759 auto it = native_object_relocations_.find(obj);
1760 CHECK(it != native_object_relocations_.end()) << obj << " spaces "
1761 << Runtime::Current()->GetHeap()->DumpSpaces();
1762 const NativeObjectRelocation& relocation = it->second;
1763 return relocation.offset;
1766 template <typename T>
1767 T* ImageWriter::NativeLocationInImage(T* obj) {
1768 if (obj == nullptr || IsInBootImage(obj)) {
1769 return obj;
1771 auto it = native_object_relocations_.find(obj);
1772 CHECK(it != native_object_relocations_.end()) << obj << " spaces "
1773 << Runtime::Current()->GetHeap()->DumpSpaces();
1774 const NativeObjectRelocation& relocation = it->second;
1775 ImageInfo& image_info = GetImageInfo(relocation.oat_index);
1776 return reinterpret_cast<T*>(image_info.image_begin_ + relocation.offset);
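// NativeLocationInImage() returns where 'obj' will live once the image is mapped at
// image_begin_, which is what gets written into pointer fields. NativeCopyLocation() below
// instead returns the spot inside the in-memory image buffer (image_->Begin() + offset) so the
// copy itself can be written or fixed up before the image file is produced.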
1780 template <typename T>
1781 T* ImageWriter::NativeCopyLocation(T* obj, mirror::DexCache* dex_cache) {
1782 if (obj == nullptr || IsInBootImage(obj)) {
1783 return obj;
1785 size_t oat_index = GetOatIndexForDexCache(dex_cache);
1786 ImageInfo& image_info = GetImageInfo(oat_index);
1787 return reinterpret_cast<T*>(image_info.image_->Begin() + NativeOffsetInImage(obj));
1791 class NativeLocationVisitor {
1793 explicit NativeLocationVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}
1795 template <typename T>
1796 T* operator()(T* ptr) const SHARED_REQUIRES(Locks::mutator_lock_) {
1797 return image_writer_->NativeLocationInImage(ptr);
1801 ImageWriter* const image_writer_;
1804 void ImageWriter::FixupClass(mirror::Class* orig, mirror::Class* copy) {
1805 orig->FixupNativePointers(copy, target_ptr_size_, NativeLocationVisitor(this));
1806 FixupClassVisitor visitor(this, copy);
1807 static_cast<mirror::Object*>(orig)->VisitReferences(visitor, visitor);
1809 // Remove the clinitThreadId. This is required for image determinism.
1810 copy->SetClinitThreadId(static_cast<pid_t>(0));
1813 void ImageWriter::FixupObject(Object* orig, Object* copy) {
1814 DCHECK(orig != nullptr);
1815 DCHECK(copy != nullptr);
1816 if (kUseBakerOrBrooksReadBarrier) {
1817 orig->AssertReadBarrierPointer();
1818 if (kUseBrooksReadBarrier) {
1819 // Note the address 'copy' isn't the same as the image address of 'orig'.
1820 copy->SetReadBarrierPointer(GetImageAddress(orig));
1821 DCHECK_EQ(copy->GetReadBarrierPointer(), GetImageAddress(orig));
1824 auto* klass = orig->GetClass();
1825 if (klass->IsIntArrayClass() || klass->IsLongArrayClass()) {
1826 // Is this a native pointer array?
1827 auto it = pointer_arrays_.find(down_cast<mirror::PointerArray*>(orig));
1828 if (it != pointer_arrays_.end()) {
1829 // Should only need to fixup every pointer array exactly once.
1830 FixupPointerArray(copy, down_cast<mirror::PointerArray*>(orig), klass, it->second);
1831 pointer_arrays_.erase(it);
1832 return;
1835 if (orig->IsClass()) {
1836 FixupClass(orig->AsClass<kVerifyNone>(), down_cast<mirror::Class*>(copy));
1837 } else {
1838 if (klass == mirror::Method::StaticClass() || klass == mirror::Constructor::StaticClass()) {
1839 // Need to go update the ArtMethod.
1840 auto* dest = down_cast<mirror::AbstractMethod*>(copy);
1841 auto* src = down_cast<mirror::AbstractMethod*>(orig);
1842 ArtMethod* src_method = src->GetArtMethod();
1843 auto it = native_object_relocations_.find(src_method);
1844 CHECK(it != native_object_relocations_.end())
1845 << "Missing relocation for AbstractMethod.artMethod " << PrettyMethod(src_method);
1846 dest->SetArtMethod(
1847 reinterpret_cast<ArtMethod*>(global_image_begin_ + it->second.offset));
1848 } else if (!klass->IsArrayClass()) {
1849 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1850 if (klass == class_linker->GetClassRoot(ClassLinker::kJavaLangDexCache)) {
1851 FixupDexCache(down_cast<mirror::DexCache*>(orig), down_cast<mirror::DexCache*>(copy));
1852 } else if (klass->IsClassLoaderClass()) {
1853 mirror::ClassLoader* copy_loader = down_cast<mirror::ClassLoader*>(copy);
1854 // If src is a ClassLoader, set the class table to null so that it gets recreated by the
1855 // ClassLinker.
1856 copy_loader->SetClassTable(nullptr);
1857 // Also set allocator to null to be safe. The allocator is created when we create the class
1858 // table. We also never expect to unload things in the image since they are held live as
1859 // roots.
1860 copy_loader->SetAllocator(nullptr);
1863 FixupVisitor visitor(this, copy);
1864 orig->VisitReferences(visitor, visitor);
1869 class ImageAddressVisitor {
1871 explicit ImageAddressVisitor(ImageWriter* image_writer) : image_writer_(image_writer) {}
1873 template <typename T>
1874 T* operator()(T* ptr) const SHARED_REQUIRES(Locks::mutator_lock_) {
1875 return image_writer_->GetImageAddress(ptr);
1879 ImageWriter* const image_writer_;
1883 void ImageWriter::FixupDexCache(mirror::DexCache* orig_dex_cache,
1884 mirror::DexCache* copy_dex_cache) {
1885 // Though the DexCache array fields are usually treated as native pointers, we set the full
1886 // 64-bit values here, clearing the top 32 bits for 32-bit targets. The zero-extension is
1887 // done by casting to the unsigned type uintptr_t before casting to int64_t, i.e.
1888 // static_cast<int64_t>(reinterpret_cast<uintptr_t>(image_begin_ + offset)).
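// For example, on a 32-bit target an array located at image_begin_ + offset == 0xB0001000 is
// stored in the 64-bit field as 0x00000000B0001000, not sign-extended to 0xFFFFFFFFB0001000.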
1889 GcRoot<mirror::String>* orig_strings = orig_dex_cache->GetStrings();
1890 if (orig_strings != nullptr) {
1891 copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::StringsOffset(),
1892 NativeLocationInImage(orig_strings),
1893 /*pointer size*/8u);
1894 orig_dex_cache->FixupStrings(NativeCopyLocation(orig_strings, orig_dex_cache),
1895 ImageAddressVisitor(this));
1897 GcRoot<mirror::Class>* orig_types = orig_dex_cache->GetResolvedTypes();
1898 if (orig_types != nullptr) {
1899 copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedTypesOffset(),
1900 NativeLocationInImage(orig_types),
1901 /*pointer size*/8u);
1902 orig_dex_cache->FixupResolvedTypes(NativeCopyLocation(orig_types, orig_dex_cache),
1903 ImageAddressVisitor(this));
1905 ArtMethod** orig_methods = orig_dex_cache->GetResolvedMethods();
1906 if (orig_methods != nullptr) {
1907 copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedMethodsOffset(),
1908 NativeLocationInImage(orig_methods),
1909 /*pointer size*/8u);
1910 ArtMethod** copy_methods = NativeCopyLocation(orig_methods, orig_dex_cache);
1911 for (size_t i = 0, num = orig_dex_cache->NumResolvedMethods(); i != num; ++i) {
1912 ArtMethod* orig = mirror::DexCache::GetElementPtrSize(orig_methods, i, target_ptr_size_);
1913 // NativeLocationInImage also handles runtime methods since these have relocation info.
1914 ArtMethod* copy = NativeLocationInImage(orig);
1915 mirror::DexCache::SetElementPtrSize(copy_methods, i, copy, target_ptr_size_);
1918 ArtField** orig_fields = orig_dex_cache->GetResolvedFields();
1919 if (orig_fields != nullptr) {
1920 copy_dex_cache->SetFieldPtrWithSize<false>(mirror::DexCache::ResolvedFieldsOffset(),
1921 NativeLocationInImage(orig_fields),
1922 /*pointer size*/8u);
1923 ArtField** copy_fields = NativeCopyLocation(orig_fields, orig_dex_cache);
1924 for (size_t i = 0, num = orig_dex_cache->NumResolvedFields(); i != num; ++i) {
1925 ArtField* orig = mirror::DexCache::GetElementPtrSize(orig_fields, i, target_ptr_size_);
1926 ArtField* copy = NativeLocationInImage(orig);
1927 mirror::DexCache::SetElementPtrSize(copy_fields, i, copy, target_ptr_size_);
1931 // Remove the DexFile pointers. They will be fixed up when the runtime loads the oat file. Leaving
1932 // compiler pointers in here will make the output non-deterministic.
1933 copy_dex_cache->SetDexFile(nullptr);
1936 const uint8_t* ImageWriter::GetOatAddress(OatAddress type) const {
1937 DCHECK_LT(type, kOatAddressCount);
1938 // If we are compiling an app image, we need to use the stubs of the boot image.
1939 if (compile_app_image_) {
1940 // Use the current image pointers.
1941 const std::vector<gc::space::ImageSpace*>& image_spaces =
1942 Runtime::Current()->GetHeap()->GetBootImageSpaces();
1943 DCHECK(!image_spaces.empty());
1944 const OatFile* oat_file = image_spaces[0]->GetOatFile();
1945 CHECK(oat_file != nullptr);
1946 const OatHeader& header = oat_file->GetOatHeader();
1947 switch (type) {
1948 // TODO: We could maybe clean this up if we stored them in an array in the oat header.
1949 case kOatAddressQuickGenericJNITrampoline:
1950 return static_cast<const uint8_t*>(header.GetQuickGenericJniTrampoline());
1951 case kOatAddressInterpreterToInterpreterBridge:
1952 return static_cast<const uint8_t*>(header.GetInterpreterToInterpreterBridge());
1953 case kOatAddressInterpreterToCompiledCodeBridge:
1954 return static_cast<const uint8_t*>(header.GetInterpreterToCompiledCodeBridge());
1955 case kOatAddressJNIDlsymLookup:
1956 return static_cast<const uint8_t*>(header.GetJniDlsymLookup());
1957 case kOatAddressQuickIMTConflictTrampoline:
1958 return static_cast<const uint8_t*>(header.GetQuickImtConflictTrampoline());
1959 case kOatAddressQuickResolutionTrampoline:
1960 return static_cast<const uint8_t*>(header.GetQuickResolutionTrampoline());
1961 case kOatAddressQuickToInterpreterBridge:
1962 return static_cast<const uint8_t*>(header.GetQuickToInterpreterBridge());
1967 const ImageInfo& primary_image_info = GetImageInfo(0);
1968 return GetOatAddressForOffset(primary_image_info.oat_address_offsets_[type], primary_image_info);
1971 const uint8_t* ImageWriter::GetQuickCode(ArtMethod* method,
1972 const ImageInfo& image_info,
1973 bool* quick_is_interpreted) {
1974 DCHECK(!method->IsResolutionMethod()) << PrettyMethod(method);
1975 DCHECK(!method->IsImtConflictMethod()) << PrettyMethod(method);
1976 DCHECK(!method->IsImtUnimplementedMethod()) << PrettyMethod(method);
1977 DCHECK(method->IsInvokable()) << PrettyMethod(method);
1978 DCHECK(!IsInBootImage(method)) << PrettyMethod(method);
1980 // Use original code if it exists. Otherwise, set the code pointer to the resolution
1981 // trampoline.
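// Roughly, the selection below is:
//  - compiled code exists and (method is non-static, is a constructor, or its class is
//    initialized): use the compiled code;
//  - native method without code: use the generic JNI trampoline;
//  - non-native method without code: use the quick-to-interpreter bridge;
//  - otherwise (static method of an uninitialized class): use the quick resolution trampoline.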
1983 // Quick entrypoint:
1984 const void* quick_oat_entry_point =
1985 method->GetEntryPointFromQuickCompiledCodePtrSize(target_ptr_size_);
1986 const uint8_t* quick_code;
1988 if (UNLIKELY(IsInBootImage(method->GetDeclaringClass()))) {
1989 DCHECK(method->IsCopied());
1990 // If the code is not in the oat file corresponding to this image (e.g. default methods)
1991 quick_code = reinterpret_cast<const uint8_t*>(quick_oat_entry_point);
1993 uint32_t quick_oat_code_offset = PointerToLowMemUInt32(quick_oat_entry_point);
1994 quick_code = GetOatAddressForOffset(quick_oat_code_offset, image_info);
1997 *quick_is_interpreted = false;
1998 if (quick_code != nullptr && (!method->IsStatic() || method->IsConstructor() ||
1999 method->GetDeclaringClass()->IsInitialized())) {
2000 // We have code for a non-static or initialized method, just use the code.
2001 } else if (quick_code == nullptr && method->IsNative() &&
2002 (!method->IsStatic() || method->GetDeclaringClass()->IsInitialized())) {
2003 // Non-static or initialized native method missing compiled code, use generic JNI version.
2004 quick_code = GetOatAddress(kOatAddressQuickGenericJNITrampoline);
2005 } else if (quick_code == nullptr && !method->IsNative()) {
2006 // We don't have code at all for a non-native method, use the interpreter.
2007 quick_code = GetOatAddress(kOatAddressQuickToInterpreterBridge);
2008 *quick_is_interpreted = true;
2009 } else {
2010 CHECK(!method->GetDeclaringClass()->IsInitialized());
2011 // We have code for a static method, but need to go through the resolution stub for class
2012 // initialization.
2013 quick_code = GetOatAddress(kOatAddressQuickResolutionTrampoline);
2015 if (!IsInBootOatFile(quick_code)) {
2016 // DCHECK_GE(quick_code, oat_data_begin_);
2021 void ImageWriter::CopyAndFixupMethod(ArtMethod* orig,
2022 ArtMethod* copy,
2023 const ImageInfo& image_info) {
2024 memcpy(copy, orig, ArtMethod::Size(target_ptr_size_));
2026 copy->SetDeclaringClass(GetImageAddress(orig->GetDeclaringClassUnchecked()));
2028 ArtMethod** orig_resolved_methods = orig->GetDexCacheResolvedMethods(target_ptr_size_);
2029 copy->SetDexCacheResolvedMethods(NativeLocationInImage(orig_resolved_methods), target_ptr_size_);
2030 GcRoot<mirror::Class>* orig_resolved_types = orig->GetDexCacheResolvedTypes(target_ptr_size_);
2031 copy->SetDexCacheResolvedTypes(NativeLocationInImage(orig_resolved_types), target_ptr_size_);
2033 // OatWriter replaces the code_ with an offset value. Here we re-adjust to a pointer relative to
2034 // oat_begin_.
2036 // The resolution method has a special trampoline to call.
2037 Runtime* runtime = Runtime::Current();
2038 if (UNLIKELY(orig == runtime->GetResolutionMethod())) {
2039 copy->SetEntryPointFromQuickCompiledCodePtrSize(
2040 GetOatAddress(kOatAddressQuickResolutionTrampoline), target_ptr_size_);
2041 } else if (UNLIKELY(orig == runtime->GetImtConflictMethod() ||
2042 orig == runtime->GetImtUnimplementedMethod())) {
2043 copy->SetEntryPointFromQuickCompiledCodePtrSize(
2044 GetOatAddress(kOatAddressQuickIMTConflictTrampoline), target_ptr_size_);
2045 } else if (UNLIKELY(orig->IsRuntimeMethod())) {
2046 bool found_one = false;
2047 for (size_t i = 0; i < static_cast<size_t>(Runtime::kLastCalleeSaveType); ++i) {
2048 auto idx = static_cast<Runtime::CalleeSaveType>(i);
2049 if (runtime->HasCalleeSaveMethod(idx) && runtime->GetCalleeSaveMethod(idx) == orig) {
2050 found_one = true;
2051 break;
2054 CHECK(found_one) << "Expected to find callee save method but got " << PrettyMethod(orig);
2055 CHECK(copy->IsRuntimeMethod());
2057 // We assume all methods have code. If they don't currently then we set them to use the
2058 // resolution trampoline. Abstract methods never have code and so we need to make sure their
2059 // use results in an AbstractMethodError. We use the interpreter to achieve this.
2060 if (UNLIKELY(!orig->IsInvokable())) {
2061 copy->SetEntryPointFromQuickCompiledCodePtrSize(
2062 GetOatAddress(kOatAddressQuickToInterpreterBridge), target_ptr_size_);
2063 } else {
2064 bool quick_is_interpreted;
2065 const uint8_t* quick_code = GetQuickCode(orig, image_info, &quick_is_interpreted);
2066 copy->SetEntryPointFromQuickCompiledCodePtrSize(quick_code, target_ptr_size_);
2069 if (orig->IsNative()) {
2070 // The native method's pointer is set to a stub to lookup via dlsym.
2071 // Note this is not the code_ pointer, that is handled above.
2072 copy->SetEntryPointFromJniPtrSize(
2073 GetOatAddress(kOatAddressJNIDlsymLookup), target_ptr_size_);
2079 size_t ImageWriter::GetBinSizeSum(ImageWriter::ImageInfo& image_info, ImageWriter::Bin up_to) const {
2080 DCHECK_LE(up_to, kBinSize);
2081 return std::accumulate(&image_info.bin_slot_sizes_[0],
2082 &image_info.bin_slot_sizes_[up_to],
2083 /*init*/ static_cast<size_t>(0));
2086 ImageWriter::BinSlot::BinSlot(uint32_t lockword) : lockword_(lockword) {
2087 // These values may need to get updated if more bins are added to the enum Bin
2088 static_assert(kBinBits == 3, "wrong number of bin bits");
2089 static_assert(kBinShift == 27, "wrong bin shift");
2090 static_assert(sizeof(BinSlot) == sizeof(LockWord), "BinSlot/LockWord must have equal sizes");
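// A BinSlot packs a bin and a byte offset into the 32 bits normally occupied by the lock word:
// the low kBinShift (27) bits hold the kObjectAlignment-aligned index and the next kBinBits (3)
// bits hold the bin, leaving the remaining top bits to the lock word's own state.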
2092 DCHECK_LT(GetBin(), kBinSize);
2093 DCHECK_ALIGNED(GetIndex(), kObjectAlignment);
2096 ImageWriter::BinSlot::BinSlot(Bin bin, uint32_t index)
2097 : BinSlot(index | (static_cast<uint32_t>(bin) << kBinShift)) {
2098 DCHECK_EQ(index, GetIndex());
2101 ImageWriter::Bin ImageWriter::BinSlot::GetBin() const {
2102 return static_cast<Bin>((lockword_ & kBinMask) >> kBinShift);
2105 uint32_t ImageWriter::BinSlot::GetIndex() const {
2106 return lockword_ & ~kBinMask;
2109 ImageWriter::Bin ImageWriter::BinTypeForNativeRelocationType(NativeObjectRelocationType type) {
2110 switch (type) {
2111 case kNativeObjectRelocationTypeArtField:
2112 case kNativeObjectRelocationTypeArtFieldArray:
2113 return kBinArtField;
2114 case kNativeObjectRelocationTypeArtMethodClean:
2115 case kNativeObjectRelocationTypeArtMethodArrayClean:
2116 return kBinArtMethodClean;
2117 case kNativeObjectRelocationTypeArtMethodDirty:
2118 case kNativeObjectRelocationTypeArtMethodArrayDirty:
2119 return kBinArtMethodDirty;
2120 case kNativeObjectRelocationTypeDexCacheArray:
2121 return kBinDexCacheArray;
2126 size_t ImageWriter::GetOatIndex(mirror::Object* obj) const {
2127 if (compile_app_image_) {
2128 return GetDefaultOatIndex();
2130 mirror::DexCache* dex_cache =
2131 obj->IsDexCache() ? obj->AsDexCache()
2132 : obj->IsClass() ? obj->AsClass()->GetDexCache()
2133 : obj->GetClass()->GetDexCache();
2134 return GetOatIndexForDexCache(dex_cache);
2138 size_t ImageWriter::GetOatIndexForDexFile(const DexFile* dex_file) const {
2139 if (compile_app_image_) {
2140 return GetDefaultOatIndex();
2142 auto it = dex_file_oat_index_map_.find(dex_file);
2143 DCHECK(it != dex_file_oat_index_map_.end()) << dex_file->GetLocation();
2144 return it->second;
2148 size_t ImageWriter::GetOatIndexForDexCache(mirror::DexCache* dex_cache) const {
2149 if (dex_cache == nullptr) {
2150 return GetDefaultOatIndex();
2152 return GetOatIndexForDexFile(dex_cache->GetDexFile());
2156 void ImageWriter::UpdateOatFileLayout(size_t oat_index,
2157 size_t oat_loaded_size,
2158 size_t oat_data_offset,
2159 size_t oat_data_size) {
2160 const uint8_t* images_end = image_infos_.back().image_begin_ + image_infos_.back().image_size_;
2161 for (const ImageInfo& info : image_infos_) {
2162 DCHECK_LE(info.image_begin_ + info.image_size_, images_end);
2164 DCHECK(images_end != nullptr); // Image space must be ready.
2166 ImageInfo& cur_image_info = GetImageInfo(oat_index);
2167 cur_image_info.oat_file_begin_ = images_end + cur_image_info.oat_offset_;
2168 cur_image_info.oat_loaded_size_ = oat_loaded_size;
2169 cur_image_info.oat_data_begin_ = cur_image_info.oat_file_begin_ + oat_data_offset;
2170 cur_image_info.oat_size_ = oat_data_size;
2172 if (compile_app_image_) {
2173 CHECK_EQ(oat_filenames_.size(), 1u) << "App image should have no next image.";
2174 return;
2177 // Update the oat_offset of the next image info.
2178 if (oat_index + 1u != oat_filenames_.size()) {
2179 // There is a following one.
2180 ImageInfo& next_image_info = GetImageInfo(oat_index + 1u);
2181 next_image_info.oat_offset_ = cur_image_info.oat_offset_ + oat_loaded_size;
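// The overall layout being maintained here is: all image files first, then each oat file placed
// after the combined images at its oat_offset_; advancing the next image's oat_offset_ by this
// file's loaded size keeps the oat files contiguous in that order.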
2185 void ImageWriter::UpdateOatFileHeader(size_t oat_index, const OatHeader& oat_header) {
2186 ImageInfo& cur_image_info = GetImageInfo(oat_index);
2187 cur_image_info.oat_checksum_ = oat_header.GetChecksum();
2189 if (oat_index == GetDefaultOatIndex()) {
2190 // Primary oat file, read the trampolines.
2191 cur_image_info.oat_address_offsets_[kOatAddressInterpreterToInterpreterBridge] =
2192 oat_header.GetInterpreterToInterpreterBridgeOffset();
2193 cur_image_info.oat_address_offsets_[kOatAddressInterpreterToCompiledCodeBridge] =
2194 oat_header.GetInterpreterToCompiledCodeBridgeOffset();
2195 cur_image_info.oat_address_offsets_[kOatAddressJNIDlsymLookup] =
2196 oat_header.GetJniDlsymLookupOffset();
2197 cur_image_info.oat_address_offsets_[kOatAddressQuickGenericJNITrampoline] =
2198 oat_header.GetQuickGenericJniTrampolineOffset();
2199 cur_image_info.oat_address_offsets_[kOatAddressQuickIMTConflictTrampoline] =
2200 oat_header.GetQuickImtConflictTrampolineOffset();
2201 cur_image_info.oat_address_offsets_[kOatAddressQuickResolutionTrampoline] =
2202 oat_header.GetQuickResolutionTrampolineOffset();
2203 cur_image_info.oat_address_offsets_[kOatAddressQuickToInterpreterBridge] =
2204 oat_header.GetQuickToInterpreterBridgeOffset();
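// These trampoline offsets come straight from the primary oat header; GetOatAddress() and
// GetOatAddressForOffset() later turn them into absolute addresses when method entry points are
// patched (see CopyAndFixupMethod above).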
2208 ImageWriter::ImageWriter(
2209 const CompilerDriver& compiler_driver,
2210 uintptr_t image_begin,
2211 bool compile_pic,
2212 bool compile_app_image,
2213 ImageHeader::StorageMode image_storage_mode,
2214 const std::vector<const char*>& oat_filenames,
2215 const std::unordered_map<const DexFile*, size_t>& dex_file_oat_index_map)
2216 : compiler_driver_(compiler_driver),
2217 global_image_begin_(reinterpret_cast<uint8_t*>(image_begin)),
2218 image_objects_offset_begin_(0),
2219 compile_pic_(compile_pic),
2220 compile_app_image_(compile_app_image),
2221 target_ptr_size_(InstructionSetPointerSize(compiler_driver_.GetInstructionSet())),
2222 image_infos_(oat_filenames.size()),
2223 image_method_array_(ImageHeader::kImageMethodsCount),
2226 image_storage_mode_(image_storage_mode),
2227 oat_filenames_(oat_filenames),
2228 dex_file_oat_index_map_(dex_file_oat_index_map) {
2229 CHECK_NE(image_begin, 0U);
2230 std::fill_n(image_methods_, arraysize(image_methods_), nullptr);
2231 CHECK_EQ(compile_app_image, !Runtime::Current()->GetHeap()->GetBootImageSpaces().empty())
2232 << "Compiling a boot image should occur iff there are no boot image spaces loaded";
2235 ImageWriter::ImageInfo::ImageInfo()
2236 : intern_table_(new InternTable),
2237 class_table_(new ClassTable) {}