📄 serialize.cc.svn-base
字号:
}  // (closing brace of a definition that begins above this excerpt)


// Linear scan of 'list' for 'element'; returns its index, or -1 if absent.
static int IndexOf(const List<Object**>& list, Object** element) {
  for (int i = 0; i < list.length(); i++) {
    if (list[i] == element) return i;
  }
  return -1;
}


// Writes a handle stack to the stream as '[', count, then for each entry
// (in reverse order) '|' followed by the entry's index in global_handles_,
// terminated by ']'.  Every handle on the stack must already have been
// registered in global_handles_ (CHECK_GE enforces this).
void Serializer::PutGlobalHandleStack(const List<Handle<Object> >& stack) {
  writer_->PutC('[');
  writer_->PutInt(stack.length());
  for (int i = stack.length() - 1; i >= 0; i--) {
    writer_->PutC('|');
    int gh_index = IndexOf(global_handles_, stack[i].location());
    CHECK_GE(gh_index, 0);
    writer_->PutInt(gh_index);
  }
  writer_->PutC(']');
}


// Serializes the saved context stack and the saved security-context stack.
// Each stack is drained via RestoreContext/RestoreSecurityContext into a
// local list, then pushed back (in reverse, restoring the original order)
// so the in-memory state is left unchanged before being written out with
// PutGlobalHandleStack.
void Serializer::PutContextStack() {
  List<Handle<Object> > contexts(2);
  while (HandleScopeImplementer::instance()->HasSavedContexts()) {
    Handle<Object> context =
        HandleScopeImplementer::instance()->RestoreContext();
    contexts.Add(context);
  }
  // Re-save in reverse so the implementer's stack is exactly as before.
  for (int i = contexts.length() - 1; i >= 0; i--) {
    HandleScopeImplementer::instance()->SaveContext(contexts[i]);
  }
  PutGlobalHandleStack(contexts);

  List<Handle<Object> > security_contexts(2);
  while (HandleScopeImplementer::instance()->HasSavedSecurityContexts()) {
    Handle<Object> context =
        HandleScopeImplementer::instance()->RestoreSecurityContext();
    security_contexts.Add(context);
  }
  for (int i = security_contexts.length() - 1; i >= 0; i--) {
    Handle<Object> context = security_contexts[i];
    HandleScopeImplementer::instance()->SaveSecurityContext(context);
  }
  PutGlobalHandleStack(security_contexts);
}


// Writes an encoded (simulated) address as 'P' followed by the address
// packed into an int.
// NOTE(review): reinterpret_cast<int>(addr) assumes pointers fit in an
// int (32-bit target) — would truncate on 64-bit; confirm target width.
void Serializer::PutEncodedAddress(Address addr) {
  writer_->PutC('P');
  writer_->PutInt(reinterpret_cast<int>(addr));
}


// Returns the encoded address for 'o' and sets *serialized to true iff this
// call wrote the object to the stream.  Smis encode as themselves; a heap
// object already visited yields its previously saved simulated address;
// otherwise the object is serialized now via PutObject.
Address Serializer::Encode(Object* o, bool* serialized) {
  *serialized = false;
  if (o->IsSmi()) {
    return reinterpret_cast<Address>(o);
  } else {
    HeapObject* obj = HeapObject::cast(o);
    if (IsVisited(obj)) {
      return GetSavedAddress(obj);
    } else {
      // First visit: serialize the object.
      *serialized = true;
      return PutObject(obj);
    }
  }
}


// Serializes one heap object.  Stream layout produced here (and consumed by
// Deserializer::GetObject): '[' type size>>kObjectAlignmentBits encoded-addr
// {recursively embedded objects} '|' raw-object-bytes [debug: encoded-addr
// ']'].  Returns the simulated (post-deserialization) address of the object.
Address Serializer::PutObject(HeapObject* obj) {
  Map* map = obj->map();
  InstanceType type = map->instance_type();
  int size = obj->SizeFromMap(map);

  // Simulate the allocation of obj to predict where it will be
  // allocated during deserialization.
  Address addr = Allocate(obj).Encode();

  // Record the mapping before recursing so cycles resolve to this address.
  SaveAddress(obj, addr);

  if (type == CODE_TYPE) {
    Code* code = Code::cast(obj);
    // Ensure Code objects contain Object pointers, not Addresses.
    code->ConvertICTargetsFromAddressToObject();
    LOG(CodeMoveEvent(code->address(), addr));
  }

  // Write out the object prologue: type, size, and simulated address of obj.
  writer_->PutC('[');
  CHECK_EQ(0, size & kObjectAlignmentMask);
  writer_->PutInt(type);
  writer_->PutInt(size >> kObjectAlignmentBits);
  PutEncodedAddress(addr);  // encodes AllocationSpace

  // Visit all the pointers in the object other than the map. This
  // will recursively serialize any as-yet-unvisited objects.
  obj->Iterate(this);

  // Mark end of recursively embedded objects, start of object body.
  writer_->PutC('|');
  // Write out the raw contents of the object. No compression, but
  // fast to deserialize.
  writer_->PutBytes(obj->address(), size);
  // Update pointers and external references in the written object.
  ReferenceUpdater updater(obj, this);
  obj->Iterate(&updater);
  updater.Update(writer_->position() - size);

#ifdef DEBUG
  if (FLAG_debug_serialization) {
    // Write out the object epilogue to catch synchronization errors.
    PutEncodedAddress(addr);
    writer_->PutC(']');
  }
#endif

  if (type == CODE_TYPE) {
    Code* code = Code::cast(obj);
    // Convert relocations from Object* to Address in Code objects
    // (undo the conversion done above, restoring the live object).
    code->ConvertICTargetsFromObjectToAddress();
  }
  objects_++;
  return addr;
}


// Simulates the deserialization-time allocation of 'obj': determines its
// AllocationSpace (new-space objects are redirected to the old space the GC
// would promote them into), classifies it for GC treatment, and asks the
// per-space simulated allocator for a RelativeAddress.
RelativeAddress Serializer::Allocate(HeapObject* obj) {
  // Find out which AllocationSpace 'obj' is in.
  AllocationSpace s;
  bool found = false;
  for (int i = FIRST_SPACE; !found && i <= LAST_SPACE; i++) {
    s = static_cast<AllocationSpace>(i);
    found = Heap::InSpace(obj, s);
  }
  CHECK(found);
  if (s == NEW_SPACE) {
    // During deserialization the object will be allocated directly in the
    // old space it would have been promoted to.
    Space* space = Heap::TargetSpace(obj);
    ASSERT(space == Heap::old_pointer_space() ||
           space == Heap::old_data_space());
    s = (space == Heap::old_pointer_space()) ?
        OLD_POINTER_SPACE :
        OLD_DATA_SPACE;
  }
  int size = obj->Size();
  GCTreatment gc_treatment = DataObject;
  if (obj->IsFixedArray()) gc_treatment = PointerObject;
  else if (obj->IsCode()) gc_treatment = CodeObject;
  return allocator_[s]->Allocate(size, gc_treatment);
}


//------------------------------------------------------------------------------
// Implementation of Deserializer

static const int kInitArraySize = 32;


// Reads a snapshot from the byte buffer [str, str+len).  The page lists and
// large_objects_ cache start empty and are populated lazily while resolving
// encoded addresses.
Deserializer::Deserializer(const char* str, int len)
  : reader_(str, len),
    map_pages_(kInitArraySize),
    old_pointer_pages_(kInitArraySize),
    old_data_pages_(kInitArraySize),
    code_pages_(kInitArraySize),
    large_objects_(kInitArraySize),
    global_handles_(4) {
  root_ = true;
  roots_ = 0;
  objects_ = 0;
  reference_decoder_ = NULL;
#ifdef DEBUG
  expect_debug_information_ = false;
#endif
}


Deserializer::~Deserializer() {
  // reference_decoder_ is created lazily in Deserialize().
  if (reference_decoder_) delete reference_decoder_;
}


// Debug helper: reads the next encoded address from the stream and asserts
// it matches 'expected'.
void Deserializer::ExpectEncodedAddress(Address expected) {
  Address a = GetEncodedAddress();
  USE(a);
  ASSERT(a == expected);
}


#ifdef DEBUG
// Checks a synchronization tag ('S', length, tag bytes) against 'tag' when
// the snapshot was written with debug information.
void Deserializer::Synchronize(const char* tag) {
  if (expect_debug_information_) {
    char buf[kMaxTagLength];
    reader_.ExpectC('S');
    int length = reader_.GetInt();
    ASSERT(length <= kMaxTagLength);
    reader_.GetBytes(reinterpret_cast<Address>(buf), length);
    ASSERT_EQ(strlen(tag), length);
    ASSERT(strncmp(tag, buf, length) == 0);
  }
}
#endif


// Top-level entry point: reads the header, then deserializes all objects
// reachable from the roots, then restores the context stacks.
void Deserializer::Deserialize() {
  // No active threads.
  ASSERT_EQ(NULL, ThreadState::FirstInUse());
  // No active handles.
  ASSERT(HandleScopeImplementer::instance()->Blocks()->is_empty());
  reference_decoder_ = new ExternalReferenceDecoder();
  // By setting linear allocation only, we forbid the use of free list
  // allocation which is not predicted by SimulatedAddress.
  GetHeader();
  Heap::IterateRoots(this);
  GetContextStack();
}


// ObjectVisitor callback.  At root level each slot is read from the stream
// ('[' = embedded object, 'P' = encoded pointer); inside an object the slot
// already holds an encoded value copied in by GetBytes, which is resolved
// in place.  root_ is cleared for the duration so nested visits take the
// non-root path, then restored.
void Deserializer::VisitPointers(Object** start, Object** end) {
  bool root = root_;
  root_ = false;
  for (Object** p = start; p < end; ++p) {
    if (root) {
      roots_++;
      // Read the next object or encoded pointer from the stream.
      int c = reader_.GetC();
      if (c == '[') {
        *p = GetObject();  // embedded object
      } else {
        ASSERT(c == 'P');  // pointer to previously serialized object
        *p = Resolve(reinterpret_cast<Address>(reader_.GetInt()));
      }
    } else {
      // A pointer internal to a HeapObject that we've already
      // read: resolve it to a true address (or Smi)
      *p = Resolve(reinterpret_cast<Address>(*p));
    }
  }
  root_ = root;
}


// Rewrites serialized external-reference codes back into real addresses.
void Deserializer::VisitExternalReferences(Address* start, Address* end) {
  for (Address* p = start; p < end; ++p) {
    uint32_t code = reinterpret_cast<uint32_t>(*p);
    *p = reference_decoder_->Decode(code);
  }
}


// Decodes the runtime-entry encoding stored at the relocation's pc and
// patches in the real target address.
void Deserializer::VisitRuntimeEntry(RelocInfo* rinfo) {
  uint32_t* pc = reinterpret_cast<uint32_t*>(rinfo->pc());
  uint32_t encoding = *pc;
  Address target = reference_decoder_->Decode(encoding);
  rinfo->set_target_address(target);
}


// Reads the flags section ('F', count, '[' flag|flag|... ']') and applies
// the subset of flags the snapshot depends on; all argv strings are freed.
// argv[0] is deliberately unused (mirrors a conventional argv layout).
void Deserializer::GetFlags() {
  reader_.ExpectC('F');
  int argc = reader_.GetInt() + 1;
  char** argv = NewArray<char*>(argc);
  reader_.ExpectC('[');
  for (int i = 1; i < argc; i++) {
    if (i > 1) reader_.ExpectC('|');
    argv[i] = reader_.GetString();
  }
  reader_.ExpectC(']');
  has_log_ = false;
  for (int i = 1; i < argc; i++) {
    if (strcmp("--log_code", argv[i]) == 0) {
      has_log_ = true;
    } else if (strcmp("--nouse_ic", argv[i]) == 0) {
      FLAG_use_ic = false;
    } else if (strcmp("--debug_code", argv[i]) == 0) {
      FLAG_debug_code = true;
    } else if (strcmp("--nolazy", argv[i]) == 0) {
      FLAG_lazy = false;
    }
    DeleteArray(argv[i]);
  }
  DeleteArray(argv);
}


// Reads the snapshot's log section ('L' + string) if GetFlags saw
// --log_code, forwarding it to the logger when profiling is compiled in.
void Deserializer::GetLog() {
  if (has_log_) {
    reader_.ExpectC('L');
    char* snapshot_log = reader_.GetString();
#ifdef ENABLE_LOGGING_AND_PROFILING
    if (FLAG_log_code) {
      LOG(Preamble(snapshot_log));
    }
#endif
    DeleteArray(snapshot_log);
  }
}


// Grows 'space' to 'capacity' and records all of its pages in 'page_list'
// so encoded (page-index, offset) addresses can be resolved later.
static void InitPagedSpace(PagedSpace* space,
                           int capacity,
                           List<Page*>* page_list) {
  space->EnsureCapacity(capacity);
  // TODO(1240712): PagedSpace::EnsureCapacity can return false due to
  // a failure to allocate from the OS to expand the space.
  PageIterator it(space, PageIterator::ALL_PAGES);
  while (it.has_next()) page_list->Add(it.next());
}


// Reads the snapshot header: 'D', debug flag, 'S' '[' per-space capacities
// ']', then 'G' '[' one 'N' per global handle ']'.
void Deserializer::GetHeader() {
  reader_.ExpectC('D');
#ifdef DEBUG
  expect_debug_information_ = reader_.GetC() == '1';
#else
  // In release mode, don't attempt to read a snapshot containing
  // synchronization tags.
  if (reader_.GetC() != '0') FATAL("Snapshot contains synchronization tags.");
#endif
  // Ensure sufficient capacity in paged memory spaces to avoid growth
  // during deserialization.
  reader_.ExpectC('S');
  reader_.ExpectC('[');
  InitPagedSpace(Heap::old_pointer_space(),
                 reader_.GetInt(),
                 &old_pointer_pages_);
  reader_.ExpectC('|');
  InitPagedSpace(Heap::old_data_space(), reader_.GetInt(), &old_data_pages_);
  reader_.ExpectC('|');
  InitPagedSpace(Heap::code_space(), reader_.GetInt(), &code_pages_);
  reader_.ExpectC('|');
  InitPagedSpace(Heap::map_space(), reader_.GetInt(), &map_pages_);
  reader_.ExpectC(']');
  // Create placeholders for global handles, later to be filled during
  // IterateRoots.
  reader_.ExpectC('G');
  reader_.ExpectC('[');
  int c = reader_.GetC();
  while (c != ']') {
    ASSERT(c == 'N');
    global_handles_.Add(GlobalHandles::Create(NULL).location());
    c = reader_.GetC();
  }
}


// Inverse of Serializer::PutGlobalHandleStack: reads '[' count {'|' index}
// ']' and appends the corresponding placeholder global handles to 'stack'.
void Deserializer::GetGlobalHandleStack(List<Handle<Object> >* stack) {
  reader_.ExpectC('[');
  int length = reader_.GetInt();
  for (int i = 0; i < length; i++) {
    reader_.ExpectC('|');
    int gh_index = reader_.GetInt();
    stack->Add(global_handles_[gh_index]);
  }
  reader_.ExpectC(']');
}


// Inverse of Serializer::PutContextStack: rebuilds the saved context stack
// and saved security-context stack on the HandleScopeImplementer.
void Deserializer::GetContextStack() {
  List<Handle<Object> > entered_contexts(2);
  GetGlobalHandleStack(&entered_contexts);
  for (int i = 0; i < entered_contexts.length(); i++) {
    HandleScopeImplementer::instance()->SaveContext(entered_contexts[i]);
  }
  List<Handle<Object> > security_contexts(2);
  GetGlobalHandleStack(&security_contexts);
  for (int i = 0; i < security_contexts.length(); i++) {
    HandleScopeImplementer::instance()->
        SaveSecurityContext(security_contexts[i]);
  }
}


// Reads a 'P'-prefixed encoded address written by PutEncodedAddress.
// NOTE(review): like the writer, this packs the address through an int.
Address Deserializer::GetEncodedAddress() {
  reader_.ExpectC('P');
  return reinterpret_cast<Address>(reader_.GetInt());
}


// Reads one object from the stream (the counterpart of PutObject, whose
// prologue '[' has already been consumed by the caller): allocates raw
// storage at the simulated address, recursively reads embedded objects,
// copies the raw body, then resolves the encoded pointers it contains.
Object* Deserializer::GetObject() {
  // Read the prologue: type, size and encoded address.
  InstanceType type = static_cast<InstanceType>(reader_.GetInt());
  int size = reader_.GetInt() << kObjectAlignmentBits;
  Address a = GetEncodedAddress();

  // Get a raw object of the right size in the right space.
  AllocationSpace space = GetSpace(a);
  Object *o;
  if (IsLargeExecutableObject(a)) {
    o = Heap::lo_space()->AllocateRawCode(size);
  } else if (IsLargeFixedArray(a)) {
    o = Heap::lo_space()->AllocateRawFixedArray(size);
  } else {
    o = Heap::AllocateRaw(size, space);
  }
  ASSERT(!o->IsFailure());
  // Check that the simulation of heap allocation was correct.
  ASSERT(o == Resolve(a));

  // Read any recursively embedded objects.
  int c = reader_.GetC();
  while (c == '[') {
    GetObject();
    c = reader_.GetC();
  }
  ASSERT(c == '|');

  HeapObject* obj = reinterpret_cast<HeapObject*>(o);
  // Read the uninterpreted contents of the object after the map
  reader_.GetBytes(obj->address(), size);
#ifdef DEBUG
  if (expect_debug_information_) {
    // Read in the epilogue to check that we're still synchronized
    ExpectEncodedAddress(a);
    reader_.ExpectC(']');
  }
#endif

  // Resolve the encoded pointers we just read in.
  // Same as obj->Iterate(this), but doesn't rely on the map pointer being set.
  VisitPointer(reinterpret_cast<Object**>(obj->address()));
  obj->IterateBody(type, size, this);

  if (type == CODE_TYPE) {
    Code* code = Code::cast(obj);
    // Convert relocations from Object* to Address in Code objects
    code->ConvertICTargetsFromObjectToAddress();
    LOG(CodeMoveEvent(a, code->address()));
  }
  objects_++;
  return o;
}


// Maps a (page-index, page-offset) pair onto a real heap object using the
// pages recorded for 'space' in 'page_list'.
static inline Object* ResolvePaged(int page_index,
                                   int page_offset,
                                   PagedSpace* space,
                                   List<Page*>* page_list) {
  ASSERT(page_index < page_list->length());
  Address address = (*page_list)[page_index]->OffsetToAddress(page_offset);
  return HeapObject::FromAddress(address);
}


// Appends the elements of 'source' to 'target' in reverse order.
template<typename T>
void ConcatReversed(List<T> * target, const List<T> & source) {
  for (int i = source.length() - 1; i >= 0; i--) {
    target->Add(source[i]);
  }
}


// Translates an encoded address back into a live Object*.  Smis pass
// through unchanged; heap objects are looked up per space via the cached
// page lists (paged spaces), a new-space offset, or the large-object cache.
Object* Deserializer::Resolve(Address encoded) {
  Object* o = reinterpret_cast<Object*>(encoded);
  if (o->IsSmi()) return o;

  // Encoded addresses of HeapObjects always have 'HeapObject' tags.
  ASSERT(o->IsHeapObject());

  switch (GetSpace(encoded)) {
    // For Map space and Old space, we cache the known Pages in map_pages,
    // old_pointer_pages and old_data_pages. Even though MapSpace keeps a list
    // of page addresses, we don't rely on it since GetObject uses AllocateRaw,
    // and that appears not to update the page list.
    case MAP_SPACE:
      return ResolvePaged(PageIndex(encoded), PageOffset(encoded),
                          Heap::map_space(), &map_pages_);
    case OLD_POINTER_SPACE:
      return ResolvePaged(PageIndex(encoded), PageOffset(encoded),
                          Heap::old_pointer_space(), &old_pointer_pages_);
    case OLD_DATA_SPACE:
      return ResolvePaged(PageIndex(encoded), PageOffset(encoded),
                          Heap::old_data_space(), &old_data_pages_);
    case CODE_SPACE:
      return ResolvePaged(PageIndex(encoded), PageOffset(encoded),
                          Heap::code_space(), &code_pages_);
    case NEW_SPACE:
      return HeapObject::FromAddress(Heap::NewSpaceStart() +
                                     NewSpaceOffset(encoded));
    case LO_SPACE:
      // Cache the known large_objects, allocated one per 'page'
      int index = LargeObjectIndex(encoded);
      if (index >= large_objects_.length()) {
        // Lazily extend the cache with any large objects allocated since the
        // last resolution; the iterator yields newest-first, so reverse them.
        int new_object_count =
            Heap::lo_space()->PageCount() - large_objects_.length();
        List<Object*> new_objects(new_object_count);
        LargeObjectIterator it(Heap::lo_space());
        for (int i = 0; i < new_object_count; i++) {
          new_objects.Add(it.next());
        }
#ifdef DEBUG
        // The remaining iteration must match the already-cached objects.
        for (int i = large_objects_.length() - 1; i >= 0; i--) {
          ASSERT(it.next() == large_objects_[i]);
        }
#endif
        ConcatReversed(&large_objects_, new_objects);
        ASSERT(index < large_objects_.length());
      }
      return large_objects_[index];  // s.page_offset() is ignored.
  }
  UNREACHABLE();
  return NULL;
}

} }  // namespace v8::internal
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -