
heap.cc.svn-base
Google Chrome V8 engine source (SVN base file), page 1 of 5
// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "accessors.h"
#include "api.h"
#include "bootstrapper.h"
#include "codegen-inl.h"
#include "compilation-cache.h"
#include "debug.h"
#include "global-handles.h"
#include "jsregexp.h"
#include "mark-compact.h"
#include "natives.h"
#include "scanner.h"
#include "scopeinfo.h"
#include "v8threads.h"

namespace v8 { namespace internal {

#define ROOT_ALLOCATION(type, name) type* Heap::name##_;
  ROOT_LIST(ROOT_ALLOCATION)
#undef ROOT_ALLOCATION

#define STRUCT_ALLOCATION(NAME, Name, name) Map* Heap::name##_map_;
  STRUCT_LIST(STRUCT_ALLOCATION)
#undef STRUCT_ALLOCATION

#define SYMBOL_ALLOCATION(name, string) String* Heap::name##_;
  SYMBOL_LIST(SYMBOL_ALLOCATION)
#undef SYMBOL_ALLOCATION
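// Illustration of the macro pattern above: ROOT_LIST, STRUCT_LIST and
// SYMBOL_LIST are X-macro lists (defined in the accompanying heap.h); each
// applies the macro it is handed to every entry in its list.  Assuming a
// hypothetical ROOT_LIST entry V(Map, meta_map) (the entry name here is
// illustrative, not taken from this page), ROOT_LIST(ROOT_ALLOCATION)
// would expand to, among other definitions,
//
//   Map* Heap::meta_map_;
//
// so a single list in the header drives one static member definition per
// root object, and the two can never fall out of sync.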
NewSpace* Heap::new_space_ = NULL;
OldSpace* Heap::old_pointer_space_ = NULL;
OldSpace* Heap::old_data_space_ = NULL;
OldSpace* Heap::code_space_ = NULL;
MapSpace* Heap::map_space_ = NULL;
LargeObjectSpace* Heap::lo_space_ = NULL;

int Heap::promoted_space_limit_ = 0;
int Heap::old_gen_exhausted_ = false;

int Heap::amount_of_external_allocated_memory_ = 0;
int Heap::amount_of_external_allocated_memory_at_last_global_gc_ = 0;

// semispace_size_ should be a power of 2 and old_generation_size_ should be
// a multiple of Page::kPageSize.
int Heap::semispace_size_  = 1*MB;
int Heap::old_generation_size_ = 512*MB;
int Heap::initial_semispace_size_ = 256*KB;

GCCallback Heap::global_gc_prologue_callback_ = NULL;
GCCallback Heap::global_gc_epilogue_callback_ = NULL;

// Variables set based on semispace_size_ and old_generation_size_ in
// ConfigureHeap.
int Heap::young_generation_size_ = 0;  // Will be 2 * semispace_size_.

// Double the new space after this many scavenge collections.
int Heap::new_space_growth_limit_ = 8;
int Heap::scavenge_count_ = 0;

Heap::HeapState Heap::gc_state_ = NOT_IN_GC;

int Heap::mc_count_ = 0;
int Heap::gc_count_ = 0;

#ifdef DEBUG
bool Heap::allocation_allowed_ = true;

int Heap::allocation_timeout_ = 0;
bool Heap::disallow_allocation_failure_ = false;
#endif  // DEBUG


int Heap::Capacity() {
  if (!HasBeenSetup()) return 0;

  return new_space_->Capacity() +
      old_pointer_space_->Capacity() +
      old_data_space_->Capacity() +
      code_space_->Capacity() +
      map_space_->Capacity();
}


int Heap::Available() {
  if (!HasBeenSetup()) return 0;

  return new_space_->Available() +
      old_pointer_space_->Available() +
      old_data_space_->Available() +
      code_space_->Available() +
      map_space_->Available();
}


bool Heap::HasBeenSetup() {
  return new_space_ != NULL &&
         old_pointer_space_ != NULL &&
         old_data_space_ != NULL &&
         code_space_ != NULL &&
         map_space_ != NULL &&
         lo_space_ != NULL;
}


GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) {
  // Is global GC requested?
  if (space != NEW_SPACE || FLAG_gc_global) {
    Counters::gc_compactor_caused_by_request.Increment();
    return MARK_COMPACTOR;
  }

  // Is enough data promoted to justify a global GC?
  if (PromotedSpaceSize() + PromotedExternalMemorySize()
      > promoted_space_limit_) {
    Counters::gc_compactor_caused_by_promoted_data.Increment();
    return MARK_COMPACTOR;
  }

  // Have allocation in OLD and LO failed?
  if (old_gen_exhausted_) {
    Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment();
    return MARK_COMPACTOR;
  }

  // Is there enough space left in OLD to guarantee that a scavenge can
  // succeed?
  //
  // Note that MemoryAllocator->MaxAvailable() undercounts the memory available
  // for object promotion. It counts only the bytes that the memory
  // allocator has not yet allocated from the OS and assigned to any space,
  // and does not count available bytes already in the old space or code
  // space.  Undercounting is safe---we may get an unrequested full GC when
  // a scavenge would have succeeded.
  if (MemoryAllocator::MaxAvailable() <= new_space_->Size()) {
    Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment();
    return MARK_COMPACTOR;
  }

  // Default
  return SCAVENGER;
}
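// Summary of the selection policy above: the triggers are checked in
// order, and the first match wins.
//
//   space != NEW_SPACE or --gc-global            -> MARK_COMPACTOR
//   promoted data + external > promotion limit   -> MARK_COMPACTOR
//   old- or large-object allocation has failed   -> MARK_COMPACTOR
//   unreserved OS memory <= new space size       -> MARK_COMPACTOR
//   otherwise                                    -> SCAVENGER
//
// Only the last case runs the cheap copying collector over the new space;
// every other case escalates to a full mark-compact collection.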
// TODO(1238405): Combine the infrastructure for --heap-stats and
// --log-gc to avoid the complicated preprocessor and flag testing.
#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
void Heap::ReportStatisticsBeforeGC() {
  // Heap::ReportHeapStatistics will also log NewSpace statistics when
  // compiled with ENABLE_LOGGING_AND_PROFILING and --log-gc is set.  The
  // following logic is used to avoid double logging.
#if defined(DEBUG) && defined(ENABLE_LOGGING_AND_PROFILING)
  if (FLAG_heap_stats || FLAG_log_gc) new_space_->CollectStatistics();
  if (FLAG_heap_stats) {
    ReportHeapStatistics("Before GC");
  } else if (FLAG_log_gc) {
    new_space_->ReportStatistics();
  }
  if (FLAG_heap_stats || FLAG_log_gc) new_space_->ClearHistograms();
#elif defined(DEBUG)
  if (FLAG_heap_stats) {
    new_space_->CollectStatistics();
    ReportHeapStatistics("Before GC");
    new_space_->ClearHistograms();
  }
#elif defined(ENABLE_LOGGING_AND_PROFILING)
  if (FLAG_log_gc) {
    new_space_->CollectStatistics();
    new_space_->ReportStatistics();
    new_space_->ClearHistograms();
  }
#endif
}


// TODO(1238405): Combine the infrastructure for --heap-stats and
// --log-gc to avoid the complicated preprocessor and flag testing.
void Heap::ReportStatisticsAfterGC() {
  // Similar to the before GC, we use some complicated logic to ensure that
  // NewSpace statistics are logged exactly once when --log-gc is turned on.
#if defined(DEBUG) && defined(ENABLE_LOGGING_AND_PROFILING)
  if (FLAG_heap_stats) {
    ReportHeapStatistics("After GC");
  } else if (FLAG_log_gc) {
    new_space_->ReportStatistics();
  }
#elif defined(DEBUG)
  if (FLAG_heap_stats) ReportHeapStatistics("After GC");
#elif defined(ENABLE_LOGGING_AND_PROFILING)
  if (FLAG_log_gc) new_space_->ReportStatistics();
#endif
}
#endif  // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)


void Heap::GarbageCollectionPrologue() {
  RegExpImpl::NewSpaceCollectionPrologue();
  gc_count_++;
#ifdef DEBUG
  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
  allow_allocation(false);

  if (FLAG_verify_heap) {
    Verify();
  }

  if (FLAG_gc_verbose) Print();

  if (FLAG_print_rset) {
    // Not all spaces have remembered set bits that we care about.
    old_pointer_space_->PrintRSet();
    map_space_->PrintRSet();
    lo_space_->PrintRSet();
  }
#endif

#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
  ReportStatisticsBeforeGC();
#endif
}


int Heap::SizeOfObjects() {
  int total = 0;
  AllSpaces spaces;
  while (Space* space = spaces.next()) total += space->Size();
  return total;
}


void Heap::GarbageCollectionEpilogue() {
#ifdef DEBUG
  allow_allocation(true);
  ZapFromSpace();

  if (FLAG_verify_heap) {
    Verify();
  }

  if (FLAG_print_global_handles) GlobalHandles::Print();
  if (FLAG_print_handles) PrintHandles();
  if (FLAG_gc_verbose) Print();
  if (FLAG_code_stats) ReportCodeStatistics("After GC");
#endif

  Counters::alive_after_last_gc.Set(SizeOfObjects());

  SymbolTable* symbol_table = SymbolTable::cast(Heap::symbol_table_);
  Counters::symbol_table_capacity.Set(symbol_table->Capacity());
  Counters::number_of_symbols.Set(symbol_table->NumberOfElements());
#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
  ReportStatisticsAfterGC();
#endif
}


void Heap::CollectAllGarbage() {
  // Since we are ignoring the return value, the exact choice of space does
  // not matter, so long as we do not specify NEW_SPACE, which would not
  // cause a full GC.
  CollectGarbage(0, OLD_POINTER_SPACE);
}
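// Usage sketch for the two entry points above (hypothetical caller code,
// not from this file): a caller that failed an old-space allocation might
// collect explicitly and retry.
//
//   // Ask for 4 * KB of headroom in OLD_POINTER_SPACE; the return value
//   // says whether that much space is available after the collection.
//   if (!Heap::CollectGarbage(4 * KB, OLD_POINTER_SPACE)) {
//     // Even a full GC could not free enough memory.
//   }
//
// Note that CollectAllGarbage passes OLD_POINTER_SPACE precisely so that
// SelectGarbageCollector picks the full mark-compact collector.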
bool Heap::CollectGarbage(int requested_size, AllocationSpace space) {
  // The VM is in the GC state until exiting this function.
  VMState state(GC);

#ifdef DEBUG
  // Reset the allocation timeout to the GC interval, but make sure to
  // allow at least a few allocations after a collection. The reason
  // for this is that we have a lot of allocation sequences and we
  // assume that a garbage collection will allow the subsequent
  // allocation attempts to go through.
  allocation_timeout_ = Max(6, FLAG_gc_interval);
#endif

  { GCTracer tracer;
    GarbageCollectionPrologue();
    // The GC count was incremented in the prologue.  Tell the tracer about
    // it.
    tracer.set_gc_count(gc_count_);

    GarbageCollector collector = SelectGarbageCollector(space);
    // Tell the tracer which collector we've selected.
    tracer.set_collector(collector);

    StatsRate* rate = (collector == SCAVENGER)
        ? &Counters::gc_scavenger
        : &Counters::gc_compactor;
    rate->Start();
    PerformGarbageCollection(space, collector, &tracer);
    rate->Stop();

    GarbageCollectionEpilogue();
  }

#ifdef ENABLE_LOGGING_AND_PROFILING
  if (FLAG_log_gc) HeapProfiler::WriteSample();
#endif

  switch (space) {
    case NEW_SPACE:
      return new_space_->Available() >= requested_size;
    case OLD_POINTER_SPACE:
      return old_pointer_space_->Available() >= requested_size;
    case OLD_DATA_SPACE:
      return old_data_space_->Available() >= requested_size;
    case CODE_SPACE:
      return code_space_->Available() >= requested_size;
    case MAP_SPACE:
      return map_space_->Available() >= requested_size;
    case LO_SPACE:
      return lo_space_->Available() >= requested_size;
  }
  return false;
}


void Heap::PerformScavenge() {
  GCTracer tracer;
  PerformGarbageCollection(NEW_SPACE, SCAVENGER, &tracer);
}


void Heap::PerformGarbageCollection(AllocationSpace space,
                                    GarbageCollector collector,
                                    GCTracer* tracer) {
  if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
    ASSERT(!allocation_allowed_);
    global_gc_prologue_callback_();
  }

  if (collector == MARK_COMPACTOR) {
    MarkCompact(tracer);

    int promoted_space_size = PromotedSpaceSize();
    promoted_space_limit_ =
        promoted_space_size + Max(2 * MB, (promoted_space_size/100) * 35);
    old_gen_exhausted_ = false;

    // If we have used the mark-compact collector to collect the new
    // space, and it has not compacted the new space, we force a
    // separate scavenge collection.  This is a hack.  It covers the
    // case where (1) a new space collection was requested, (2) the
    // collector selection policy selected the mark-compact collector,
    // and (3) the mark-compact collector policy selected not to
    // compact the new space.  In that case, there is no more (usable)
    // free space in the new space after the collection compared to
    // before.
    if (space == NEW_SPACE && !MarkCompactCollector::HasCompacted()) {
      Scavenge();
    }
  } else {
    Scavenge();
  }
  Counters::objs_since_last_young.Set(0);

  // Process weak handles post gc.
  GlobalHandles::PostGarbageCollectionProcessing();

  if (collector == MARK_COMPACTOR) {
    // Register the amount of external allocated memory.
    amount_of_external_allocated_memory_at_last_global_gc_ =
        amount_of_external_allocated_memory_;
  }

  if (collector == MARK_COMPACTOR && global_gc_epilogue_callback_) {
    ASSERT(!allocation_allowed_);
    global_gc_epilogue_callback_();
  }
}
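// Worked example for the promotion limit computed above (illustrative
// numbers, ignoring external memory): if PromotedSpaceSize() returns 20 MB
// after a mark-compact collection, then
//
//   promoted_space_limit_ = 20 MB + Max(2 MB, (20 MB / 100) * 35)
//                         = 20 MB + 7 MB = 27 MB
//
// so promoted data may grow by roughly 35% (but never by less than 2 MB)
// before the promotion check in SelectGarbageCollector triggers the next
// full collection.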
