📄 jsgc.c
/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is Mozilla Communicator client code, released
 * March 31, 1998.
 *
 * The Initial Developer of the Original Code is
 * Netscape Communications Corporation.
 * Portions created by the Initial Developer are Copyright (C) 1998
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either of the GNU General Public License Version 2 or later (the "GPL"),
 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */

/*
 * JS Mark-and-Sweep Garbage Collector.
 *
 * This GC allocates only fixed-sized things big enough to contain two words
 * (pointers) on any host architecture.  It allocates from an arena pool (see
 * jsarena.h).  It uses an ideally parallel array of flag bytes to hold the
 * mark bit, finalizer type index, etc.
 *
 * XXX swizzle page to freelist for better locality of reference
 */
#include "jsstddef.h"
#include <stdlib.h>     /* for free, called by JS_ARENA_DESTROY */
#include <string.h>     /* for memset, called by jsarena.h macros if DEBUG */
#include "jstypes.h"
#include "jsarena.h"    /* Added by JSIFY */
#include "jsutil.h"     /* Added by JSIFY */
#include "jshash.h"     /* Added by JSIFY */
#include "jsapi.h"
#include "jsatom.h"
#include "jscntxt.h"
#include "jsconfig.h"
#include "jsdbgapi.h"
#include "jsfun.h"
#include "jsgc.h"
#include "jsinterp.h"
#include "jslock.h"
#include "jsnum.h"
#include "jsobj.h"
#include "jsscope.h"
#include "jsscript.h"
#include "jsstr.h"

/*
 * GC arena sizing depends on amortizing arena overhead using a large number
 * of things per arena, and on the thing/flags ratio of 8:1 on most platforms.
 *
 * On 64-bit platforms, we would have half as many things per arena because
 * pointers are twice as big, so we double the bytes for things per arena.
 * This preserves the 1024 byte flags sub-arena size, which relates to the
 * GC_PAGE_SIZE (see below for why).
 */
#if JS_BYTES_PER_WORD == 8
# define GC_THINGS_SHIFT 14     /* 16KB for things on Alpha, etc. */
#else
# define GC_THINGS_SHIFT 13     /* 8KB for things on most platforms */
#endif
#define GC_THINGS_SIZE  JS_BIT(GC_THINGS_SHIFT)
#define GC_FLAGS_SIZE   (GC_THINGS_SIZE / sizeof(JSGCThing))
#define GC_ARENA_SIZE   (GC_THINGS_SIZE + GC_FLAGS_SIZE)
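/*
 * Worked sizing example (illustrative sketch, assuming a 32-bit host where
 * sizeof(JSGCThing) is 8 bytes): GC_THINGS_SIZE is JS_BIT(13) == 8192 bytes
 * of things per arena, GC_FLAGS_SIZE is 8192 / 8 == 1024 flag bytes, and
 * GC_ARENA_SIZE is 8192 + 1024 == 9216 bytes -- the "9K-net-size hunk"
 * that gc_new_arena below requests from JS_ArenaAllocate.
 */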
/*
 * The private JSGCThing struct, which describes a gcFreeList element.
 */
struct JSGCThing {
    JSGCThing   *next;
    uint8       *flagp;
};

/*
 * A GC arena contains one flag byte for each thing in its heap, and supports
 * O(1) lookup of a flag given its thing's address.
 *
 * To implement this, we take advantage of the thing/flags numerology: given
 * the 8K bytes worth of GC-things, there are 1K flag bytes.  We mask a thing's
 * address with ~1023 to find a JSGCPageInfo record at the front of a mythical
 * "GC page" within the larger 8K thing arena.  That JSGCPageInfo contains a
 * pointer to the 128 flag bytes corresponding to the things in the page, so we
 * index into this flags array using the thing's index within its page.
 *
 * To align thing pages on 1024-byte boundaries, we must allocate the 9KB of
 * flags+things arena payload, then find the first 0 mod 1024 boundary after
 * the first payload address.  That's where things start, with a JSGCPageInfo
 * taking up the first thing-slot, as usual for 0 mod 1024 byte boundaries.
 * The effect of this alignment trick is to split the flags into at most 2
 * discontiguous spans, one before the things and one after (if we're really
 * lucky, and the arena payload starts on a 0 mod 1024 byte boundary, no need
 * to split).
 *
 * The overhead of this scheme for most platforms is (16+8*(8+1))/(16+9K) or
 * .95% (assuming 16 byte JSArena header size, and 8 byte JSGCThing size).
 *
 * Here's some ASCII art showing an arena:
 *
 *   split
 *     |
 *     V
 *  +--+-------+-------+-------+-------+-------+-------+-------+-------+-----+
 *  |fB|  tp0  |  tp1  |  tp2  |  tp3  |  tp4  |  tp5  |  tp6  |  tp7  | fA  |
 *  +--+-------+-------+-------+-------+-------+-------+-------+-------+-----+
 *              ^                                 ^
 *  tI ---------+                                 |
 *  tJ -------------------------------------------+
 *
 *  - fB are the "before split" flags, fA are the "after split" flags
 *  - tp0-tp7 are the 8 thing pages
 *  - thing tI points into tp1, whose flags are below the split, in fB
 *  - thing tJ points into tp5, clearly above the split
 *
 * In general, one of the thing pages will have some of its things' flags on
 * the low side of the split, and the rest of its things' flags on the high
 * side.  All the other pages have flags only below or only above.  Therefore
 * we'll have to test something to decide whether the split divides flags in
 * a given thing's page.  So we store the split pointer (the pointer to tp0)
 * in each JSGCPageInfo, along with the flags pointer for the 128 flag bytes
 * ideally starting, for tp0 things, at the beginning of the arena's payload
 * (at the start of fB).
 *
 * That is, each JSGCPageInfo's flags pointer is 128 bytes from the previous,
 * or at the start of the arena if there is no previous page in this arena.
 * Thus these ideal 128-byte flag pages run contiguously from the start of the
 * arena (right over the split!), and the JSGCPageInfo flags pointers contain
 * no discontinuities over the split created by the thing pages.  So if, for a
 * given JSGCPageInfo *pi, we find that
 *
 *  pi->flags + ((jsuword)thing % 1024) / sizeof(JSGCThing) >= pi->split
 *
 * then we must add GC_THINGS_SIZE to the nominal flags pointer to jump over
 * all the thing pages that split the flags into two discontiguous spans.
 *
 * (If we need to implement card-marking for an incremental GC write barrier,
 * we can use the low byte of the pi->split pointer as the card-mark, for an
 * extremely efficient write barrier: when mutating an object obj, just store
 * a 1 byte at (uint8 *) ((jsuword)obj & ~1023) for little-endian platforms.
 * When finding flags, we'll of course have to mask split with ~255, but it is
 * guaranteed to be 1024-byte aligned, so no information is lost by overlaying
 * the card-mark byte on split's low byte.)
 */
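/*
 * Worked flag-lookup example (illustrative sketch; the addresses are made up,
 * and sizes again assume an 8-byte JSGCThing with GC_PAGE_MASK == 1023):
 * suppose a thing lives at 0x10002468.  Masking with ~1023 gives its page's
 * JSGCPageInfo at 0x10002400; the thing's index within the page is
 * (0x10002468 & 1023) / 8 == 13, so its nominal flag byte is pi->flags + 13.
 * If that pointer is at or beyond pi->split, the flag really lives in the
 * "after split" span, so GC_THINGS_SIZE is added to skip over the thing
 * pages -- exactly the computation js_GetGCThingFlags performs below.
 */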
#define GC_PAGE_SHIFT   10
#define GC_PAGE_MASK    ((jsuword) JS_BITMASK(GC_PAGE_SHIFT))
#define GC_PAGE_SIZE    JS_BIT(GC_PAGE_SHIFT)

typedef struct JSGCPageInfo {
    uint8       *split;
    uint8       *flags;
} JSGCPageInfo;

#define FIRST_THING_PAGE(a)     (((a)->base + GC_FLAGS_SIZE) & ~GC_PAGE_MASK)

static JSGCThing *
gc_new_arena(JSArenaPool *pool)
{
    uint8 *flagp, *split, *pagep, *limit;
    JSArena *a;
    JSGCThing *thing;
    JSGCPageInfo *pi;

    /* Use JS_ArenaAllocate to grab another 9K-net-size hunk of space. */
    flagp = (uint8 *) JS_ArenaAllocate(pool, GC_ARENA_SIZE);
    if (!flagp)
        return NULL;
    a = pool->current;

    /* Reset a->avail to start at the flags split, aka the first thing page. */
    a->avail = FIRST_THING_PAGE(a);
    split = pagep = (uint8 *) a->avail;
    a->avail += sizeof(JSGCPageInfo);
    thing = (JSGCThing *) a->avail;
    a->avail += sizeof(JSGCThing);

    /* Initialize the JSGCPageInfo records at the start of every thing page. */
    limit = pagep + GC_THINGS_SIZE;
    do {
        pi = (JSGCPageInfo *) pagep;
        pi->split = split;
        pi->flags = flagp;
        flagp += GC_PAGE_SIZE >> (GC_THINGS_SHIFT - GC_PAGE_SHIFT);
        pagep += GC_PAGE_SIZE;
    } while (pagep < limit);
    return thing;
}

uint8 *
js_GetGCThingFlags(void *thing)
{
    JSGCPageInfo *pi;
    uint8 *flagp;

    pi = (JSGCPageInfo *) ((jsuword)thing & ~GC_PAGE_MASK);
    flagp = pi->flags + ((jsuword)thing & GC_PAGE_MASK) / sizeof(JSGCThing);
    if (flagp >= pi->split)
        flagp += GC_THINGS_SIZE;
    return flagp;
}

JSBool
js_IsAboutToBeFinalized(JSContext *cx, void *thing)
{
    uint8 flags = *js_GetGCThingFlags(thing);

    return !(flags & (GCF_MARK | GCF_LOCKMASK | GCF_FINAL));
}

typedef void (*GCFinalizeOp)(JSContext *cx, JSGCThing *thing);

static GCFinalizeOp gc_finalizers[GCX_NTYPES];

intN
js_ChangeExternalStringFinalizer(JSStringFinalizeOp oldop,
                                 JSStringFinalizeOp newop)
{
    uintN i;

    for (i = GCX_EXTERNAL_STRING; i < GCX_NTYPES; i++) {
        if (gc_finalizers[i] == (GCFinalizeOp) oldop) {
            gc_finalizers[i] = (GCFinalizeOp) newop;
            return (intN) i;
        }
    }
    return -1;
}

#ifdef JS_GCMETER
#define METER(x) x
#else
#define METER(x) /* nothing */
#endif
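/*
 * Usage sketch for js_ChangeExternalStringFinalizer (illustrative note; the
 * finalizer names below are hypothetical): installing a new external-string
 * finalizer returns the GC-thing type index it now occupies, or -1 if oldop
 * was not found in gc_finalizers, in which case nothing was changed.
 *
 *   intN type = js_ChangeExternalStringFinalizer(old_str_finalizer,
 *                                                new_str_finalizer);
 *   if (type < 0) {
 *       ... old_str_finalizer was not registered ...
 *   }
 */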
/* Initial size of the gcRootsHash table (SWAG, small enough to amortize). */
#define GC_ROOTS_SIZE   256
#define GC_FINALIZE_LEN 1024

JSBool
js_InitGC(JSRuntime *rt, uint32 maxbytes)
{
    JS_ASSERT(sizeof(JSGCThing) == sizeof(JSGCPageInfo));
    JS_ASSERT(sizeof(JSGCThing) >= sizeof(JSObject));
    JS_ASSERT(sizeof(JSGCThing) >= sizeof(JSString));
    JS_ASSERT(sizeof(JSGCThing) >= sizeof(jsdouble));
    JS_ASSERT(GC_FLAGS_SIZE >= GC_PAGE_SIZE);
    JS_ASSERT(sizeof(JSStackHeader) >= 2 * sizeof(jsval));

    if (!gc_finalizers[GCX_OBJECT]) {
        gc_finalizers[GCX_OBJECT] = (GCFinalizeOp)js_FinalizeObject;
        gc_finalizers[GCX_STRING] = (GCFinalizeOp)js_FinalizeString;
#ifdef DEBUG
        gc_finalizers[GCX_DOUBLE] = (GCFinalizeOp)js_FinalizeDouble;
#endif
        gc_finalizers[GCX_MUTABLE_STRING] = (GCFinalizeOp)js_FinalizeString;
    }

    JS_InitArenaPool(&rt->gcArenaPool, "gc-arena", GC_ARENA_SIZE,
                     sizeof(JSGCThing));
    if (!JS_DHashTableInit(&rt->gcRootsHash, JS_DHashGetStubOps(), NULL,
                           sizeof(JSGCRootHashEntry), GC_ROOTS_SIZE)) {
        rt->gcRootsHash.ops = NULL;
        return JS_FALSE;
    }
    rt->gcLocksHash = NULL;     /* create lazily */
    rt->gcMaxBytes = maxbytes;
    return JS_TRUE;
}

#ifdef JS_GCMETER
void
js_DumpGCStats(JSRuntime *rt, FILE *fp)
{
    fprintf(fp, "\nGC allocation statistics:\n");
    fprintf(fp, "     bytes currently allocated: %lu\n", rt->gcBytes);
    fprintf(fp, "                alloc attempts: %lu\n", rt->gcStats.alloc);
    fprintf(fp, "            GC freelist length: %lu\n", rt->gcStats.freelen);
    fprintf(fp, "  recycles through GC freelist: %lu\n", rt->gcStats.recycle);
    fprintf(fp, "alloc retries after running GC: %lu\n", rt->gcStats.retry);
    fprintf(fp, "           allocation failures: %lu\n", rt->gcStats.fail);
    fprintf(fp, "              valid lock calls: %lu\n", rt->gcStats.lock);
    fprintf(fp, "            valid unlock calls: %lu\n", rt->gcStats.unlock);
    fprintf(fp, "   locks that hit stuck counts: %lu\n", rt->gcStats.stuck);
    fprintf(fp, " unlocks that saw stuck counts: %lu\n", rt->gcStats.unstuck);
    fprintf(fp, "          mark recursion depth: %lu\n", rt->gcStats.depth);
    fprintf(fp, "  maximum mark recursion depth: %lu\n", rt->gcStats.maxdepth);
    fprintf(fp, "      maximum GC nesting level: %lu\n", rt->gcStats.maxlevel);
    fprintf(fp, "   potentially useful GC calls: %lu\n", rt->gcStats.poke);
    fprintf(fp, "              useless GC calls: %lu\n", rt->gcStats.nopoke);
    fprintf(fp, "     thing arenas freed so far: %lu\n", rt->gcStats.afree);
    fprintf(fp, "  extra stack segments scanned: %lu\n", rt->gcStats.stackseg);
    fprintf(fp, "   stack segment slots scanned: %lu\n", rt->gcStats.segslots);
#ifdef JS_ARENAMETER
    JS_DumpArenaStats(fp);
#endif
}
#endif

#ifdef DEBUG
JS_STATIC_DLL_CALLBACK(JSDHashOperator)
js_root_printer(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 i, void *arg)
{
    uint32 *leakedroots = (uint32 *)arg;
    JSGCRootHashEntry *rhe = (JSGCRootHashEntry *)hdr;

    (*leakedroots)++;
    fprintf(stderr,
            "JS engine warning: leaking GC root '%s' at %p\n",
            rhe->name ? (char *)rhe->name : "", rhe->root);

    return JS_DHASH_NEXT;
}
#endif

void
js_FinishGC(JSRuntime *rt)
{
#ifdef JS_ARENAMETER
    JS_DumpArenaStats(stdout);
#endif
#ifdef JS_GCMETER
    js_DumpGCStats(rt, stdout);
#endif

    JS_FinishArenaPool(&rt->gcArenaPool);
    JS_ArenaFinish();

    if (rt->gcRootsHash.ops) {
#ifdef DEBUG
        uint32 leakedroots = 0;

        /* Warn (but don't assert) debug builds of any remaining roots. */
        JS_DHashTableEnumerate(&rt->gcRootsHash, js_root_printer,
                               &leakedroots);
        if (leakedroots > 0) {
            if (leakedroots == 1) {