2009-06-10 18:29:44 -07:00
|
|
|
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
|
2007-03-22 10:30:00 -07:00
|
|
|
* vim: set ts=8 sw=4 et tw=78:
|
|
|
|
*
|
|
|
|
* ***** BEGIN LICENSE BLOCK *****
|
|
|
|
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
|
|
|
*
|
|
|
|
* The contents of this file are subject to the Mozilla Public License Version
|
|
|
|
* 1.1 (the "License"); you may not use this file except in compliance with
|
|
|
|
* the License. You may obtain a copy of the License at
|
|
|
|
* http://www.mozilla.org/MPL/
|
|
|
|
*
|
|
|
|
* Software distributed under the License is distributed on an "AS IS" basis,
|
|
|
|
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
|
|
|
* for the specific language governing rights and limitations under the
|
|
|
|
* License.
|
|
|
|
*
|
|
|
|
* The Original Code is Mozilla Communicator client code, released
|
|
|
|
* March 31, 1998.
|
|
|
|
*
|
|
|
|
* The Initial Developer of the Original Code is
|
|
|
|
* Netscape Communications Corporation.
|
|
|
|
* Portions created by the Initial Developer are Copyright (C) 1998
|
|
|
|
* the Initial Developer. All Rights Reserved.
|
|
|
|
*
|
|
|
|
* Contributor(s):
|
|
|
|
*
|
|
|
|
* Alternatively, the contents of this file may be used under the terms of
|
|
|
|
* either of the GNU General Public License Version 2 or later (the "GPL"),
|
|
|
|
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
|
|
|
* in which case the provisions of the GPL or the LGPL are applicable instead
|
|
|
|
* of those above. If you wish to allow use of your version of this file only
|
|
|
|
* under the terms of either the GPL or the LGPL, and not to allow others to
|
|
|
|
* use your version of this file under the terms of the MPL, indicate your
|
|
|
|
* decision by deleting the provisions above and replace them with the notice
|
|
|
|
* and other provisions required by the GPL or the LGPL. If you do not delete
|
|
|
|
* the provisions above, a recipient may use your version of this file under
|
|
|
|
* the terms of any one of the MPL, the GPL or the LGPL.
|
|
|
|
*
|
|
|
|
* ***** END LICENSE BLOCK ***** */
|
|
|
|
|
|
|
|
/*
|
|
|
|
* JS Mark-and-Sweep Garbage Collector.
|
|
|
|
*
|
|
|
|
* This GC allocates fixed-sized things with sizes up to GC_NBYTES_MAX (see
|
|
|
|
* jsgc.h). It allocates from a special GC arena pool with each arena allocated
|
|
|
|
* using malloc. It uses an ideally parallel array of flag bytes to hold the
|
|
|
|
* mark bit, finalizer type index, etc.
|
|
|
|
*
|
|
|
|
* XXX swizzle page to freelist for better locality of reference
|
|
|
|
*/
|
2008-05-28 19:07:32 -07:00
|
|
|
#include <math.h>
|
2007-03-22 10:30:00 -07:00
|
|
|
#include <string.h> /* for memset used when DEBUG */
|
|
|
|
#include "jstypes.h"
|
2009-03-18 11:38:16 -07:00
|
|
|
#include "jsstdint.h"
|
2010-10-01 16:46:54 -07:00
|
|
|
#include "jsutil.h"
|
|
|
|
#include "jshash.h"
|
2007-03-22 10:30:00 -07:00
|
|
|
#include "jsbit.h"
|
|
|
|
#include "jsclist.h"
|
2008-05-28 19:07:32 -07:00
|
|
|
#include "jsprf.h"
|
|
|
|
#include "jsapi.h"
|
|
|
|
#include "jsatom.h"
|
2007-03-22 10:30:00 -07:00
|
|
|
#include "jscntxt.h"
|
2008-09-05 10:19:17 -07:00
|
|
|
#include "jsversion.h"
|
2011-04-18 15:42:07 -07:00
|
|
|
#include "jsdbg.h"
|
2007-03-22 10:30:00 -07:00
|
|
|
#include "jsdbgapi.h"
|
|
|
|
#include "jsexn.h"
|
|
|
|
#include "jsfun.h"
|
|
|
|
#include "jsgc.h"
|
2010-04-12 13:59:19 -07:00
|
|
|
#include "jsgcchunk.h"
|
2011-04-15 16:56:08 -07:00
|
|
|
#include "jsgcmark.h"
|
2007-03-22 10:30:00 -07:00
|
|
|
#include "jsinterp.h"
|
|
|
|
#include "jsiter.h"
|
|
|
|
#include "jslock.h"
|
|
|
|
#include "jsnum.h"
|
|
|
|
#include "jsobj.h"
|
2007-07-08 02:03:34 -07:00
|
|
|
#include "jsparse.h"
|
2010-05-18 19:21:43 -07:00
|
|
|
#include "jsproxy.h"
|
2007-03-22 10:30:00 -07:00
|
|
|
#include "jsscope.h"
|
|
|
|
#include "jsscript.h"
|
2009-01-30 15:40:05 -08:00
|
|
|
#include "jsstaticcheck.h"
|
2007-03-22 10:30:00 -07:00
|
|
|
#include "jsstr.h"
|
2010-09-02 20:04:33 -07:00
|
|
|
#include "methodjit/MethodJIT.h"
|
2007-03-22 10:30:00 -07:00
|
|
|
|
|
|
|
#if JS_HAS_XML_SUPPORT
|
|
|
|
#include "jsxml.h"
|
|
|
|
#endif
|
|
|
|
|
2010-09-01 14:09:54 -07:00
|
|
|
#include "jsprobes.h"
|
2009-12-30 03:06:26 -08:00
|
|
|
#include "jsobjinlines.h"
|
2010-05-20 13:50:08 -07:00
|
|
|
#include "jshashtable.h"
|
2011-04-16 21:23:44 -07:00
|
|
|
#include "jsweakmap.h"
|
2009-12-30 03:06:26 -08:00
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
#include "jsstrinlines.h"
|
|
|
|
#include "jscompartment.h"
|
|
|
|
|
2010-06-21 05:22:32 -07:00
|
|
|
#ifdef MOZ_VALGRIND
|
|
|
|
# define JS_VALGRIND
|
|
|
|
#endif
|
|
|
|
#ifdef JS_VALGRIND
|
|
|
|
# include <valgrind/memcheck.h>
|
|
|
|
#endif
|
|
|
|
|
2010-01-22 14:49:18 -08:00
|
|
|
using namespace js;
using namespace js::gc;

/*
 * Check that JSTRACE_XML follows JSTRACE_OBJECT and JSTRACE_STRING.
 * Code elsewhere relies on these exact numeric values.
 */
JS_STATIC_ASSERT(JSTRACE_OBJECT == 0);
JS_STATIC_ASSERT(JSTRACE_STRING == 1);
JS_STATIC_ASSERT(JSTRACE_SHAPE == 2);
JS_STATIC_ASSERT(JSTRACE_XML == 3);

/*
 * JS_IS_VALID_TRACE_KIND assumes that JSTRACE_SHAPE is the last non-xml
 * trace kind when JS_HAS_XML_SUPPORT is false.
 */
JS_STATIC_ASSERT(JSTRACE_SHAPE + 1 == JSTRACE_XML);

/* GC metering helpers: these compile to no-ops unless JS_GCMETER is set. */
#ifdef JS_GCMETER
# define METER(x)               ((void) (x))
# define METER_IF(condition, x) ((void) ((condition) && (x)))
#else
# define METER(x)               ((void) 0)
# define METER_IF(condition, x) ((void) 0)
#endif

/* Record a new maximum in a metering counter (no-op without JS_GCMETER). */
# define METER_UPDATE_MAX(maxLval, rval)                                      \
    METER_IF((maxLval) < (rval), (maxLval) = (rval))
|
2010-01-14 00:27:32 -08:00
|
|
|
|
2011-03-23 11:57:37 -07:00
|
|
|
namespace js {
|
|
|
|
namespace gc {
|
2010-01-14 00:27:32 -08:00
|
|
|
|
2010-10-13 11:49:22 -07:00
|
|
|
/* This array should be const, but that doesn't link right under GCC. */
/*
 * Map an object's fixed-slot count (0..16) to the smallest FINALIZE_OBJECT*
 * allocation bucket that can hold it; indices between bucket sizes round up.
 */
FinalizeKind slotsToThingKind[] = {
    /* 0 */  FINALIZE_OBJECT0,  FINALIZE_OBJECT2,  FINALIZE_OBJECT2,  FINALIZE_OBJECT4,
    /* 4 */  FINALIZE_OBJECT4,  FINALIZE_OBJECT8,  FINALIZE_OBJECT8,  FINALIZE_OBJECT8,
    /* 8 */  FINALIZE_OBJECT8,  FINALIZE_OBJECT12, FINALIZE_OBJECT12, FINALIZE_OBJECT12,
    /* 12 */ FINALIZE_OBJECT12, FINALIZE_OBJECT16, FINALIZE_OBJECT16, FINALIZE_OBJECT16,
    /* 16 */ FINALIZE_OBJECT16
};

JS_STATIC_ASSERT(JS_ARRAY_LENGTH(slotsToThingKind) == SLOTS_TO_THING_KIND_LIMIT);
|
|
|
|
|
2011-04-19 22:30:10 -07:00
|
|
|
/*
 * Size in bytes of the GC thing for each finalize kind.  The table is
 * indexed by FinalizeKind, so entry order must match the FINALIZE_*
 * enumeration (checked by the static assert below).
 */
const uint8 GCThingSizeMap[] = {
    sizeof(JSObject),           /* FINALIZE_OBJECT0             */
    sizeof(JSObject),           /* FINALIZE_OBJECT0_BACKGROUND  */
    sizeof(JSObject_Slots2),    /* FINALIZE_OBJECT2             */
    sizeof(JSObject_Slots2),    /* FINALIZE_OBJECT2_BACKGROUND  */
    sizeof(JSObject_Slots4),    /* FINALIZE_OBJECT4             */
    sizeof(JSObject_Slots4),    /* FINALIZE_OBJECT4_BACKGROUND  */
    sizeof(JSObject_Slots8),    /* FINALIZE_OBJECT8             */
    sizeof(JSObject_Slots8),    /* FINALIZE_OBJECT8_BACKGROUND  */
    sizeof(JSObject_Slots12),   /* FINALIZE_OBJECT12            */
    sizeof(JSObject_Slots12),   /* FINALIZE_OBJECT12_BACKGROUND */
    sizeof(JSObject_Slots16),   /* FINALIZE_OBJECT16            */
    sizeof(JSObject_Slots16),   /* FINALIZE_OBJECT16_BACKGROUND */
    sizeof(JSFunction),         /* FINALIZE_FUNCTION            */
    sizeof(Shape),              /* FINALIZE_SHAPE               */
#if JS_HAS_XML_SUPPORT
    sizeof(JSXML),              /* FINALIZE_XML                 */
#endif
    sizeof(JSShortString),      /* FINALIZE_SHORT_STRING        */
    sizeof(JSString),           /* FINALIZE_STRING              */
    sizeof(JSExternalString),   /* FINALIZE_EXTERNAL_STRING     */
};

JS_STATIC_ASSERT(JS_ARRAY_LENGTH(GCThingSizeMap) == FINALIZE_LIMIT);
|
2010-04-12 10:15:30 -07:00
|
|
|
|
2011-05-20 03:38:31 -07:00
|
|
|
/* Return this arena's free list head; NULL means the arena is full. */
inline FreeCell *
ArenaHeader::getFreeList() const
{
    /*
     * Do not allow to access the free list when its real head is still stored
     * in FreeLists and is not synchronized with this one.
     */
    JS_ASSERT(compartment);
    JS_ASSERT_IF(freeList &&
                 compartment->freeLists.finalizables[getThingKind()] &&
                 this == compartment->freeLists.finalizables[getThingKind()]->arenaHeader(),
                 freeList == compartment->freeLists.finalizables[getThingKind()]);
    return freeList;
}
|
|
|
|
|
2011-04-13 13:43:33 -07:00
|
|
|
/*
 * Finalize every unmarked thing of type T in this arena and rebuild the
 * arena's free list in address order, merging newly freed cells with the
 * existing free list.  Returns true when the whole arena ended up free
 * (so the caller can release it), false otherwise.
 */
template<typename T>
inline bool
Arena::finalize(JSContext *cx)
{
    JS_ASSERT(aheader.compartment);
    JS_ASSERT(!aheader.getMarkingDelay()->link);

    /* Address of the next cell already on the free list (end when none). */
    uintptr_t nextFree = reinterpret_cast<uintptr_t>(aheader.getFreeList());
    FreeCell *freeList = NULL;
    FreeCell **tailp = &freeList;
    bool allClear = true;

    uintptr_t thing = thingsStart(sizeof(T));
    uintptr_t end = thingsEnd();

    if (!nextFree) {
        nextFree = end;
    } else {
        JS_ASSERT(thing <= nextFree);
        JS_ASSERT(nextFree < end);
    }

    /* Walk all thing slots in the arena in address order. */
    for (;; thing += sizeof(T)) {
        if (thing == nextFree) {
            /* Already-free cell: just advance the old-free-list cursor. */
            if (thing == end)
                break;
            FreeCell *nextLink = reinterpret_cast<FreeCell *>(nextFree)->link;
            if (!nextLink) {
                nextFree = end;
            } else {
                nextFree = reinterpret_cast<uintptr_t>(nextLink);
                JS_ASSERT(thing < nextFree);
                JS_ASSERT(nextFree < end);
            }
        } else {
            T *t = reinterpret_cast<T *>(thing);
            if (t->isMarked()) {
                /* Live thing: the arena cannot be released. */
                allClear = false;
                continue;
            }
            t->finalize(cx);
#ifdef DEBUG
            /* Poison freed memory to catch use-after-free in debug builds. */
            memset(t, JS_FREE_PATTERN, sizeof(T));
#endif
        }
        /* Append the (old or newly) free cell to the rebuilt free list. */
        FreeCell *freeCell = reinterpret_cast<FreeCell *>(thing);
        *tailp = freeCell;
        tailp = &freeCell->link;
    }

#ifdef DEBUG
    /* Check that the free list is consistent. */
    unsigned nfree = 0;
    if (freeList) {
        JS_ASSERT(tailp != &freeList);
        FreeCell *freeCell = freeList;
        for (;;) {
            ++nfree;
            if (&freeCell->link == tailp)
                break;
            JS_ASSERT(freeCell < freeCell->link);
            freeCell = freeCell->link;
        }
    }
    if (allClear) {
        JS_ASSERT(nfree == Arena::thingsPerArena(sizeof(T)));
        JS_ASSERT(freeList->address() == thingsStart(sizeof(T)));
        JS_ASSERT(tailp == &reinterpret_cast<FreeCell *>(end - sizeof(T))->link);
    } else {
        JS_ASSERT(nfree < Arena::thingsPerArena(sizeof(T)));
    }
#endif
    *tailp = NULL;
    aheader.setFreeList(freeList);
    return allClear;
}
|
|
|
|
|
2011-04-25 13:05:30 -07:00
|
|
|
/*
|
|
|
|
* Finalize arenas from the list. On return listHeadp points to the list of
|
|
|
|
* non-empty arenas.
|
|
|
|
*/
|
|
|
|
template<typename T>
|
|
|
|
static void
|
|
|
|
FinalizeArenas(JSContext *cx, ArenaHeader **listHeadp)
|
|
|
|
{
|
|
|
|
ArenaHeader **ap = listHeadp;
|
|
|
|
while (ArenaHeader *aheader = *ap) {
|
2011-05-19 12:01:08 -07:00
|
|
|
bool allClear = aheader->getArena()->finalize<T>(cx);
|
2011-04-25 13:05:30 -07:00
|
|
|
if (allClear) {
|
|
|
|
*ap = aheader->next;
|
|
|
|
aheader->chunk()->releaseArena(aheader);
|
|
|
|
} else {
|
|
|
|
ap = &aheader->next;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
#ifdef DEBUG
/* Return true only if no arena list of |comp| contains a marked thing. */
bool
checkArenaListAllUnmarked(JSCompartment *comp)
{
    unsigned kind = 0;
    while (kind < FINALIZE_LIMIT) {
        if (comp->arenas[kind].markedThingsInArenaList())
            return false;
        ++kind;
    }
    return true;
}
#endif
|
2010-05-18 03:01:33 -07:00
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
} /* namespace gc */
|
|
|
|
} /* namespace js */
|
2010-04-12 13:59:19 -07:00
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
void
|
|
|
|
JSCompartment::finishArenaLists()
|
2010-01-14 00:27:32 -08:00
|
|
|
{
|
2011-04-19 22:30:10 -07:00
|
|
|
for (unsigned i = 0; i < FINALIZE_LIMIT; i++)
|
|
|
|
arenas[i].releaseAll(i);
|
2008-02-26 13:01:42 -08:00
|
|
|
}
|
|
|
|
|
2011-04-25 13:05:30 -07:00
|
|
|
/*
 * Initialize a freshly allocated chunk: record the owning runtime, chain
 * every arena header into the empty cell free list, clear per-arena
 * compartment ownership, and reset the marking-delay entries.
 */
void
Chunk::init(JSRuntime *rt)
{
    info.runtime = rt;
    info.age = 0;
    info.emptyArenaLists.init();
    info.emptyArenaLists.cellFreeList = &arenas[0].aheader;
    /* Link all arena headers into one list via their |next| fields. */
    ArenaHeader *aheader = &arenas[0].aheader;
    ArenaHeader *last = &arenas[JS_ARRAY_LENGTH(arenas) - 1].aheader;
    while (aheader < last) {
        ArenaHeader *following = reinterpret_cast<ArenaHeader *>(aheader->address() + ArenaSize);
        aheader->next = following;
        aheader->compartment = NULL;    /* NULL compartment marks a free arena. */
        aheader = following;
    }
    last->next = NULL;
    last->compartment = NULL;
    info.numFree = ArenasPerChunk;
    for (size_t i = 0; i != JS_ARRAY_LENGTH(markingDelay); ++i)
        markingDelay[i].init();
}
|
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
bool
|
|
|
|
Chunk::unused()
|
2010-04-12 10:15:30 -07:00
|
|
|
{
|
2010-09-24 10:54:39 -07:00
|
|
|
return info.numFree == ArenasPerChunk;
|
|
|
|
}
|
2010-07-15 17:58:36 -07:00
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
bool
|
|
|
|
Chunk::hasAvailableArenas()
|
|
|
|
{
|
|
|
|
return info.numFree > 0;
|
2010-04-12 10:15:30 -07:00
|
|
|
}
|
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
bool
|
|
|
|
Chunk::withinArenasRange(Cell *cell)
|
2010-04-12 13:59:19 -07:00
|
|
|
{
|
2010-09-24 10:54:39 -07:00
|
|
|
uintptr_t addr = uintptr_t(cell);
|
|
|
|
if (addr >= uintptr_t(&arenas[0]) && addr < uintptr_t(&arenas[ArenasPerChunk]))
|
|
|
|
return true;
|
|
|
|
return false;
|
2010-04-12 13:59:19 -07:00
|
|
|
}
|
|
|
|
|
2011-05-19 12:01:08 -07:00
|
|
|
/* Turn arena cells into a free list starting from the first thing. */
/*
 * Returns the head of an address-ordered list linking every thing slot in
 * the arena; the last cell's link is NULL.
 */
template<size_t thingSize>
static inline FreeCell *
BuildFreeList(ArenaHeader *aheader)
{
    uintptr_t thing = aheader->getArena()->thingsStart(thingSize);
    uintptr_t end = aheader->getArena()->thingsEnd();
    FreeCell *first = reinterpret_cast<FreeCell *>(thing);
    FreeCell **prevp = &first->link;

    for (thing += thingSize; thing != end; thing += thingSize) {
        JS_ASSERT(thing < end);
        FreeCell *cell = reinterpret_cast<FreeCell *>(thing);

        /*
         * Here prevp points to the link field of the previous cell in the
         * list. Write the address of the following cell into it.
         */
        *prevp = cell;
        prevp = &cell->link;
    }

    /* Terminate the list at the last cell. */
    *prevp = NULL;
    return first;
}
|
|
|
|
|
|
|
|
/*
 * Hand out one arena from this chunk for things of the given kind.  Prefers
 * an empty arena that was already used for this thing kind (its free list
 * is still valid); otherwise takes any empty arena and builds a fresh free
 * list sized for |thingSize|.  Updates GC byte accounting and may trigger a
 * compartment GC when the compartment passes its trigger threshold.
 */
template <size_t thingSize>
ArenaHeader *
Chunk::allocateArena(JSContext *cx, unsigned thingKind)
{
    JSCompartment *comp = cx->compartment;
    JS_ASSERT(hasAvailableArenas());
    ArenaHeader *aheader = info.emptyArenaLists.getTypedFreeList(thingKind);
    if (!aheader) {
        aheader = info.emptyArenaLists.getOtherArena();
        aheader->setFreeList(BuildFreeList<thingSize>(aheader));
    }
    JS_ASSERT(!aheader->compartment);
    JS_ASSERT(!aheader->getMarkingDelay()->link);
    aheader->compartment = comp;
    aheader->setThingKind(thingKind);
    --info.numFree;
    JSRuntime *rt = info.runtime;

    JS_ATOMIC_ADD(&rt->gcBytes, ArenaSize);
    JS_ATOMIC_ADD(&comp->gcBytes, ArenaSize);
    METER(JS_ATOMIC_INCREMENT(&rt->gcStats.nallarenas));
    /* Request a GC of this compartment once its heap grows past the trigger. */
    if (comp->gcBytes >= comp->gcTriggerBytes)
        TriggerCompartmentGC(comp);

    return aheader;
}
|
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
/*
 * Return an arena to this chunk's empty-arena lists and update runtime and
 * compartment GC byte accounting.  When the background sweeping thread is
 * active (JS_THREADSAFE) the GC lock is taken for the duration and the GC
 * trigger thresholds are lowered.
 */
void
Chunk::releaseArena(ArenaHeader *aheader)
{
    JSRuntime *rt = info.runtime;
#ifdef JS_THREADSAFE
    /* Only lock when racing with the background sweeper. */
    Maybe<AutoLockGC> maybeLock;
    if (rt->gcHelperThread.sweeping)
        maybeLock.construct(info.runtime);
#endif
    JSCompartment *comp = aheader->compartment;
    METER(rt->gcStats.afree++);
    JS_ASSERT(rt->gcStats.nallarenas != 0);
    METER(JS_ATOMIC_DECREMENT(&rt->gcStats.nallarenas));

    JS_ASSERT(size_t(rt->gcBytes) >= ArenaSize);
    JS_ASSERT(size_t(comp->gcBytes) >= ArenaSize);
#ifdef JS_THREADSAFE
    if (rt->gcHelperThread.sweeping) {
        rt->reduceGCTriggerBytes(GC_HEAP_GROWTH_FACTOR * ArenaSize);
        comp->reduceGCTriggerBytes(GC_HEAP_GROWTH_FACTOR * ArenaSize);
    }
#endif
    JS_ATOMIC_ADD(&rt->gcBytes, -int32(ArenaSize));
    JS_ATOMIC_ADD(&comp->gcBytes, -int32(ArenaSize));
    info.emptyArenaLists.insert(aheader);
    aheader->compartment = NULL;    /* NULL compartment marks a free arena. */
    ++info.numFree;
    /* A fully free chunk restarts its expiration clock. */
    if (unused())
        info.age = 0;
}
|
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
/* The owning runtime, recorded in the chunk header by Chunk::init. */
JSRuntime *
Chunk::getRuntime()
{
    JSRuntime *rt = info.runtime;
    return rt;
}
|
2007-03-22 10:30:00 -07:00
|
|
|
|
2010-08-05 05:16:56 -07:00
|
|
|
/* Allocate a raw GC chunk, returning its address (0 on failure). */
inline jsuword
GetGCChunk(JSRuntime *rt)
{
    void *mem = rt->gcChunkAllocator->alloc();
#ifdef MOZ_GCTIMER
    if (mem)
        JS_ATOMIC_INCREMENT(&newChunkCount);
#endif
    METER_IF(mem, rt->gcStats.nchunks++);
    METER_UPDATE_MAX(rt->gcStats.maxnchunks, rt->gcStats.nchunks);
    return reinterpret_cast<jsuword>(mem);
}
|
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
inline void
|
2010-04-22 23:58:44 -07:00
|
|
|
ReleaseGCChunk(JSRuntime *rt, jsuword chunk)
|
2009-09-17 15:40:37 -07:00
|
|
|
{
|
2010-04-22 23:58:44 -07:00
|
|
|
void *p = reinterpret_cast<void *>(chunk);
|
2010-04-12 13:59:19 -07:00
|
|
|
JS_ASSERT(p);
|
2010-03-25 16:11:27 -07:00
|
|
|
#ifdef MOZ_GCTIMER
|
|
|
|
JS_ATOMIC_INCREMENT(&destroyChunkCount);
|
|
|
|
#endif
|
2010-04-12 13:59:19 -07:00
|
|
|
JS_ASSERT(rt->gcStats.nchunks != 0);
|
|
|
|
METER(rt->gcStats.nchunks--);
|
2011-03-31 01:14:12 -07:00
|
|
|
rt->gcChunkAllocator->free_(p);
|
2009-09-17 15:40:37 -07:00
|
|
|
}
|
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
/* Allocate an uninitialized Chunk; NULL on failure. */
inline Chunk *
AllocateGCChunk(JSRuntime *rt)
{
    Chunk *chunk = (Chunk *)rt->gcChunkAllocator->alloc();
#ifdef MOZ_GCTIMER
    if (chunk)
        JS_ATOMIC_INCREMENT(&newChunkCount);
#endif
    METER_IF(chunk, rt->gcStats.nchunks++);
    return chunk;
}
|
|
|
|
|
|
|
|
inline void
|
|
|
|
ReleaseGCChunk(JSRuntime *rt, Chunk *p)
|
|
|
|
{
|
|
|
|
JS_ASSERT(p);
|
|
|
|
#ifdef MOZ_GCTIMER
|
|
|
|
JS_ATOMIC_INCREMENT(&destroyChunkCount);
|
|
|
|
#endif
|
|
|
|
JS_ASSERT(rt->gcStats.nchunks != 0);
|
|
|
|
METER(rt->gcStats.nchunks--);
|
2011-03-31 01:14:12 -07:00
|
|
|
rt->gcChunkAllocator->free_(p);
|
2010-09-24 10:54:39 -07:00
|
|
|
}
|
|
|
|
|
2011-04-25 13:05:30 -07:00
|
|
|
/*
 * Find a chunk with a free arena for allocations in cx's compartment.
 * Order of preference: the compartment's current chunk, any chunk already
 * in the runtime's chunk set, then a freshly allocated chunk that is
 * registered in the set.  Returns NULL on allocation failure.
 */
inline Chunk *
PickChunk(JSContext *cx)
{
    Chunk *chunk = cx->compartment->chunk;
    if (chunk && chunk->hasAvailableArenas())
        return chunk;

    /*
     * The chunk used for the last allocation is full, search all chunks for
     * free arenas.
     */
    JSRuntime *rt = cx->runtime;
    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront()) {
        chunk = r.front();
        if (chunk->hasAvailableArenas()) {
            /* Cache the chunk for the compartment's next allocation. */
            cx->compartment->chunk = chunk;
            return chunk;
        }
    }

    chunk = AllocateGCChunk(rt);
    if (!chunk)
        return NULL;

    /*
     * FIXME bug 583732 - chunk is newly allocated and cannot be present in
     * the table so using ordinary lookupForAdd is suboptimal here.
     */
    GCChunkSet::AddPtr p = rt->gcChunkSet.lookupForAdd(chunk);
    JS_ASSERT(!p);
    if (!rt->gcChunkSet.add(p, chunk)) {
        /* Table insertion failed: give the chunk back before bailing. */
        ReleaseGCChunk(rt, chunk);
        return NULL;
    }

    chunk->init(rt);
    cx->compartment->chunk = chunk;
    return chunk;
}
|
2010-04-12 13:59:19 -07:00
|
|
|
|
2010-04-22 23:58:44 -07:00
|
|
|
/*
 * Release chunks that have stayed completely unused for more than MaxAge
 * sweeps; count the younger still-unused chunks in gcChunksWaitingToExpire.
 */
static void
ExpireGCChunks(JSRuntime *rt)
{
    static const size_t MaxAge = 3;

    /* Remove unused chunks. */
    AutoLockGC lock(rt);

    rt->gcChunksWaitingToExpire = 0;
    for (GCChunkSet::Enum e(rt->gcChunkSet); !e.empty(); e.popFront()) {
        Chunk *chunk = e.front();
        JS_ASSERT(chunk->info.runtime == rt);
        if (chunk->unused()) {
            /* Age ticks once per call; only old-enough chunks are freed. */
            if (chunk->info.age++ > MaxAge) {
                e.removeFront();
                ReleaseGCChunk(rt, chunk);
                continue;
            }
            rt->gcChunksWaitingToExpire++;
        }
    }
}
|
2009-09-17 15:40:37 -07:00
|
|
|
|
2010-07-15 17:58:36 -07:00
|
|
|
JS_FRIEND_API(bool)
|
2011-03-23 11:57:15 -07:00
|
|
|
IsAboutToBeFinalized(JSContext *cx, const void *thing)
|
2007-03-22 10:30:00 -07:00
|
|
|
{
|
2011-03-14 13:59:53 -07:00
|
|
|
if (JSAtom::isStatic(thing))
|
2009-12-22 12:50:44 -08:00
|
|
|
return false;
|
2011-01-07 23:44:57 -08:00
|
|
|
JS_ASSERT(cx);
|
|
|
|
|
2011-03-23 11:57:15 -07:00
|
|
|
JSCompartment *thingCompartment = reinterpret_cast<const Cell *>(thing)->compartment();
|
2011-01-07 23:44:57 -08:00
|
|
|
JSRuntime *rt = cx->runtime;
|
2011-01-08 20:06:29 -08:00
|
|
|
JS_ASSERT(rt == thingCompartment->rt);
|
2011-01-07 23:44:57 -08:00
|
|
|
if (rt->gcCurrentCompartment != NULL && rt->gcCurrentCompartment != thingCompartment)
|
|
|
|
return false;
|
2009-12-22 12:50:44 -08:00
|
|
|
|
2011-03-23 11:57:15 -07:00
|
|
|
return !reinterpret_cast<const Cell *>(thing)->isMarked();
|
2007-03-22 10:30:00 -07:00
|
|
|
}
|
|
|
|
|
2010-07-15 17:58:36 -07:00
|
|
|
JS_FRIEND_API(bool)
|
2011-02-16 12:47:08 -08:00
|
|
|
js_GCThingIsMarked(void *thing, uintN color = BLACK)
|
2010-07-15 17:58:36 -07:00
|
|
|
{
|
2010-09-24 10:54:39 -07:00
|
|
|
JS_ASSERT(thing);
|
|
|
|
AssertValidColor(thing, color);
|
2011-05-04 09:26:44 -07:00
|
|
|
JS_ASSERT(!JSAtom::isStatic(thing));
|
2010-09-24 10:54:39 -07:00
|
|
|
return reinterpret_cast<Cell *>(thing)->isMarked(color);
|
2010-07-15 17:58:36 -07:00
|
|
|
}
|
|
|
|
|
2010-12-17 16:33:04 -08:00
|
|
|
/*
 * 1/8 life for JIT code. After this number of microseconds have passed, 1/8 of all
 * JIT code is discarded in inactive compartments, regardless of how often that
 * code runs.
 */
static const int64 JIT_SCRIPT_EIGHTH_LIFETIME = 120 * 1000 * 1000;  /* 120 seconds */
|
|
|
|
|
2007-03-22 10:30:00 -07:00
|
|
|
/*
 * Initialize runtime-wide GC state: the chunk/root/lock hash tables,
 * (under JS_THREADSAFE) the GC lock, condition variables and helper
 * thread, and the heap size/trigger parameters.  Returns false on any
 * allocation failure.
 */
JSBool
js_InitGC(JSRuntime *rt, uint32 maxbytes)
{
    /*
     * Make room for at least 16 chunks so the table would not grow before
     * the browser starts up.
     */
    if (!rt->gcChunkSet.init(16))
        return false;

    if (!rt->gcRootsHash.init(256))
        return false;

    if (!rt->gcLocksHash.init(256))
        return false;

#ifdef JS_THREADSAFE
    rt->gcLock = JS_NEW_LOCK();
    if (!rt->gcLock)
        return false;
    rt->gcDone = JS_NEW_CONDVAR(rt->gcLock);
    if (!rt->gcDone)
        return false;
    rt->requestDone = JS_NEW_CONDVAR(rt->gcLock);
    if (!rt->requestDone)
        return false;
    if (!rt->gcHelperThread.init(rt))
        return false;
#endif

    /*
     * Separate gcMaxMallocBytes from gcMaxBytes but initialize to maxbytes
     * for default backward API compatibility.
     */
    rt->gcMaxBytes = maxbytes;
    rt->setGCMaxMallocBytes(maxbytes);
    rt->gcEmptyArenaPoolLifespan = 30000;

    /*
     * The assigned value prevents GC from running when GC memory is too low
     * (during JS engine start).
     */
    rt->setGCLastBytes(8192);

    /* Schedule the first periodic JIT-code release. */
    rt->gcJitReleaseTime = PRMJ_Now() + JIT_SCRIPT_EIGHTH_LIFETIME;

    METER(PodZero(&rt->gcStats));
    return true;
}
|
|
|
|
|
2010-06-04 07:22:28 -07:00
|
|
|
namespace js {
|
|
|
|
|
2011-04-19 22:30:10 -07:00
|
|
|
inline bool
|
|
|
|
InFreeList(ArenaHeader *aheader, void *thing)
|
|
|
|
{
|
2011-05-20 03:38:31 -07:00
|
|
|
for (FreeCell *cursor = aheader->getFreeList(); cursor; cursor = cursor->link) {
|
2011-04-19 22:30:10 -07:00
|
|
|
JS_ASSERT(!cursor->isMarked());
|
|
|
|
JS_ASSERT_IF(cursor->link, cursor < cursor->link);
|
|
|
|
|
|
|
|
/* If the cursor moves past the thing, it's not in the freelist. */
|
|
|
|
if (thing < cursor)
|
|
|
|
break;
|
|
|
|
|
|
|
|
/* If we find it on the freelist, it's dead. */
|
|
|
|
if (thing == cursor)
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2010-10-13 11:49:22 -07:00
|
|
|
/*
 * Given an address that falls inside an arena holding things of type T,
 * conservatively mark the thing the address may point at.  Rejects
 * addresses before the first thing (CGCT_NOTARENA) and things that are on
 * the arena free list (CGCT_NOTLIVE); otherwise marks and returns CGCT_VALID.
 */
template <typename T>
inline ConservativeGCTest
MarkArenaPtrConservatively(JSTracer *trc, ArenaHeader *aheader, uintptr_t addr)
{
    JS_ASSERT(aheader->compartment);
    JS_ASSERT(sizeof(T) == aheader->getThingSize());

    uintptr_t offset = addr & ArenaMask;
    uintptr_t minOffset = Arena::thingsStartOffset(sizeof(T));
    if (offset < minOffset)
        return CGCT_NOTARENA;

    /* addr can point inside the thing so we must align the address. */
    uintptr_t shift = (offset - minOffset) % sizeof(T);
    T *thing = reinterpret_cast<T *>(addr - shift);

    if (InFreeList(aheader, thing))
        return CGCT_NOTLIVE;

    MarkRoot(trc, thing, "machine stack");

#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    /* Record the root for the conservative-roots dump when requested. */
    if (IS_GC_MARKING_TRACER(trc) && static_cast<GCMarker *>(trc)->conservativeDumpFileName)
        static_cast<GCMarker *>(trc)->conservativeRoots.append(thing);
#endif

#if defined JS_DUMP_CONSERVATIVE_GC_ROOTS || defined JS_GCMETER
    /* Count pointers that landed inside a thing rather than at its start. */
    if (IS_GC_MARKING_TRACER(trc) && shift)
        static_cast<GCMarker *>(trc)->conservativeStats.unaligned++;
#endif
    return CGCT_VALID;
}
|
|
|
|
|
2010-08-05 05:16:56 -07:00
|
|
|
/*
 * Checks whether the word w can point to a live GC thing. Returns CGCT_VALID
 * after conservatively marking that thing, or the reason for rejection
 * otherwise. The thing kind stored in the containing arena's header selects
 * the size-templated marker to run.
 */
inline ConservativeGCTest
MarkIfGCThingWord(JSTracer *trc, jsuword w)
{
    /*
     * We assume that the compiler never uses sub-word alignment to store
     * pointers and does not tag pointers on its own. Additionally, the value
     * representation for all values and the jsid representation for GC-things
     * do not touch the low two bits. Thus any word with the low two bits set
     * is not a valid GC-thing.
     */
    JS_STATIC_ASSERT(JSID_TYPE_STRING == 0 && JSID_TYPE_OBJECT == 4);
    if (w & 0x3)
        return CGCT_LOWBITSET;

    /*
     * An object jsid has its low bits tagged. In the value representation on
     * 64-bit, the high bits are tagged.
     */
    const jsuword JSID_PAYLOAD_MASK = ~jsuword(JSID_TYPE_MASK);
#if JS_BITS_PER_WORD == 32
    jsuword addr = w & JSID_PAYLOAD_MASK;
#elif JS_BITS_PER_WORD == 64
    jsuword addr = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK;
#endif

    Chunk *chunk = Chunk::fromAddress(addr);

    /* Reject addresses that do not fall inside a chunk this runtime owns. */
    if (!trc->context->runtime->gcChunkSet.has(chunk))
        return CGCT_NOTCHUNK;

    /*
     * We query for pointers outside the arena array after checking for an
     * allocated chunk. Such pointers are rare and we want to reject them
     * after doing more likely rejections.
     */
    if (!Chunk::withinArenasRange(addr))
        return CGCT_NOTARENA;

    ArenaHeader *aheader = &chunk->arenas[Chunk::arenaIndex(addr)].aheader;

    /* An arena without a compartment has not been handed out for allocation. */
    if (!aheader->compartment)
        return CGCT_FREEARENA;

    ConservativeGCTest test;
    unsigned thingKind = aheader->getThingKind();

    /* Dispatch on the arena's thing kind so the correctly-sized marker runs. */
    switch (thingKind) {
      case FINALIZE_OBJECT0:
      case FINALIZE_OBJECT0_BACKGROUND:
        test = MarkArenaPtrConservatively<JSObject>(trc, aheader, addr);
        break;
      case FINALIZE_OBJECT2:
      case FINALIZE_OBJECT2_BACKGROUND:
        test = MarkArenaPtrConservatively<JSObject_Slots2>(trc, aheader, addr);
        break;
      case FINALIZE_OBJECT4:
      case FINALIZE_OBJECT4_BACKGROUND:
        test = MarkArenaPtrConservatively<JSObject_Slots4>(trc, aheader, addr);
        break;
      case FINALIZE_OBJECT8:
      case FINALIZE_OBJECT8_BACKGROUND:
        test = MarkArenaPtrConservatively<JSObject_Slots8>(trc, aheader, addr);
        break;
      case FINALIZE_OBJECT12:
      case FINALIZE_OBJECT12_BACKGROUND:
        test = MarkArenaPtrConservatively<JSObject_Slots12>(trc, aheader, addr);
        break;
      case FINALIZE_OBJECT16:
      case FINALIZE_OBJECT16_BACKGROUND:
        test = MarkArenaPtrConservatively<JSObject_Slots16>(trc, aheader, addr);
        break;
      case FINALIZE_STRING:
        test = MarkArenaPtrConservatively<JSString>(trc, aheader, addr);
        break;
      case FINALIZE_EXTERNAL_STRING:
        test = MarkArenaPtrConservatively<JSExternalString>(trc, aheader, addr);
        break;
      case FINALIZE_SHORT_STRING:
        test = MarkArenaPtrConservatively<JSShortString>(trc, aheader, addr);
        break;
      case FINALIZE_FUNCTION:
        test = MarkArenaPtrConservatively<JSFunction>(trc, aheader, addr);
        break;
      case FINALIZE_SHAPE:
        test = MarkArenaPtrConservatively<Shape>(trc, aheader, addr);
        break;
#if JS_HAS_XML_SUPPORT
      case FINALIZE_XML:
        test = MarkArenaPtrConservatively<JSXML>(trc, aheader, addr);
        break;
#endif
      default:
        test = CGCT_WRONGTAG;
        JS_NOT_REACHED("wrong tag");
    }

    return test;
}
|
|
|
|
|
|
|
|
/*
 * Conservatively mark a single machine word: if it can reference a live GC
 * thing, MarkIfGCThingWord marks that thing.
 */
static void
MarkWordConservatively(JSTracer *trc, jsuword w)
{
    /*
     * The conservative scanner may access words that valgrind considers as
     * undefined. To avoid false positives and not to alter valgrind view of
     * the memory we make as memcheck-defined the argument, a copy of the
     * original word. See bug 572678.
     */
#ifdef JS_VALGRIND
    VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w));
#endif

    MarkIfGCThingWord(trc, w);
}
|
|
|
|
|
2010-08-05 05:16:56 -07:00
|
|
|
static void
|
2010-11-08 14:35:06 -08:00
|
|
|
MarkRangeConservatively(JSTracer *trc, const jsuword *begin, const jsuword *end)
|
2010-06-04 07:22:28 -07:00
|
|
|
{
|
|
|
|
JS_ASSERT(begin <= end);
|
2010-11-08 14:35:06 -08:00
|
|
|
for (const jsuword *i = begin; i != end; ++i)
|
2010-08-05 05:16:56 -07:00
|
|
|
MarkWordConservatively(trc, *i);
|
2010-06-04 07:22:28 -07:00
|
|
|
}
|
|
|
|
|
2010-08-30 11:46:18 -07:00
|
|
|
/*
 * Conservatively scan one thread's recorded native stack span and its saved
 * register snapshot. recordStackTop must have run for this thread first.
 */
static void
MarkThreadDataConservatively(JSTracer *trc, ThreadData *td)
{
    ConservativeGCThreadData *ctd = &td->conservativeGC;
    JS_ASSERT(ctd->hasStackToScan());
    jsuword *stackMin, *stackEnd;
    /* Orient the scan so stackMin <= stackEnd regardless of growth direction. */
#if JS_STACK_GROWTH_DIRECTION > 0
    stackMin = td->nativeStackBase;
    stackEnd = ctd->nativeStackTop;
#else
    /* Stack grows down: the recorded top is the lowest live word. */
    stackMin = ctd->nativeStackTop + 1;
    stackEnd = td->nativeStackBase;
#endif
    JS_ASSERT(stackMin <= stackEnd);
    MarkRangeConservatively(trc, stackMin, stackEnd);
    /* Registers may hold the only reference to a GC thing; scan the snapshot. */
    MarkRangeConservatively(trc, ctd->registerSnapshot.words,
                            JS_ARRAY_END(ctd->registerSnapshot.words));
}
|
|
|
|
|
2010-07-29 17:13:08 -07:00
|
|
|
void
|
2010-08-05 15:57:34 -07:00
|
|
|
MarkStackRangeConservatively(JSTracer *trc, Value *beginv, Value *endv)
|
2010-07-29 17:13:08 -07:00
|
|
|
{
|
2010-11-08 14:35:06 -08:00
|
|
|
const jsuword *begin = beginv->payloadWord();
|
|
|
|
const jsuword *end = endv->payloadWord();;
|
2010-08-05 15:57:34 -07:00
|
|
|
#ifdef JS_NUNBOX32
|
|
|
|
/*
|
|
|
|
* With 64-bit jsvals on 32-bit systems, we can optimize a bit by
|
|
|
|
* scanning only the payloads.
|
|
|
|
*/
|
|
|
|
JS_ASSERT(begin <= end);
|
2010-11-08 14:35:06 -08:00
|
|
|
for (const jsuword *i = begin; i != end; i += sizeof(Value)/sizeof(jsuword))
|
2010-08-05 15:57:34 -07:00
|
|
|
MarkWordConservatively(trc, *i);
|
|
|
|
#else
|
|
|
|
MarkRangeConservatively(trc, begin, end);
|
|
|
|
#endif
|
2010-07-29 17:13:08 -07:00
|
|
|
}
|
|
|
|
|
2010-06-04 07:22:28 -07:00
|
|
|
/*
 * Conservatively scan the native stacks and register snapshots of every
 * thread known to the runtime (or the single thread data in non-threadsafe
 * builds).
 */
void
MarkConservativeStackRoots(JSTracer *trc)
{
#ifdef JS_THREADSAFE
    for (JSThread::Map::Range r = trc->context->runtime->threads.all(); !r.empty(); r.popFront()) {
        JSThread *thread = r.front().value;
        ConservativeGCThreadData *ctd = &thread->data.conservativeGC;
        if (ctd->hasStackToScan()) {
            /* A scannable stack outside a request implies a suspended thread. */
            JS_ASSERT_IF(!thread->data.requestDepth, thread->suspendCount);
            MarkThreadDataConservatively(trc, &thread->data);
        } else {
            /* Nothing to scan: the thread must be idle with no suspensions. */
            JS_ASSERT(!thread->suspendCount);
            JS_ASSERT(thread->data.requestDepth <= ctd->requestThreshold);
        }
    }
#else
    MarkThreadDataConservatively(trc, &trc->context->runtime->threadData);
#endif
}
|
|
|
|
|
2010-08-30 11:46:18 -07:00
|
|
|
/*
 * Capture the current native stack extent and CPU register contents for the
 * conservative scanner. JS_NEVER_INLINE ensures the address of |dummy| lies
 * in this frame, below any caller state that could hold GC things.
 */
JS_NEVER_INLINE void
ConservativeGCThreadData::recordStackTop()
{
    /* Update the native stack pointer if it points to a bigger stack. */
    jsuword dummy;
    nativeStackTop = &dummy;

    /*
     * To record and update the register snapshot for the conservative
     * scanning with the latest values we use setjmp.
     */
#if defined(_MSC_VER)
    /* C4611: interaction between setjmp and C++ object destruction. */
# pragma warning(push)
# pragma warning(disable: 4611)
#endif
    (void) setjmp(registerSnapshot.jmpbuf);
#if defined(_MSC_VER)
# pragma warning(pop)
#endif
}
|
|
|
|
|
2010-08-30 11:46:18 -07:00
|
|
|
/*
 * Record the calling thread's stack top for conservative scanning before a
 * GC. In threadsafe builds this is skipped when not inside a request, since
 * such a stack holds nothing the GC must keep alive.
 */
static inline void
RecordNativeStackTopForGC(JSContext *cx)
{
    ConservativeGCThreadData *ctd = &JS_THREAD_DATA(cx)->conservativeGC;

#ifdef JS_THREADSAFE
    /* Record the stack top here only if we are called from a request. */
    JS_ASSERT(cx->thread()->data.requestDepth >= ctd->requestThreshold);
    if (cx->thread()->data.requestDepth == ctd->requestThreshold)
        return;
#endif
    ctd->recordStackTop();
}
|
|
|
|
|
|
|
|
} /* namespace js */
|
|
|
|
|
2007-03-22 10:30:00 -07:00
|
|
|
#ifdef DEBUG
|
|
|
|
static void
|
|
|
|
CheckLeakedRoots(JSRuntime *rt);
|
|
|
|
#endif
|
|
|
|
|
|
|
|
/*
 * Tear down all GC state owned by the runtime: dump optional metering
 * output, delete every compartment, release every GC chunk, stop the GC
 * helper thread, and clear the root and lock hash tables. Called during
 * runtime destruction.
 */
void
js_FinishGC(JSRuntime *rt)
{
#ifdef JS_ARENAMETER
    JS_DumpArenaStats(stdout);
#endif
#ifdef JS_GCMETER
    if (JS_WANT_GC_METER_PRINT)
        js_DumpGCStats(rt, stdout);
#endif

    /* Delete all remaining Compartments. */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c) {
        JSCompartment *comp = *c;
        /* Release the compartment's arenas before freeing the compartment. */
        comp->finishArenaLists();
        Foreground::delete_(comp);
    }
    rt->compartments.clear();
    rt->atomsCompartment = NULL;

    /* Return every chunk to the system allocator. */
    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
        ReleaseGCChunk(rt, r.front());
    rt->gcChunkSet.clear();

#ifdef JS_THREADSAFE
    rt->gcHelperThread.finish(rt);
#endif

#ifdef DEBUG
    /* Warn about roots the embedding forgot to remove. */
    if (!rt->gcRootsHash.empty())
        CheckLeakedRoots(rt);
#endif
    rt->gcRootsHash.clear();
    rt->gcLocksHash.clear();
}
|
|
|
|
|
|
|
|
/*
 * Register *vp as a GC root named |name|; reports out-of-memory on the
 * context when registration fails.
 */
JSBool
js_AddRoot(JSContext *cx, Value *vp, const char *name)
{
    if (js_AddRootRT(cx->runtime, Jsvalify(vp), name))
        return JS_TRUE;
    JS_ReportOutOfMemory(cx);
    return JS_FALSE;
}
|
|
|
|
|
|
|
|
/*
 * Register the GC-thing pointer *rp as a root named |name|; reports
 * out-of-memory on the context when registration fails.
 */
JSBool
js_AddGCThingRoot(JSContext *cx, void **rp, const char *name)
{
    if (js_AddGCThingRootRT(cx->runtime, rp, name))
        return JS_TRUE;
    JS_ReportOutOfMemory(cx);
    return JS_FALSE;
}
|
|
|
|
|
2010-07-14 23:19:36 -07:00
|
|
|
/*
 * Add a value root directly on the runtime. May be called outside a request:
 * historically JS_AddRoot and friends synchronized with a racing GC via
 * rt->gcLock, and API users depend on that, so take the GC lock and wait for
 * any active GC before mutating the root table.
 */
JS_FRIEND_API(JSBool)
js_AddRootRT(JSRuntime *rt, jsval *vp, const char *name)
{
    AutoLockGC lock(rt);
    js_WaitForGC(rt);

    RootInfo info(name, JS_GC_ROOT_VALUE_PTR);
    return rt->gcRootsHash.put((void *)vp, info) ? JS_TRUE : JS_FALSE;
}
|
|
|
|
|
|
|
|
/*
 * Add a GC-thing root directly on the runtime. Same synchronization contract
 * as js_AddRootRT above: callers may race with a GC from outside a request,
 * so take the GC lock and wait for any active GC first.
 */
JS_FRIEND_API(JSBool)
js_AddGCThingRootRT(JSRuntime *rt, void **rp, const char *name)
{
    AutoLockGC lock(rt);
    js_WaitForGC(rt);

    RootInfo info(name, JS_GC_ROOT_GCTHING_PTR);
    return rt->gcRootsHash.put((void *)rp, info) ? JS_TRUE : JS_FALSE;
}
|
|
|
|
|
|
|
|
/*
 * Unregister a previously added root. Always succeeds; pokes the GC so the
 * next cycle can collect anything the root kept alive.
 */
JS_FRIEND_API(JSBool)
js_RemoveRoot(JSRuntime *rt, void *rp)
{
    /*
     * Due to the JS_RemoveRootRT API, we may be called outside of a request.
     * Same synchronization drill as above in js_AddRoot.
     */
    AutoLockGC lock(rt);
    js_WaitForGC(rt);
    rt->gcRootsHash.remove(rp);
    rt->gcPoke = JS_TRUE;
    return JS_TRUE;
}
|
|
|
|
|
2010-07-14 23:19:36 -07:00
|
|
|
/* Convenience aliases for iterating and mutating the GC root hash table. */
typedef RootedValueMap::Range RootRange;
typedef RootedValueMap::Entry RootEntry;
typedef RootedValueMap::Enum RootEnum;
|
|
|
|
|
2007-03-22 10:30:00 -07:00
|
|
|
#ifdef DEBUG
|
|
|
|
|
|
|
|
/*
 * Debug-only diagnostic run from js_FinishGC: print a warning to stderr for
 * every root still registered when the runtime is destroyed, then a summary
 * explaining that such roots may point to freed memory.
 */
static void
CheckLeakedRoots(JSRuntime *rt)
{
    uint32 leakedroots = 0;

    /* Warn (but don't assert) debug builds of any remaining roots. */
    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
        RootEntry &entry = r.front();
        leakedroots++;
        fprintf(stderr,
                "JS engine warning: leaking GC root \'%s\' at %p\n",
                entry.value.name ? entry.value.name : "", entry.key);
    }

    if (leakedroots > 0) {
        if (leakedroots == 1) {
            fprintf(stderr,
"JS engine warning: 1 GC root remains after destroying the JSRuntime at %p.\n"
"                   This root may point to freed memory. Objects reachable\n"
"                   through it have not been finalized.\n",
                    (void *) rt);
        } else {
            fprintf(stderr,
"JS engine warning: %lu GC roots remain after destroying the JSRuntime at %p.\n"
"                   These roots may point to freed memory. Objects reachable\n"
"                   through them have not been finalized.\n",
                    (unsigned long) leakedroots, (void *) rt);
        }
    }
}
|
|
|
|
|
|
|
|
void
|
|
|
|
js_DumpNamedRoots(JSRuntime *rt,
|
2010-07-14 23:19:36 -07:00
|
|
|
void (*dump)(const char *name, void *rp, JSGCRootType type, void *data),
|
2007-03-22 10:30:00 -07:00
|
|
|
void *data)
|
|
|
|
{
|
2010-07-14 23:19:36 -07:00
|
|
|
for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
|
|
|
|
RootEntry &entry = r.front();
|
|
|
|
if (const char *name = entry.value.name)
|
|
|
|
dump(name, entry.key, entry.value.type, data);
|
2010-05-20 13:50:08 -07:00
|
|
|
}
|
2007-03-22 10:30:00 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
#endif /* DEBUG */
|
|
|
|
|
|
|
|
/*
 * Apply |map| to every registered GC root under the GC lock. The callback
 * may request removal of the current root (JS_MAP_GCROOT_REMOVE) and/or
 * early termination (JS_MAP_GCROOT_STOP). Returns the number of roots
 * visited.
 */
uint32
js_MapGCRoots(JSRuntime *rt, JSGCRootMapFun map, void *data)
{
    AutoLockGC lock(rt);

    /* Count with uint32 to match the return type (was a signed int). */
    uint32 ct = 0;
    for (RootEnum e(rt->gcRootsHash); !e.empty(); e.popFront()) {
        RootEntry &entry = e.front();

        ct++;
        intN mapflags = map(entry.key, entry.value.type, entry.value.name, data);

        if (mapflags & JS_MAP_GCROOT_REMOVE)
            e.removeFront();
        if (mapflags & JS_MAP_GCROOT_STOP)
            break;
    }

    return ct;
}
|
|
|
|
|
2009-08-25 14:42:42 -07:00
|
|
|
void
|
|
|
|
JSRuntime::setGCLastBytes(size_t lastBytes)
|
|
|
|
{
|
|
|
|
gcLastBytes = lastBytes;
|
2011-05-10 22:37:11 -07:00
|
|
|
float trigger = float(Max(lastBytes, GC_ARENA_ALLOCATION_TRIGGER)) * GC_HEAP_GROWTH_FACTOR;
|
|
|
|
gcTriggerBytes = size_t(Min(float(gcMaxBytes), trigger));
|
2009-08-25 14:42:42 -07:00
|
|
|
}
|
|
|
|
|
2011-04-13 13:43:33 -07:00
|
|
|
void
|
|
|
|
JSRuntime::reduceGCTriggerBytes(uint32 amount) {
|
|
|
|
JS_ASSERT(amount > 0);
|
|
|
|
JS_ASSERT((gcTriggerBytes - amount) > 0);
|
|
|
|
if (gcTriggerBytes - amount < GC_ARENA_ALLOCATION_TRIGGER * GC_HEAP_GROWTH_FACTOR)
|
|
|
|
return;
|
|
|
|
gcTriggerBytes -= amount;
|
|
|
|
}
|
|
|
|
|
2011-01-07 23:44:57 -08:00
|
|
|
void
|
|
|
|
JSCompartment::setGCLastBytes(size_t lastBytes)
|
|
|
|
{
|
|
|
|
gcLastBytes = lastBytes;
|
2011-05-10 22:37:11 -07:00
|
|
|
float trigger = float(Max(lastBytes, GC_ARENA_ALLOCATION_TRIGGER)) * GC_HEAP_GROWTH_FACTOR;
|
|
|
|
gcTriggerBytes = size_t(Min(float(rt->gcMaxBytes), trigger));
|
2011-01-07 23:44:57 -08:00
|
|
|
}
|
|
|
|
|
2011-04-13 13:43:33 -07:00
|
|
|
void
|
|
|
|
JSCompartment::reduceGCTriggerBytes(uint32 amount) {
|
|
|
|
JS_ASSERT(amount > 0);
|
|
|
|
JS_ASSERT((gcTriggerBytes - amount) > 0);
|
|
|
|
if (gcTriggerBytes - amount < GC_ARENA_ALLOCATION_TRIGGER * GC_HEAP_GROWTH_FACTOR)
|
|
|
|
return;
|
|
|
|
gcTriggerBytes -= amount;
|
|
|
|
}
|
|
|
|
|
2011-04-25 13:05:30 -07:00
|
|
|
namespace js {
|
|
|
|
namespace gc {
|
|
|
|
|
|
|
|
/*
 * Walk the list from the cursor looking for an arena with free cells.
 * Returns that arena with the cursor advanced just past it, or NULL when
 * the rest of the list is full.
 */
inline ArenaHeader *
ArenaList::searchForFreeArena()
{
    for (;;) {
        ArenaHeader *candidate = *cursor;
        if (!candidate)
            return NULL;
        cursor = &candidate->next;
        if (candidate->hasFreeList())
            return candidate;
    }
}
|
|
|
|
|
2011-05-19 12:01:08 -07:00
|
|
|
/*
 * Return an arena with free cells for |thingKind|, searching the existing
 * list first and allocating a fresh arena from a chunk otherwise. Returns
 * NULL (after triggering a GC) when no chunk can be obtained. In threadsafe
 * builds this coordinates with the background finalization state machine
 * (BFS_*).
 */
template <size_t thingSize>
inline ArenaHeader *
ArenaList::getArenaWithFreeList(JSContext *cx, unsigned thingKind)
{
    Chunk *chunk;

#ifdef JS_THREADSAFE
    /*
     * We cannot search the arena list for free things while the
     * background finalization runs and can modify head or cursor at any
     * moment.
     */
    if (backgroundFinalizeState == BFS_DONE) {
      check_arena_list:
        if (ArenaHeader *aheader = searchForFreeArena())
            return aheader;
    }

    AutoLockGC lock(cx->runtime);

    for (;;) {
        if (backgroundFinalizeState == BFS_JUST_FINISHED) {
            /*
             * Before we took the GC lock or while waiting for the background
             * finalization to finish the latter added new arenas to the list.
             * Check the list again for free things outside the GC lock.
             * (The backward goto leaves |lock|'s scope, so the GC lock is
             * released before the search.)
             */
            JS_ASSERT(*cursor);
            backgroundFinalizeState = BFS_DONE;
            goto check_arena_list;
        }

        JS_ASSERT(!*cursor);
        chunk = PickChunk(cx);
        if (chunk || backgroundFinalizeState == BFS_DONE)
            break;

        /*
         * If the background finalization still runs, wait for it to
         * finish and retry to check if it populated the arena list or
         * added new empty arenas.
         */
        JS_ASSERT(backgroundFinalizeState == BFS_RUN);
        cx->runtime->gcHelperThread.waitBackgroundSweepEnd(cx->runtime, false);
        JS_ASSERT(backgroundFinalizeState == BFS_JUST_FINISHED ||
                  backgroundFinalizeState == BFS_DONE);
    }

#else /* !JS_THREADSAFE */

    if (ArenaHeader *aheader = searchForFreeArena())
        return aheader;
    chunk = PickChunk(cx);

#endif /* !JS_THREADSAFE */

    if (!chunk) {
        /* Out of chunks: request a GC and fail this allocation. */
        GCREASON(CHUNK);
        TriggerGC(cx->runtime);
        return NULL;
    }

    /*
     * While we still hold the GC lock get the arena from the chunk and add it
     * to the head of the list before the cursor to prevent checking the arena
     * for the free things.
     */
    ArenaHeader *aheader = chunk->allocateArena<thingSize>(cx, thingKind);
    aheader->next = head;
    if (cursor == &head)
        cursor = &aheader->next;
    head = aheader;
    return aheader;
}
|
|
|
|
|
2011-04-25 13:05:30 -07:00
|
|
|
/*
 * Synchronously finalize the dead things on this arena list. Afterwards the
 * cursor is reset to the head so allocation rescans from the start.
 */
template<typename T>
void
ArenaList::finalizeNow(JSContext *cx)
{
#ifdef JS_THREADSAFE
    /* Foreground finalization must not race with a background sweep. */
    JS_ASSERT(backgroundFinalizeState == BFS_DONE);
#endif
    METER(stats.narenas = uint32(ArenaHeader::CountListLength(head)));
    FinalizeArenas<T>(cx, &head);
    METER(stats.livearenas = uint32(ArenaHeader::CountListLength(head)));
    cursor = &head;
}
|
|
|
|
|
2011-04-25 13:05:30 -07:00
|
|
|
#ifdef JS_THREADSAFE
|
|
|
|
/*
 * Hand this arena list to the background sweeper when possible; fall back
 * to immediate foreground finalization if there is no background-free state
 * or appending to the finalize vector fails (e.g. OOM). Only thing kinds
 * that are safe to finalize off-thread may be queued.
 */
template<typename T>
inline void
ArenaList::finalizeLater(JSContext *cx)
{
    JS_ASSERT_IF(head,
                 head->getThingKind() == FINALIZE_OBJECT0_BACKGROUND ||
                 head->getThingKind() == FINALIZE_OBJECT2_BACKGROUND ||
                 head->getThingKind() == FINALIZE_OBJECT4_BACKGROUND ||
                 head->getThingKind() == FINALIZE_OBJECT8_BACKGROUND ||
                 head->getThingKind() == FINALIZE_OBJECT12_BACKGROUND ||
                 head->getThingKind() == FINALIZE_OBJECT16_BACKGROUND ||
                 head->getThingKind() == FINALIZE_SHORT_STRING ||
                 head->getThingKind() == FINALIZE_STRING);
    JS_ASSERT(!cx->runtime->gcHelperThread.sweeping);

    /*
     * The state can be just-finished if we have not allocated any GC things
     * from the arena list after the previous background finalization.
     */
    JS_ASSERT(backgroundFinalizeState == BFS_DONE ||
              backgroundFinalizeState == BFS_JUST_FINISHED);

    if (head && cx->gcBackgroundFree && cx->gcBackgroundFree->finalizeVector.append(head)) {
        /* Detach the list; backgroundFinalize splices survivors back later. */
        head = NULL;
        cursor = &head;
        backgroundFinalizeState = BFS_RUN;
    } else {
        /* Nothing queued (empty list, no helper, or OOM): finalize here. */
        backgroundFinalizeState = BFS_DONE;
        finalizeNow<T>(cx);
    }
}
|
|
|
|
|
|
|
|
/*
 * Finalize a detached arena list on behalf of the GC helper thread, then
 * splice the surviving arenas back into the owning compartment's ArenaList
 * under the GC lock and advance the BFS state machine.
 */
/*static*/ void
ArenaList::backgroundFinalize(JSContext *cx, ArenaHeader *listHead)
{
    JS_ASSERT(listHead);
    unsigned thingKind = listHead->getThingKind();
    JSCompartment *comp = listHead->compartment;
    ArenaList *al = &comp->arenas[thingKind];
    METER(al->stats.narenas = uint32(ArenaHeader::CountListLength(listHead)));

    /* Run the finalizer matching the list's thing kind. */
    switch (thingKind) {
      default:
        JS_NOT_REACHED("wrong kind");
        break;
      case FINALIZE_OBJECT0_BACKGROUND:
        FinalizeArenas<JSObject>(cx, &listHead);
        break;
      case FINALIZE_OBJECT2_BACKGROUND:
        FinalizeArenas<JSObject_Slots2>(cx, &listHead);
        break;
      case FINALIZE_OBJECT4_BACKGROUND:
        FinalizeArenas<JSObject_Slots4>(cx, &listHead);
        break;
      case FINALIZE_OBJECT8_BACKGROUND:
        FinalizeArenas<JSObject_Slots8>(cx, &listHead);
        break;
      case FINALIZE_OBJECT12_BACKGROUND:
        FinalizeArenas<JSObject_Slots12>(cx, &listHead);
        break;
      case FINALIZE_OBJECT16_BACKGROUND:
        FinalizeArenas<JSObject_Slots16>(cx, &listHead);
        break;
      case FINALIZE_STRING:
        FinalizeArenas<JSString>(cx, &listHead);
        break;
      case FINALIZE_SHORT_STRING:
        FinalizeArenas<JSShortString>(cx, &listHead);
        break;
    }

    /*
     * In stats we should not reflect the arenas allocated after the GC has
     * finished. So we do not add to livearenas the arenas from al->head.
     */
    METER(al->stats.livearenas = uint32(ArenaHeader::CountListLength(listHead)));

    /*
     * After we finish the finalization al->cursor must point to the end of
     * the head list as we emptied the list before the background finalization
     * and the allocation adds new arenas before the cursor.
     */
    AutoLockGC lock(cx->runtime);
    JS_ASSERT(al->backgroundFinalizeState == BFS_RUN);
    JS_ASSERT(!*al->cursor);
    if (listHead) {
        *al->cursor = listHead;
        al->backgroundFinalizeState = BFS_JUST_FINISHED;
    } else {
        al->backgroundFinalizeState = BFS_DONE;
    }
    METER(UpdateCompartmentGCStats(comp, thingKind));
}
|
|
|
|
|
|
|
|
#endif /* JS_THREADSAFE */
|
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
#ifdef DEBUG
|
|
|
|
/*
 * Debug-only sanity check performed before GC-thing allocation: the caller
 * must be associated with a thread (threadsafe builds) and no GC may be in
 * progress. Always returns true so it can sit inside JS_ASSERT.
 */
bool
CheckAllocation(JSContext *cx)
{
#ifdef JS_THREADSAFE
    JS_ASSERT(cx->thread());
#endif
    JS_ASSERT(!cx->runtime->gcRunning);
    return true;
}
|
2009-10-15 23:10:54 -07:00
|
|
|
#endif
|
|
|
|
|
2010-10-07 13:43:52 -07:00
|
|
|
inline bool
|
|
|
|
NeedLastDitchGC(JSContext *cx)
|
|
|
|
{
|
|
|
|
JSRuntime *rt = cx->runtime;
|
2011-01-07 23:44:57 -08:00
|
|
|
return rt->gcIsNeeded;
|
2010-10-07 13:43:52 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
/*
 * Return false only if the GC run but could not bring its memory usage under
 * JSRuntime::gcMaxBytes.
 */
static bool
RunLastDitchGC(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    METER(rt->gcStats.lastditch++);
#ifdef JS_THREADSAFE
    /* Drop the atoms-compartment lock around the GC if this thread holds it. */
    Maybe<AutoUnlockAtomsCompartment> maybeUnlockAtomsCompartment;
    if (cx->compartment == rt->atomsCompartment && rt->atomsCompartmentIsLocked)
        maybeUnlockAtomsCompartment.construct(cx);
#endif
    /* The last ditch GC preserves all atoms. */
    AutoKeepAtoms keep(rt);
    GCREASON(LASTDITCH);
    js_GC(cx, rt->gcTriggerCompartment, GC_NORMAL);

#ifdef JS_THREADSAFE
    /* Still over budget: let the background sweep finish releasing memory. */
    if (rt->gcBytes >= rt->gcMaxBytes)
        cx->runtime->gcHelperThread.waitBackgroundSweepEnd(cx->runtime);
#endif

    return rt->gcBytes < rt->gcMaxBytes;
}
|
|
|
|
|
2011-06-01 17:48:52 -07:00
|
|
|
static inline bool
|
|
|
|
IsGCAllowed(JSContext *cx)
|
|
|
|
{
|
|
|
|
return !JS_ON_TRACE(cx) && !JS_THREAD_DATA(cx)->waiveGCQuota;
|
|
|
|
}
|
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
/*
 * Slow path for allocating a GC thing of type T: obtain an arena with free
 * cells for |thingKind| and populate the compartment free list from it,
 * running a last-ditch GC and retrying once when allocation fails. Returns
 * the first free cell, or NULL after reporting out-of-memory.
 */
template <typename T>
inline Cell *
RefillTypedFreeList(JSContext *cx, unsigned thingKind)
{
    JS_ASSERT(!cx->runtime->gcRunning);

    /*
     * For compatibility with older code we tolerate calling the allocator
     * during the GC in optimized builds.
     */
    if (cx->runtime->gcRunning)
        return NULL;

    JSCompartment *compartment = cx->compartment;
    /* The fast inline path must have exhausted the free list already. */
    JS_ASSERT(!compartment->freeLists.finalizables[thingKind]);

    bool canGC = IsGCAllowed(cx);
    bool runGC = canGC && JS_UNLIKELY(NeedLastDitchGC(cx));
    for (;;) {
        if (runGC) {
            if (!RunLastDitchGC(cx))
                break;

            /*
             * The JSGC_END callback can legitimately allocate new GC
             * things and populate the free list. If that happens, just
             * return that list head.
             */
            if (Cell *thing = compartment->freeLists.getNext(thingKind))
                return thing;
        }
        ArenaHeader *aheader =
            compartment->arenas[thingKind].getArenaWithFreeList<sizeof(T)>(cx, thingKind);
        if (aheader) {
            JS_ASSERT(sizeof(T) == aheader->getThingSize());
            return compartment->freeLists.populate(aheader, thingKind);
        }

        /*
         * We failed to allocate any arena. Run the GC if we can unless we
         * have done it already.
         */
        if (!canGC || runGC)
            break;
        runGC = true;
    }

    METER(cx->runtime->gcStats.fail++);
    js_ReportOutOfMemory(cx);
    return NULL;
}
|
|
|
|
|
2011-04-21 15:20:12 -07:00
|
|
|
/*
 * Out-of-line entry point for the allocation slow path: dispatch on
 * |thingKind| to the size-templated RefillTypedFreeList instantiation.
 */
Cell *
RefillFinalizableFreeList(JSContext *cx, unsigned thingKind)
{
    switch (thingKind) {
      case FINALIZE_OBJECT0:
      case FINALIZE_OBJECT0_BACKGROUND:
        return RefillTypedFreeList<JSObject>(cx, thingKind);
      case FINALIZE_OBJECT2:
      case FINALIZE_OBJECT2_BACKGROUND:
        return RefillTypedFreeList<JSObject_Slots2>(cx, thingKind);
      case FINALIZE_OBJECT4:
      case FINALIZE_OBJECT4_BACKGROUND:
        return RefillTypedFreeList<JSObject_Slots4>(cx, thingKind);
      case FINALIZE_OBJECT8:
      case FINALIZE_OBJECT8_BACKGROUND:
        return RefillTypedFreeList<JSObject_Slots8>(cx, thingKind);
      case FINALIZE_OBJECT12:
      case FINALIZE_OBJECT12_BACKGROUND:
        return RefillTypedFreeList<JSObject_Slots12>(cx, thingKind);
      case FINALIZE_OBJECT16:
      case FINALIZE_OBJECT16_BACKGROUND:
        return RefillTypedFreeList<JSObject_Slots16>(cx, thingKind);
      case FINALIZE_STRING:
        return RefillTypedFreeList<JSString>(cx, thingKind);
      case FINALIZE_EXTERNAL_STRING:
        return RefillTypedFreeList<JSExternalString>(cx, thingKind);
      case FINALIZE_SHORT_STRING:
        return RefillTypedFreeList<JSShortString>(cx, thingKind);
      case FINALIZE_FUNCTION:
        return RefillTypedFreeList<JSFunction>(cx, thingKind);
      case FINALIZE_SHAPE:
        return RefillTypedFreeList<Shape>(cx, thingKind);
#if JS_HAS_XML_SUPPORT
      case FINALIZE_XML:
        return RefillTypedFreeList<JSXML>(cx, thingKind);
#endif
      default:
        JS_NOT_REACHED("bad finalize kind");
        return NULL;
    }
}
|
2010-09-24 10:54:39 -07:00
|
|
|
|
2011-04-21 15:20:12 -07:00
|
|
|
} /* namespace gc */
|
|
|
|
} /* namespace js */
|
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
/*
 * Friend-API wrapper: expose the internal GetGCThingTraceKind to
 * C-linkage callers outside the js namespace.
 */
uint32
js_GetGCThingTraceKind(void *thing)
{
    return GetGCThingTraceKind(thing);
}
|
|
|
|
|
2007-03-22 10:30:00 -07:00
|
|
|
/*
 * Pin a GC thing so it survives collection: bump its entry in the
 * runtime's lock hash (creating it with count 1 on first lock).
 * A NULL thing is a no-op success; returns false only when the hash
 * entry cannot be created (OOM).
 */
JSBool
js_LockGCThingRT(JSRuntime *rt, void *thing)
{
    if (!thing)
        return true;

    AutoLockGC lock(rt);
    GCLocks::Ptr entry = rt->gcLocksHash.lookupWithDefault(thing, 0);
    if (!entry)
        return false;

    entry->value++;
    METER(rt->gcStats.lock++);
    return true;
}
|
|
|
|
|
2009-12-13 23:55:17 -08:00
|
|
|
void
|
2007-03-22 10:30:00 -07:00
|
|
|
js_UnlockGCThingRT(JSRuntime *rt, void *thing)
|
|
|
|
{
|
2009-09-10 04:13:59 -07:00
|
|
|
if (!thing)
|
2009-12-13 23:55:17 -08:00
|
|
|
return;
|
2007-03-22 10:30:00 -07:00
|
|
|
|
2010-04-08 05:54:18 -07:00
|
|
|
AutoLockGC lock(rt);
|
2010-05-20 13:50:08 -07:00
|
|
|
GCLocks::Ptr p = rt->gcLocksHash.lookup(thing);
|
|
|
|
|
|
|
|
if (p) {
|
2009-12-24 01:31:07 -08:00
|
|
|
rt->gcPoke = true;
|
2010-05-20 13:50:08 -07:00
|
|
|
if (--p->value == 0)
|
|
|
|
rt->gcLocksHash.remove(p);
|
|
|
|
|
2009-12-24 01:31:07 -08:00
|
|
|
METER(rt->gcStats.unlock++);
|
2007-03-22 10:30:00 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2010-07-26 11:44:04 -07:00
|
|
|
namespace js {
|
|
|
|
|
2007-03-22 10:30:00 -07:00
|
|
|
/*
|
2009-12-13 23:55:17 -08:00
|
|
|
* When the native stack is low, the GC does not call JS_TraceChildren to mark
|
|
|
|
* the reachable "children" of the thing. Rather the thing is put aside and
|
|
|
|
* JS_TraceChildren is called later with more space on the C stack.
|
|
|
|
*
|
|
|
|
* To implement such delayed marking of the children with minimal overhead for
|
2010-09-24 10:54:39 -07:00
|
|
|
* the normal case of sufficient native stack, the code adds a field per
|
2011-06-14 19:21:47 -07:00
|
|
|
* arena. The field markingDelay->link links all arenas with delayed things
|
2010-10-07 13:43:52 -07:00
|
|
|
* into a stack list with the pointer to stack top in
|
2010-09-24 10:54:39 -07:00
|
|
|
* GCMarker::unmarkedArenaStackTop. delayMarkingChildren adds
|
2010-07-26 11:44:04 -07:00
|
|
|
* arenas to the stack as necessary while markDelayedChildren pops the arenas
|
2010-04-12 13:59:19 -07:00
|
|
|
* from the stack until it empties.
|
2007-03-22 10:30:00 -07:00
|
|
|
*/
|
|
|
|
|
2010-08-05 05:16:56 -07:00
|
|
|
/*
 * Construct a marker over the runtime's preallocated mark-stack buffers.
 * The delayed-marking stack starts at the sentinel bottom value.
 */
GCMarker::GCMarker(JSContext *cx)
  : color(0),
    unmarkedArenaStackTop(MarkingDelay::stackBottom()),
    objStack(cx->runtime->gcMarkStackObjs, sizeof(cx->runtime->gcMarkStackObjs)),
    ropeStack(cx->runtime->gcMarkStackRopes, sizeof(cx->runtime->gcMarkStackRopes)),
    xmlStack(cx->runtime->gcMarkStackXMLs, sizeof(cx->runtime->gcMarkStackXMLs)),
    largeStack(cx->runtime->gcMarkStackLarges, sizeof(cx->runtime->gcMarkStackLarges))
{
    /* A GCMarker is a JSTracer with no callback (NULL => marking tracer). */
    JS_TRACER_INIT(this, cx, NULL);
#ifdef DEBUG
    markLaterArenas = 0;
#endif
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    conservativeDumpFileName = getenv("JS_DUMP_CONSERVATIVE_GC_ROOTS");
    memset(&conservativeStats, 0, sizeof(conservativeStats));
#endif
}
|
|
|
|
|
|
|
|
/*
 * Flush debug/metering output accumulated during this marking pass.
 */
GCMarker::~GCMarker()
{
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    dumpConservativeRoots();
#endif
#ifdef JS_GCMETER
    /* Update total stats. */
    context->runtime->gcStats.conservative.add(conservativeStats);
#endif
}
|
|
|
|
|
2010-07-26 11:44:04 -07:00
|
|
|
void
|
2011-03-23 11:57:15 -07:00
|
|
|
GCMarker::delayMarkingChildren(const void *thing)
|
2007-03-22 10:30:00 -07:00
|
|
|
{
|
2011-03-23 11:57:15 -07:00
|
|
|
const Cell *cell = reinterpret_cast<const Cell *>(thing);
|
2011-04-19 22:30:10 -07:00
|
|
|
ArenaHeader *aheader = cell->arenaHeader();
|
|
|
|
if (aheader->getMarkingDelay()->link) {
|
|
|
|
/* Arena already scheduled to be marked later */
|
2010-09-24 10:54:39 -07:00
|
|
|
return;
|
2007-09-16 06:03:17 -07:00
|
|
|
}
|
2011-04-19 22:30:10 -07:00
|
|
|
aheader->getMarkingDelay()->link = unmarkedArenaStackTop;
|
|
|
|
unmarkedArenaStackTop = aheader;
|
|
|
|
METER(markLaterArenas++);
|
|
|
|
METER_UPDATE_MAX(cell->compartment()->rt->gcStats.maxunmarked, markLaterArenas);
|
2007-03-22 10:30:00 -07:00
|
|
|
}
|
|
|
|
|
2011-04-19 22:30:10 -07:00
|
|
|
static void
|
2011-05-19 12:01:08 -07:00
|
|
|
MarkDelayedChildren(JSTracer *trc, ArenaHeader *aheader)
|
2007-03-22 10:30:00 -07:00
|
|
|
{
|
2011-05-19 12:01:08 -07:00
|
|
|
unsigned traceKind = GetFinalizableTraceKind(aheader->getThingKind());
|
|
|
|
size_t thingSize = aheader->getThingSize();
|
|
|
|
Arena *a = aheader->getArena();
|
|
|
|
uintptr_t end = a->thingsEnd();
|
|
|
|
for (uintptr_t thing = a->thingsStart(thingSize); thing != end; thing += thingSize) {
|
|
|
|
Cell *t = reinterpret_cast<Cell *>(thing);
|
|
|
|
if (t->isMarked())
|
|
|
|
JS_TraceChildren(trc, t, traceKind);
|
2007-03-22 10:30:00 -07:00
|
|
|
}
|
2010-09-24 10:54:39 -07:00
|
|
|
}
|
2007-03-22 10:30:00 -07:00
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
/*
 * Drain the delayed-marking stack, tracing the children of every marked
 * thing in each queued arena.  New arenas may be pushed while we trace,
 * so loop until the stack reaches the sentinel bottom.
 */
void
GCMarker::markDelayedChildren()
{
    while (unmarkedArenaStackTop != MarkingDelay::stackBottom()) {
        /*
         * If marking gets delayed at the same arena again, we must repeat
         * marking of its things. For that we pop arena from the stack and
         * clear its nextDelayedMarking before we begin the marking.
         */
        ArenaHeader *aheader = unmarkedArenaStackTop;
        unmarkedArenaStackTop = aheader->getMarkingDelay()->link;
        JS_ASSERT(unmarkedArenaStackTop);
        aheader->getMarkingDelay()->link = NULL;
#ifdef DEBUG
        JS_ASSERT(markLaterArenas);
        markLaterArenas--;
#endif
        MarkDelayedChildren(this, aheader);
    }
    /* Every queued arena must have been processed. */
    JS_ASSERT(!markLaterArenas);
}
|
|
|
|
|
2010-07-14 23:19:36 -07:00
|
|
|
} /* namespace js */
|
|
|
|
|
2011-04-19 22:30:10 -07:00
|
|
|
#ifdef DEBUG
|
|
|
|
/*
 * No-op trace callback.  Used below only to probe (via MarkIfGCThingWord)
 * whether a word is a valid GC thing without actually marking anything.
 */
static void
EmptyMarkCallback(JSTracer *trc, void *thing, uint32 kind)
{
}
|
|
|
|
#endif
|
|
|
|
|
2010-05-20 13:50:08 -07:00
|
|
|
/*
 * Mark one entry of the runtime's root hash.  In DEBUG builds, first
 * validate that the rooted address still holds a plausible GC thing and
 * report a likely missing JS_RemoveRoot when it does not.
 */
static void
gc_root_traversal(JSTracer *trc, const RootEntry &entry)
{
#ifdef DEBUG
    void *ptr;
    if (entry.value.type == JS_GC_ROOT_GCTHING_PTR) {
        ptr = *reinterpret_cast<void **>(entry.key);
    } else {
        /* Value roots only reference the GC heap when they hold a GC thing. */
        Value *vp = reinterpret_cast<Value *>(entry.key);
        ptr = vp->isGCThing() ? vp->toGCThing() : NULL;
    }

    if (ptr) {
        if (!JSAtom::isStatic(ptr)) {
            /* Use conservative machinery to find if ptr is a valid GC thing. */
            JSTracer checker;
            JS_TRACER_INIT(&checker, trc->context, EmptyMarkCallback);
            ConservativeGCTest test = MarkIfGCThingWord(&checker, reinterpret_cast<jsuword>(ptr));
            if (test != CGCT_VALID && entry.value.name) {
                fprintf(stderr,
"JS API usage error: the address passed to JS_AddNamedRoot currently holds an\n"
"invalid gcthing. This is usually caused by a missing call to JS_RemoveRoot.\n"
"The root's name is \"%s\".\n",
                        entry.value.name);
            }
            JS_ASSERT(test == CGCT_VALID);
        }
    }
#endif
    JS_SET_TRACING_NAME(trc, entry.value.name ? entry.value.name : "root");
    if (entry.value.type == JS_GC_ROOT_GCTHING_PTR)
        MarkGCThing(trc, *reinterpret_cast<void **>(entry.key));
    else
        MarkValueRaw(trc, *reinterpret_cast<Value *>(entry.key));
}
|
|
|
|
|
2010-05-20 13:50:08 -07:00
|
|
|
/*
 * Mark one entry of the GC-lock hash: every locked thing (count >= 1)
 * is a root.
 */
static void
gc_lock_traversal(const GCLocks::Entry &entry, JSTracer *trc)
{
    JS_ASSERT(entry.value >= 1);
    MarkGCThing(trc, entry.key, "locked object");
}
|
|
|
|
|
|
|
|
/*
 * Trace the GC things reachable from a single stack frame.  Dummy frames
 * carry only a scope chain; real frames also root their arguments object
 * (if reified), script, and return value.  Marks the script's
 * compartment active so compartmental GC heuristics see it.
 */
void
js_TraceStackFrame(JSTracer *trc, StackFrame *fp)
{
    MarkObject(trc, fp->scopeChain(), "scope chain");
    if (fp->isDummyFrame())
        return;
    if (fp->hasArgsObj())
        MarkObject(trc, fp->argsObj(), "arguments");
    js_TraceScript(trc, fp->script());
    fp->script()->compartment->active = true;
    MarkValue(trc, fp->returnValue(), "rval");
}
|
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
/*
 * Trace hook for AutoIdArray: mark every id in the owned JSIdArray.
 */
void
AutoIdArray::trace(JSTracer *trc)
{
    JS_ASSERT(tag == IDARRAY);
    gc::MarkIdRange(trc, idArray->length, idArray->vector, "JSAutoIdArray.idArray");
}
|
|
|
|
|
|
|
|
/*
 * Trace hook for AutoEnumStateRooter: keep the enumerated object alive.
 */
void
AutoEnumStateRooter::trace(JSTracer *trc)
{
    gc::MarkObject(trc, *obj, "js::AutoEnumStateRooter.obj");
}
|
|
|
|
|
2010-06-16 14:13:01 -07:00
|
|
|
/*
 * Trace dispatch for all stack-allocated AutoGCRooter subclasses.  The
 * |tag| field encodes the concrete rooter type; negative values select a
 * case below, while a non-negative tag means this is an AutoArrayRooter
 * and the tag is the array length (handled after the switch).
 */
inline void
AutoGCRooter::trace(JSTracer *trc)
{
    switch (tag) {
      case JSVAL:
        MarkValue(trc, static_cast<AutoValueRooter *>(this)->val, "js::AutoValueRooter.val");
        return;

      case SHAPE:
        MarkShape(trc, static_cast<AutoShapeRooter *>(this)->shape, "js::AutoShapeRooter.val");
        return;

      case PARSER:
        /* The parser traces its own internal roots. */
        static_cast<Parser *>(this)->trace(trc);
        return;

      case SCRIPT:
        if (JSScript *script = static_cast<AutoScriptRooter *>(this)->script)
            js_TraceScript(trc, script);
        return;

      case ENUMERATOR:
        static_cast<AutoEnumStateRooter *>(this)->trace(trc);
        return;

      case IDARRAY: {
        JSIdArray *ida = static_cast<AutoIdArray *>(this)->idArray;
        MarkIdRange(trc, ida->length, ida->vector, "js::AutoIdArray.idArray");
        return;
      }

      case DESCRIPTORS: {
        /* Mark all four value slots of every property descriptor. */
        PropDescArray &descriptors =
            static_cast<AutoPropDescArrayRooter *>(this)->descriptors;
        for (size_t i = 0, len = descriptors.length(); i < len; i++) {
            PropDesc &desc = descriptors[i];
            MarkValue(trc, desc.pd, "PropDesc::pd");
            MarkValue(trc, desc.value, "PropDesc::value");
            MarkValue(trc, desc.get, "PropDesc::get");
            MarkValue(trc, desc.set, "PropDesc::set");
        }
        return;
      }

      case DESCRIPTOR : {
        PropertyDescriptor &desc = *static_cast<AutoPropertyDescriptorRooter *>(this);
        if (desc.obj)
            MarkObject(trc, *desc.obj, "Descriptor::obj");
        MarkValue(trc, desc.value, "Descriptor::value");
        /* Getter/setter slots hold objects only when the JSPROP flag is set. */
        if ((desc.attrs & JSPROP_GETTER) && desc.getter)
            MarkObject(trc, *CastAsObject(desc.getter), "Descriptor::get");
        if (desc.attrs & JSPROP_SETTER && desc.setter)
            MarkObject(trc, *CastAsObject(desc.setter), "Descriptor::set");
        return;
      }

      case NAMESPACES: {
        JSXMLArray &array = static_cast<AutoNamespaceArray *>(this)->array;
        MarkObjectRange(trc, array.length, reinterpret_cast<JSObject **>(array.vector),
                        "JSXMLArray.vector");
        array.cursors->trace(trc);
        return;
      }

      case XML:
        js_TraceXML(trc, static_cast<AutoXMLRooter *>(this)->xml);
        return;

      case OBJECT:
        if (JSObject *obj = static_cast<AutoObjectRooter *>(this)->obj)
            MarkObject(trc, *obj, "js::AutoObjectRooter.obj");
        return;

      case ID:
        MarkId(trc, static_cast<AutoIdRooter *>(this)->id_, "js::AutoIdRooter.val");
        return;

      case VALVECTOR: {
        AutoValueVector::VectorImpl &vector = static_cast<AutoValueVector *>(this)->vector;
        MarkValueRange(trc, vector.length(), vector.begin(), "js::AutoValueVector.vector");
        return;
      }

      case STRING:
        if (JSString *str = static_cast<AutoStringRooter *>(this)->str)
            MarkString(trc, str, "js::AutoStringRooter.str");
        return;

      case IDVECTOR: {
        AutoIdVector::VectorImpl &vector = static_cast<AutoIdVector *>(this)->vector;
        MarkIdRange(trc, vector.length(), vector.begin(), "js::AutoIdVector.vector");
        return;
      }

      case SHAPEVECTOR: {
        AutoShapeVector::VectorImpl &vector = static_cast<js::AutoShapeVector *>(this)->vector;
        MarkShapeRange(trc, vector.length(), vector.begin(), "js::AutoShapeVector.vector");
        return;
      }

      case BINDINGS: {
        static_cast<js::AutoBindingsRooter *>(this)->bindings.trace(trc);
        return;
      }
    }

    /* Non-negative tag: an AutoArrayRooter whose tag is the element count. */
    JS_ASSERT(tag >= 0);
    MarkValueRange(trc, tag, static_cast<AutoArrayRooter *>(this)->array, "js::AutoArrayRooter.array");
}
|
|
|
|
|
2010-08-30 11:46:18 -07:00
|
|
|
namespace js {
|
|
|
|
|
2011-01-08 18:55:54 -08:00
|
|
|
/*
 * Mark all roots held directly by one JSContext: the global (unless the
 * unrooted-global option is set), any pending exception, the chain of
 * stack AutoGCRooters, the sharp-object map when active, and the
 * iterator value.
 */
JS_FRIEND_API(void)
MarkContext(JSTracer *trc, JSContext *acx)
{
    /* Stack frames and slots are traced by StackSpace::mark. */

    /* Mark other roots-by-definition in acx. */
    if (acx->globalObject && !acx->hasRunOption(JSOPTION_UNROOTED_GLOBAL))
        MarkObject(trc, *acx->globalObject, "global object");
    if (acx->isExceptionPending())
        MarkValue(trc, acx->getPendingException(), "exception");

    for (js::AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down)
        gcr->trace(trc);

    if (acx->sharpObjectMap.depth > 0)
        js_TraceSharpMap(trc, &acx->sharpObjectMap);

    MarkValue(trc, acx->iterValue, "iterValue");
}
|
|
|
|
|
2009-01-30 15:40:05 -08:00
|
|
|
/*
 * Mark every root of the runtime: the conservative C stack (except while
 * landing/shutting down), the root and lock hashes, atoms, traps, every
 * context, per-compartment trace monitors, per-thread data, the embedder's
 * extra-roots hook, and (DEBUG) function-metering maps.
 */
JS_REQUIRES_STACK void
MarkRuntime(JSTracer *trc)
{
    JSRuntime *rt = trc->context->runtime;

    if (rt->state != JSRTS_LANDING)
        MarkConservativeStackRoots(trc);

    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront())
        gc_root_traversal(trc, r.front());

    for (GCLocks::Range r = rt->gcLocksHash.all(); !r.empty(); r.popFront())
        gc_lock_traversal(r.front(), trc);

    js_TraceAtomState(trc);
    js_MarkTraps(trc);

    JSContext *iter = NULL;
    while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter))
        MarkContext(trc, acx);

#ifdef JS_TRACER
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->traceMonitor.mark(trc);
#endif

    for (ThreadDataIter i(rt); !i.empty(); i.popFront())
        i.threadData()->mark(trc);

    /*
     * We mark extra roots as the last thing so it can use additional
     * colors to implement cycle collection.
     */
    if (rt->gcExtraRootsTraceOp)
        rt->gcExtraRootsTraceOp(trc, rt->gcExtraRootsData);

#ifdef DEBUG
    if (rt->functionMeterFilename) {
        /* Keep functions referenced by the metering maps alive. */
        for (int k = 0; k < 2; k++) {
            typedef JSRuntime::FunctionCountMap HM;
            HM &h = (k == 0) ? rt->methodReadBarrierCountMap : rt->unjoinedFunctionCountMap;
            for (HM::Range r = h.all(); !r.empty(); r.popFront()) {
                JSFunction *fun = r.front().key;
                JS_CALL_OBJECT_TRACER(trc, fun, "FunctionCountMap key");
            }
        }
    }
#endif
}
|
|
|
|
|
2010-07-28 11:20:19 -07:00
|
|
|
void
|
2010-09-07 14:08:20 -07:00
|
|
|
TriggerGC(JSRuntime *rt)
|
2010-07-28 11:20:19 -07:00
|
|
|
{
|
|
|
|
JS_ASSERT(!rt->gcRunning);
|
|
|
|
if (rt->gcIsNeeded)
|
|
|
|
return;
|
|
|
|
|
|
|
|
/*
|
|
|
|
* Trigger the GC when it is safe to call an operation callback on any
|
|
|
|
* thread.
|
|
|
|
*/
|
2010-09-07 14:08:20 -07:00
|
|
|
rt->gcIsNeeded = true;
|
2011-01-07 23:44:57 -08:00
|
|
|
rt->gcTriggerCompartment = NULL;
|
2010-09-07 14:08:20 -07:00
|
|
|
TriggerAllOperationCallbacks(rt);
|
2010-07-28 11:20:19 -07:00
|
|
|
}
|
|
|
|
|
2011-01-07 23:44:57 -08:00
|
|
|
/*
 * Request a GC of a single compartment, upgrading to a full GC when a
 * compartmental collection is not possible or not worthwhile: gc-zeal
 * mode, non-compartmental GC mode, the atoms compartment, an existing
 * request for a different compartment, or heap usage far over quota.
 */
void
TriggerCompartmentGC(JSCompartment *comp)
{
    JSRuntime *rt = comp->rt;
    JS_ASSERT(!rt->gcRunning);
    GCREASON(COMPARTMENT);

    if (rt->gcZeal()) {
        TriggerGC(rt);
        return;
    }

    if (rt->gcMode != JSGC_MODE_COMPARTMENT || comp == rt->atomsCompartment) {
        /* We can't do a compartmental GC of the default compartment. */
        TriggerGC(rt);
        return;
    }

    if (rt->gcIsNeeded) {
        /* If we need to GC more than one compartment, run a full GC. */
        if (rt->gcTriggerCompartment != comp)
            rt->gcTriggerCompartment = NULL;
        return;
    }

    if (rt->gcBytes > 8192 && rt->gcBytes >= 3 * (rt->gcTriggerBytes / 2)) {
        /* If we're using significantly more than our quota, do a full GC. */
        TriggerGC(rt);
        return;
    }

    /*
     * Trigger the GC when it is safe to call an operation callback on any
     * thread.
     */
    rt->gcIsNeeded = true;
    rt->gcTriggerCompartment = comp;
    TriggerAllOperationCallbacks(comp->rt);
}
|
|
|
|
|
|
|
|
/*
 * Run a GC now if conditions warrant: always under gc-zeal; when a GC has
 * been requested (honoring a compartment-local request); or when the
 * current compartment's heap exceeds 3/4 of its trigger threshold.
 */
void
MaybeGC(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;

    if (rt->gcZeal()) {
        GCREASON(MAYBEGC);
        js_GC(cx, NULL, GC_NORMAL);
        return;
    }

    JSCompartment *comp = cx->compartment;
    if (rt->gcIsNeeded) {
        GCREASON(MAYBEGC);
        /* Collect only the requesting compartment when it is ours. */
        js_GC(cx, (comp == rt->gcTriggerCompartment) ? comp : NULL, GC_NORMAL);
        return;
    }

    if (comp->gcBytes > 8192 && comp->gcBytes >= 3 * (comp->gcTriggerBytes / 4)) {
        GCREASON(MAYBEGC);
        js_GC(cx, (rt->gcMode == JSGC_MODE_COMPARTMENT) ? comp : NULL, GC_NORMAL);
    }
}
|
|
|
|
|
2010-09-07 14:08:20 -07:00
|
|
|
} /* namespace js */
|
|
|
|
|
2009-03-24 05:07:35 -07:00
|
|
|
void
|
2010-12-22 12:02:25 -08:00
|
|
|
js_DestroyScriptsToGC(JSContext *cx, JSCompartment *comp)
|
2008-08-20 22:18:42 -07:00
|
|
|
{
|
2009-03-24 05:07:35 -07:00
|
|
|
JSScript **listp, *script;
|
2008-08-20 22:18:42 -07:00
|
|
|
|
2010-12-22 12:02:25 -08:00
|
|
|
for (size_t i = 0; i != JS_ARRAY_LENGTH(comp->scriptsToGC); ++i) {
|
|
|
|
listp = &comp->scriptsToGC[i];
|
2009-03-24 05:07:35 -07:00
|
|
|
while ((script = *listp) != NULL) {
|
|
|
|
*listp = script->u.nextToGC;
|
|
|
|
script->u.nextToGC = NULL;
|
2011-02-11 13:23:18 -08:00
|
|
|
js_DestroyCachedScript(cx, script);
|
2009-03-24 05:07:35 -07:00
|
|
|
}
|
2008-08-20 22:18:42 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2011-04-13 13:43:33 -07:00
|
|
|
/*
 * Finalize all object-kind arena lists of this compartment.  Foreground
 * kinds are finalized immediately; with JS_THREADSAFE the *_BACKGROUND
 * kinds are queued for the GC helper thread via finalizeLater.
 */
void
JSCompartment::finalizeObjectArenaLists(JSContext *cx)
{
    arenas[FINALIZE_OBJECT0]. finalizeNow<JSObject>(cx);
    arenas[FINALIZE_OBJECT2]. finalizeNow<JSObject_Slots2>(cx);
    arenas[FINALIZE_OBJECT4]. finalizeNow<JSObject_Slots4>(cx);
    arenas[FINALIZE_OBJECT8]. finalizeNow<JSObject_Slots8>(cx);
    arenas[FINALIZE_OBJECT12].finalizeNow<JSObject_Slots12>(cx);
    arenas[FINALIZE_OBJECT16].finalizeNow<JSObject_Slots16>(cx);
    arenas[FINALIZE_FUNCTION].finalizeNow<JSFunction>(cx);

#ifdef JS_THREADSAFE
    arenas[FINALIZE_OBJECT0_BACKGROUND]. finalizeLater<JSObject>(cx);
    arenas[FINALIZE_OBJECT2_BACKGROUND]. finalizeLater<JSObject_Slots2>(cx);
    arenas[FINALIZE_OBJECT4_BACKGROUND]. finalizeLater<JSObject_Slots4>(cx);
    arenas[FINALIZE_OBJECT8_BACKGROUND]. finalizeLater<JSObject_Slots8>(cx);
    arenas[FINALIZE_OBJECT12_BACKGROUND].finalizeLater<JSObject_Slots12>(cx);
    arenas[FINALIZE_OBJECT16_BACKGROUND].finalizeLater<JSObject_Slots16>(cx);
#endif

#if JS_HAS_XML_SUPPORT
    arenas[FINALIZE_XML].finalizeNow<JSXML>(cx);
#endif
}
|
|
|
|
|
|
|
|
/*
 * Finalize the string arena lists.  Plain and short strings go to the
 * background sweeper when threads are available; external strings must
 * be finalized now (their finalizers run embedder callbacks).
 */
void
JSCompartment::finalizeStringArenaLists(JSContext *cx)
{
#ifdef JS_THREADSAFE
    arenas[FINALIZE_SHORT_STRING].finalizeLater<JSShortString>(cx);
    arenas[FINALIZE_STRING].finalizeLater<JSString>(cx);
#else
    arenas[FINALIZE_SHORT_STRING].finalizeNow<JSShortString>(cx);
    arenas[FINALIZE_STRING].finalizeNow<JSString>(cx);
#endif
    arenas[FINALIZE_EXTERNAL_STRING].finalizeNow<JSExternalString>(cx);
}
|
|
|
|
|
2011-03-28 13:17:46 -07:00
|
|
|
/*
 * Finalize the shape arena list (always in the foreground).
 */
void
JSCompartment::finalizeShapeArenaLists(JSContext *cx)
{
    arenas[FINALIZE_SHAPE].finalizeNow<Shape>(cx);
}
|
|
|
|
|
2010-04-27 06:46:24 -07:00
|
|
|
#ifdef JS_THREADSAFE
|
|
|
|
|
|
|
|
namespace js {
|
|
|
|
|
2010-09-07 14:08:20 -07:00
|
|
|
bool
|
|
|
|
GCHelperThread::init(JSRuntime *rt)
|
|
|
|
{
|
|
|
|
if (!(wakeup = PR_NewCondVar(rt->gcLock)))
|
|
|
|
return false;
|
|
|
|
if (!(sweepingDone = PR_NewCondVar(rt->gcLock)))
|
|
|
|
return false;
|
|
|
|
|
|
|
|
thread = PR_CreateThread(PR_USER_THREAD, threadMain, rt, PR_PRIORITY_NORMAL,
|
|
|
|
PR_LOCAL_THREAD, PR_JOINABLE_THREAD, 0);
|
|
|
|
return !!thread;
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
/*
 * Shut down the helper thread: under the GC lock, flag shutdown and wake
 * the thread; then join it outside the lock and destroy the condvars.
 */
void
GCHelperThread::finish(JSRuntime *rt)
{
    PRThread *join = NULL;
    {
        AutoLockGC lock(rt);
        if (thread && !shutdown) {
            shutdown = true;
            PR_NotifyCondVar(wakeup);
            join = thread;
        }
    }
    if (join) {
        /* PR_DestroyThread is not necessary. */
        PR_JoinThread(join);
    }
    if (wakeup)
        PR_DestroyCondVar(wakeup);
    if (sweepingDone)
        PR_DestroyCondVar(sweepingDone);
}
|
|
|
|
|
|
|
|
/* static */
/*
 * NSPR thread entry point: forward to the runtime's helper-thread loop.
 */
void
GCHelperThread::threadMain(void *arg)
{
    JSRuntime *rt = static_cast<JSRuntime *>(arg);
    rt->gcHelperThread.threadLoop(rt);
}
|
|
|
|
|
|
|
|
/*
 * Helper-thread main loop: wait (under the GC lock) until either
 * shutdown or a sweep request, run the sweep with the lock dropped, then
 * signal sweepingDone for any waiters.
 */
void
GCHelperThread::threadLoop(JSRuntime *rt)
{
    AutoLockGC lock(rt);
    while (!shutdown) {
        /*
         * Sweeping can be true here on the first iteration if a GC and the
         * corresponding startBackgroundSweep call happen before this thread
         * has a chance to run.
         */
        if (!sweeping)
            PR_WaitCondVar(wakeup, PR_INTERVAL_NO_TIMEOUT);
        if (sweeping) {
            /* Drop the GC lock while the actual sweeping runs. */
            AutoUnlockGC unlock(rt);
            doSweep();
        }
        sweeping = false;
        PR_NotifyAllCondVar(sweepingDone);
    }
}
|
|
|
|
|
|
|
|
/*
 * Hand the queued finalization work to the helper thread by raising the
 * sweeping flag and waking it up.
 */
void
GCHelperThread::startBackgroundSweep(JSRuntime *rt)
{
    /* The caller takes the GC lock. */
    JS_ASSERT(!sweeping);
    sweeping = true;
    PR_NotifyCondVar(wakeup);
}
|
|
|
|
|
|
|
|
/*
 * Block until the background sweep finishes.  When gcUnlocked is true
 * the caller does not hold the GC lock, so acquire it here for the
 * duration of the wait (PR_WaitCondVar requires the lock).
 */
void
GCHelperThread::waitBackgroundSweepEnd(JSRuntime *rt, bool gcUnlocked)
{
    Maybe<AutoLockGC> lock;
    if (gcUnlocked)
        lock.construct(rt);
    while (sweeping)
        PR_WaitCondVar(sweepingDone, PR_INTERVAL_NO_TIMEOUT);
}
|
|
|
|
|
2010-04-28 17:17:34 -07:00
|
|
|
/*
 * Called when the current free-pointer array is full: archive it in
 * freeVector, allocate a fresh array, and stash |ptr| in it.  If any
 * step fails (the do/while(false) ladder breaks out), fall back to
 * freeing |ptr| synchronously on the foreground thread.
 */
JS_FRIEND_API(void)
GCHelperThread::replenishAndFreeLater(void *ptr)
{
    JS_ASSERT(freeCursor == freeCursorEnd);
    do {
        if (freeCursor && !freeVector.append(freeCursorEnd - FREE_ARRAY_LENGTH))
            break;
        freeCursor = (void **) OffTheBooks::malloc_(FREE_ARRAY_SIZE);
        if (!freeCursor) {
            freeCursorEnd = NULL;
            break;
        }
        freeCursorEnd = freeCursor + FREE_ARRAY_LENGTH;
        *freeCursor++ = ptr;
        return;
    } while (false);
    /* Fallback: free synchronously when deferral bookkeeping failed. */
    Foreground::free_(ptr);
}
|
|
|
|
|
|
|
|
/*
 * Background sweep body: finalize all queued arenas, expire unused GC
 * chunks, then release every deferred-free pointer array (the partially
 * filled current array first, then the archived full arrays).
 */
void
GCHelperThread::doSweep()
{
    JS_ASSERT(cx);
    for (ArenaHeader **i = finalizeVector.begin(); i != finalizeVector.end(); ++i)
        ArenaList::backgroundFinalize(cx, *i);
    finalizeVector.resize(0);
    ExpireGCChunks(cx->runtime);
    /* cx is only valid for the duration of one sweep. */
    cx = NULL;

    if (freeCursor) {
        /* The current array runs from its base to the cursor. */
        void **array = freeCursorEnd - FREE_ARRAY_LENGTH;
        freeElementsAndArray(array, freeCursor);
        freeCursor = freeCursorEnd = NULL;
    } else {
        JS_ASSERT(!freeCursorEnd);
    }
    for (void ***iter = freeVector.begin(); iter != freeVector.end(); ++iter) {
        void **array = *iter;
        freeElementsAndArray(array, array + FREE_ARRAY_LENGTH);
    }
    freeVector.resize(0);
}
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
#endif /* JS_THREADSAFE */
|
|
|
|
|
2010-06-23 14:35:10 -07:00
|
|
|
/*
 * Sweep every compartment after marking, computing first how aggressively
 * inactive JIT code should be released based on elapsed eighth-lifetimes.
 */
static void
SweepCrossCompartmentWrappers(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    /*
     * Figure out how much JIT code should be released from inactive compartments.
     * If multiple eighth-lives have passed, compound the release interval linearly;
     * if enough time has passed, all inactive JIT code will be released.
     */
    uint32 releaseInterval = 0;
    int64 now = PRMJ_Now();
    if (now >= rt->gcJitReleaseTime) {
        releaseInterval = 8;
        while (now >= rt->gcJitReleaseTime) {
            if (--releaseInterval == 1)
                rt->gcJitReleaseTime = now;
            rt->gcJitReleaseTime += JIT_SCRIPT_EIGHTH_LIFETIME;
        }
    }

    /*
     * Sweep the compartment:
     * (1) Remove dead wrappers from the compartment map.
     * (2) Finalize any unused empty shapes.
     * (3) Sweep the trace JIT of unused code.
     * (4) Sweep the method JIT ICs and release infrequently used JIT code.
     */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->sweep(cx, releaseInterval);
}
|
|
|
|
|
|
|
|
static void
|
|
|
|
SweepCompartments(JSContext *cx, JSGCInvocationKind gckind)
|
|
|
|
{
|
|
|
|
JSRuntime *rt = cx->runtime;
|
|
|
|
JSCompartmentCallback callback = rt->compartmentCallback;
|
2011-01-18 15:20:39 -08:00
|
|
|
|
|
|
|
/* Skip the atomsCompartment. */
|
|
|
|
JSCompartment **read = rt->compartments.begin() + 1;
|
2011-01-08 20:06:29 -08:00
|
|
|
JSCompartment **end = rt->compartments.end();
|
|
|
|
JSCompartment **write = read;
|
2011-01-18 15:20:39 -08:00
|
|
|
JS_ASSERT(rt->compartments.length() >= 1);
|
|
|
|
JS_ASSERT(*rt->compartments.begin() == rt->atomsCompartment);
|
2011-01-13 19:04:03 -08:00
|
|
|
|
2010-06-23 14:35:10 -07:00
|
|
|
while (read < end) {
|
2011-01-18 15:20:39 -08:00
|
|
|
JSCompartment *compartment = *read++;
|
|
|
|
|
2011-04-18 15:42:07 -07:00
|
|
|
if (compartment->isAboutToBeCollected(gckind)) {
|
2011-01-13 19:04:03 -08:00
|
|
|
JS_ASSERT(compartment->freeLists.isEmpty());
|
2011-01-18 15:20:39 -08:00
|
|
|
if (callback)
|
2011-05-22 03:09:28 -07:00
|
|
|
JS_ALWAYS_TRUE(callback(cx, compartment, JSCOMPARTMENT_DESTROY));
|
2011-01-18 15:20:39 -08:00
|
|
|
if (compartment->principals)
|
|
|
|
JSPRINCIPALS_DROP(cx, compartment->principals);
|
Bug 634155: Account for NewCompartment's memory, and change allocation APIs (r=nnethercote)
This changes the allocation API, in the following way:
js_malloc -> {cx->,rt->,OffTheBooks::}malloc
js_calloc -> {cx->,rt->,OffTheBooks::}calloc
js_realloc -> {cx->,rt->,OffTheBooks::}realloc
js_free -> {cx->,rt->,Foreground::,UnwantedForeground::}free
js_new -> {cx->,rt->,OffTheBooks::}new_
js_new_array -> {cx->,rt->,OffTheBooks::}new_array
js_delete -> {cx->,rt->,Foreground::,UnwantedForeground::}delete_
This is to move as many allocations as possible through a JSContext (so that they may be aken into account by gcMallocBytes) and to move as many deallocations to the background as possible (except on error paths).
2011-03-31 01:13:49 -07:00
|
|
|
cx->delete_(compartment);
|
2011-01-18 15:20:39 -08:00
|
|
|
continue;
|
2010-06-23 14:35:10 -07:00
|
|
|
}
|
2011-01-18 15:20:39 -08:00
|
|
|
*write++ = compartment;
|
2010-06-23 14:35:10 -07:00
|
|
|
}
|
|
|
|
rt->compartments.resize(write - rt->compartments.begin());
|
|
|
|
}
|
|
|
|
|
2010-04-08 05:54:18 -07:00
|
|
|
/*
 * Perform mark-and-sweep GC.
 *
 * In a JS_THREADSAFE build, the calling thread must be rt->gcThread and each
 * other thread must be either outside all requests or blocked waiting for GC
 * to finish. Note that the caller does not hold rt->gcLock.
 * If comp is set, we perform a single-compartment GC.
 */
static void
MarkAndSweep(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind  GCTIMER_PARAM)
{
    /* Per-compartment GC is never the shutdown GC and never collects atoms. */
    JS_ASSERT_IF(comp, gckind != GC_LAST_CONTEXT);
    JS_ASSERT_IF(comp, comp != comp->rt->atomsCompartment);
    JS_ASSERT_IF(comp, comp->rt->gcMode == JSGC_MODE_COMPARTMENT);

    JSRuntime *rt = cx->runtime;
    rt->gcNumber++;

    /* Clear gcIsNeeded now, when we are about to start a normal GC cycle. */
    rt->gcIsNeeded = false;
    rt->gcTriggerCompartment = NULL;

    /* Reset malloc counter. */
    rt->resetGCMallocBytes();

#ifdef JS_DUMP_SCOPE_METERS
    {
        extern void js_DumpScopeMeters(JSRuntime *rt);
        js_DumpScopeMeters(rt);
    }
#endif

    /*
     * Reset the property cache's type id generator so we can compress ids.
     * Same for the protoHazardShape proxy-shape standing in for all object
     * prototypes having readonly or setter properties.
     */
    if (rt->shapeGen & SHAPE_OVERFLOW_BIT || (rt->gcZeal() && !rt->gcCurrentCompartment)) {
        rt->gcRegenShapes = true;
        rt->shapeGen = 0;
        rt->protoHazardShape = 0;
    }

    /* Purge caches: only the collected compartment, or all of them. */
    if (rt->gcCurrentCompartment) {
        rt->gcCurrentCompartment->purge(cx);
    } else {
        for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
            (*c)->purge(cx);
    }

    js_PurgeThreads(cx);
    {
        JSContext *iter = NULL;
        while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter))
            acx->purge();
    }

    /* Shape regeneration is incompatible with a single-compartment GC. */
    JS_ASSERT_IF(comp, !rt->gcRegenShapes);

    /*
     * Mark phase.
     */
    GCTIMESTAMP(startMark);
    GCMarker gcmarker(cx);
    JS_ASSERT(IS_GC_MARKING_TRACER(&gcmarker));
    JS_ASSERT(gcmarker.getMarkColor() == BLACK);
    rt->gcMarkingTracer = &gcmarker;

    /* Clear the mark bitmaps of all chunks before marking. */
    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
        r.front()->bitmap.clear();

    /*
     * For a compartment GC, things reachable only through cross-compartment
     * wrappers act as extra roots; for a full GC, script filenames are marked
     * instead.
     */
    if (comp) {
        for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
            (*c)->markCrossCompartmentWrappers(&gcmarker);
    } else {
        js_MarkScriptFilenames(rt);
    }

    MarkRuntime(&gcmarker);

    gcmarker.drainMarkStack();

    /*
     * Mark weak roots.
     *
     * Iterate until watch points, weak maps, and the debugger all report that
     * no new things were marked; each pass drains the mark stack so the next
     * pass sees a fully marked heap.
     */
    while (true) {
        if (!js_TraceWatchPoints(&gcmarker) &&
            !WeakMapBase::markAllIteratively(&gcmarker) &&
            !Debug::mark(&gcmarker, comp, gckind))
        {
            break;
        }
        gcmarker.drainMarkStack();
    }

    rt->gcMarkingTracer = NULL;

    if (rt->gcCallback)
        (void) rt->gcCallback(cx, JSGC_MARK_END);

#ifdef DEBUG
    /* Make sure that we didn't mark an object in another compartment */
    if (comp) {
        for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
            JS_ASSERT_IF(*c != comp && *c != rt->atomsCompartment, checkArenaListAllUnmarked(*c));
    }
#endif

    /*
     * Sweep phase.
     *
     * Finalize as we sweep, outside of rt->gcLock but with rt->gcRunning set
     * so that any attempt to allocate a GC-thing from a finalizer will fail,
     * rather than nest badly and leave the unmarked newborn to be swept.
     *
     * We first sweep atom state so we can use IsAboutToBeFinalized on
     * JSString held in a hashtable to check if the hashtable entry can be
     * freed. Note that even after the entry is freed, JSObject finalizers can
     * continue to access the corresponding JSString* assuming that they are
     * unique. This works since the atomization API must not be called during
     * the GC.
     */
    GCTIMESTAMP(startSweep);

    /* Finalize unreachable (key,value) pairs in all weak maps. */
    WeakMapBase::sweepAll(&gcmarker);

    js_SweepAtomState(cx);

    /* Finalize watch points associated with unreachable objects. */
    js_SweepWatchPoints(cx);

    /* Debugger objects are swept only during a full GC. */
    if (!comp)
        Debug::sweepAll(cx);

    /*
     * We finalize objects before other GC things to ensure that object's finalizer
     * can access them even if they will be freed. Sweep the runtime's property trees
     * after finalizing objects, in case any had watchpoints referencing tree nodes.
     * Do this before sweeping compartments, so that we sweep all shapes in
     * unreachable compartments.
     */
    if (comp) {
        comp->sweep(cx, 0);
        comp->finalizeObjectArenaLists(cx);
        GCTIMESTAMP(sweepObjectEnd);
        comp->finalizeStringArenaLists(cx);
        GCTIMESTAMP(sweepStringEnd);
        comp->finalizeShapeArenaLists(cx);
        GCTIMESTAMP(sweepShapeEnd);
    } else {
        SweepCrossCompartmentWrappers(cx);
        for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); c++)
            (*c)->finalizeObjectArenaLists(cx);

        GCTIMESTAMP(sweepObjectEnd);

        for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); c++)
            (*c)->finalizeStringArenaLists(cx);

        GCTIMESTAMP(sweepStringEnd);

        for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); c++)
            (*c)->finalizeShapeArenaLists(cx);

        GCTIMESTAMP(sweepShapeEnd);

#ifdef DEBUG
        for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
            (*c)->propertyTree.dumpShapeStats();
#endif

#ifdef JS_GCMETER
        for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
            UpdateAllCompartmentGCStats(*c);
#endif
    }

#ifdef DEBUG
    PropertyTree::dumpShapes(cx);
#endif

    if (!comp) {
        SweepCompartments(cx, gckind);

        /*
         * Sweep script filenames after sweeping functions in the generic loop
         * above. In this way when a scripted function's finalizer destroys the
         * script and calls rt->destroyScriptHook, the hook can still access the
         * script's filename. See bug 323267.
         */
        js_SweepScriptFilenames(rt);
    }

#ifndef JS_THREADSAFE
    /*
     * Destroy arenas after we finished the sweeping so finalizers can safely
     * use IsAboutToBeFinalized().
     * This is done on the GCHelperThread if JS_THREADSAFE is defined.
     */
    ExpireGCChunks(rt);
#endif
    GCTIMESTAMP(sweepDestroyEnd);

    if (rt->gcCallback)
        (void) rt->gcCallback(cx, JSGC_FINALIZE_END);
#ifdef DEBUG_srcnotesize
    {
        extern void DumpSrcNoteSizeHist();
        DumpSrcNoteSizeHist();
        printf("GC HEAP SIZE %lu\n", (unsigned long)rt->gcBytes);
    }
#endif
}
|
|
|
|
|
2010-04-22 05:31:00 -07:00
|
|
|
#ifdef JS_THREADSAFE
|
2010-05-27 07:57:55 -07:00
|
|
|
|
|
|
|
/*
|
|
|
|
* If the GC is running and we're called on another thread, wait for this GC
|
|
|
|
* activation to finish. We can safely wait here without fear of deadlock (in
|
|
|
|
* the case where we are called within a request on another thread's context)
|
|
|
|
* because the GC doesn't set rt->gcRunning until after it has waited for all
|
|
|
|
* active requests to end.
|
|
|
|
*
|
|
|
|
* We call here js_CurrentThreadId() after checking for rt->gcState to avoid
|
|
|
|
* an expensive call when the GC is not running.
|
|
|
|
*/
|
|
|
|
void
|
|
|
|
js_WaitForGC(JSRuntime *rt)
|
|
|
|
{
|
|
|
|
if (rt->gcRunning && rt->gcThread->id != js_CurrentThreadId()) {
|
|
|
|
do {
|
|
|
|
JS_AWAIT_GC_DONE(rt);
|
|
|
|
} while (rt->gcRunning);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2010-04-22 05:31:00 -07:00
|
|
|
/*
 * GC is running on another thread. Temporarily suspend all requests running
 * on the current thread and wait until the GC is done.
 */
static void
LetOtherGCFinish(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    JS_ASSERT(rt->gcThread);
    JS_ASSERT(cx->thread() != rt->gcThread);

    /* At most one request per thread counts against rt->requestCount. */
    size_t requestDebit = cx->thread()->data.requestDepth ? 1 : 0;
    JS_ASSERT(requestDebit <= rt->requestCount);
#ifdef JS_TRACER
    JS_ASSERT_IF(requestDebit == 0, !JS_ON_TRACE(cx));
#endif
    if (requestDebit != 0) {
#ifdef JS_TRACER
        if (JS_ON_TRACE(cx)) {
            /*
             * Leave trace before we decrease rt->requestCount and notify the
             * GC. Otherwise the GC may start immediately after we unlock while
             * this thread is still on trace.
             */
            AutoUnlockGC unlock(rt);
            LeaveTrace(cx);
        }
#endif
        rt->requestCount -= requestDebit;
        if (rt->requestCount == 0)
            JS_NOTIFY_REQUEST_DONE(rt);

        /*
         * Update the native stack before we wait so the GC thread see the
         * correct stack bounds.
         */
        RecordNativeStackTopForGC(cx);
    }

    /*
     * Check that we did not release the GC lock above and let the GC to
     * finish before we wait.
     */
    JS_ASSERT(rt->gcThread);

    /*
     * Wait for GC to finish on the other thread, even if requestDebit is 0
     * and even if GC has not started yet because the gcThread is waiting in
     * AutoGCSession. This ensures that js_GC never returns without a full GC
     * cycle happening.
     */
    do {
        JS_AWAIT_GC_DONE(rt);
    } while (rt->gcThread);

    /* Restore this thread's request contribution before returning. */
    rt->requestCount += requestDebit;
}
|
|
|
|
|
|
|
|
#endif
|
|
|
|
|
2010-08-30 11:46:18 -07:00
|
|
|
/*
 * RAII guard for a GC session: the constructor performs the stop-the-world
 * rendezvous and sets rt->gcRunning; the destructor clears the GC state and
 * wakes any threads waiting for the GC to finish.
 */
class AutoGCSession {
  public:
    explicit AutoGCSession(JSContext *cx);
    ~AutoGCSession();

  private:
    /* Context whose runtime owns this session. */
    JSContext *context;

    /* Disable copy constructor or assignments */
    AutoGCSession(const AutoGCSession&);
    void operator=(const AutoGCSession&);
};
|
|
|
|
|
2010-04-22 05:31:00 -07:00
|
|
|
/*
 * Start a new GC session. Together with LetOtherGCFinish this function
 * contains the rendezvous algorithm by which we stop the world for GC.
 *
 * This thread becomes the GC thread. Wait for all other threads to quiesce.
 * Then set rt->gcRunning and return.
 */
AutoGCSession::AutoGCSession(JSContext *cx)
  : context(cx)
{
    JSRuntime *rt = cx->runtime;

#ifdef JS_THREADSAFE
    /* If another thread already owns the GC, wait for its cycle to end. */
    if (rt->gcThread && rt->gcThread != cx->thread())
        LetOtherGCFinish(cx);
#endif

    JS_ASSERT(!rt->gcRunning);

#ifdef JS_THREADSAFE
    /* No other thread is in GC, so indicate that we're now in GC. */
    JS_ASSERT(!rt->gcThread);
    rt->gcThread = cx->thread();

    /*
     * Notify operation callbacks on other threads, which will give them a
     * chance to yield their requests. Threads without requests perform their
     * callback at some later point, which then will be unnecessary, but
     * harmless.
     */
    for (JSThread::Map::Range r = rt->threads.all(); !r.empty(); r.popFront()) {
        JSThread *thread = r.front().value;
        if (thread != cx->thread())
            thread->data.triggerOperationCallback(rt);
    }

    /*
     * Discount the request on the current thread from contributing to
     * rt->requestCount before we wait for all other requests to finish.
     * JS_NOTIFY_REQUEST_DONE, which will wake us up, is only called on
     * rt->requestCount transitions to 0.
     */
    size_t requestDebit = cx->thread()->data.requestDepth ? 1 : 0;
    JS_ASSERT(requestDebit <= rt->requestCount);
    if (requestDebit != rt->requestCount) {
        rt->requestCount -= requestDebit;

        /* Sleep until every other thread has left its request. */
        do {
            JS_AWAIT_REQUEST_DONE(rt);
        } while (rt->requestCount > 0);
        rt->requestCount += requestDebit;
    }

#endif /* JS_THREADSAFE */

    /*
     * Set rt->gcRunning here within the GC lock, and after waiting for any
     * active requests to end. This way js_WaitForGC called outside a request
     * would not block on the GC that is waiting for other requests to finish
     * with rt->gcThread set while JS_BeginRequest would do such wait.
     */
    rt->gcRunning = true;
}
|
|
|
|
|
|
|
|
/* End the current GC session and allow other threads to proceed. */
|
2010-08-30 11:46:18 -07:00
|
|
|
AutoGCSession::~AutoGCSession()
|
2010-04-22 05:31:00 -07:00
|
|
|
{
|
2010-08-30 11:46:18 -07:00
|
|
|
JSRuntime *rt = context->runtime;
|
2010-05-14 06:55:17 -07:00
|
|
|
rt->gcRunning = false;
|
2010-04-22 05:31:00 -07:00
|
|
|
#ifdef JS_THREADSAFE
|
2011-04-13 09:27:37 -07:00
|
|
|
JS_ASSERT(rt->gcThread == context->thread());
|
2010-04-22 05:31:00 -07:00
|
|
|
rt->gcThread = NULL;
|
|
|
|
JS_NOTIFY_GC_DONE(rt);
|
|
|
|
#endif
|
|
|
|
}
|
|
|
|
|
2010-04-08 05:54:18 -07:00
|
|
|
/*
 * GC, repeatedly if necessary, until we think we have not created any new
 * garbage and no other threads are demanding more GC. We disable inlining
 * to ensure that the bottom of the stack with possible GC roots recorded in
 * js_GC excludes any pointers we use during the marking implementation.
 */
static JS_NEVER_INLINE void
GCCycle(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind  GCTIMER_PARAM)
{
    JSRuntime *rt = cx->runtime;

    /*
     * Recursive GC is no-op and a call from another thread waits the started
     * GC cycle to finish.
     */
    if (rt->gcMarkAndSweep) {
#ifdef JS_THREADSAFE
        JS_ASSERT(rt->gcThread);
        if (rt->gcThread != cx->thread()) {
            /* We do not return until another GC finishes. */
            LetOtherGCFinish(cx);
        }
#endif
        return;
    }

    AutoGCSession gcsession(cx);

    /*
     * Don't GC if any thread is reporting an OOM. We check the flag after we
     * have set up the GC session and know that the thread that reported OOM
     * is either the current thread or waits for the GC to complete on this
     * thread.
     */
    if (rt->inOOMReport) {
        JS_ASSERT(gckind != GC_LAST_CONTEXT);
        return;
    }

    /*
     * We should not be depending on cx->compartment in the GC, so set it to
     * NULL to look for violations.
     */
    SwitchToCompartment sc(cx, (JSCompartment *)NULL);

    JS_ASSERT(!rt->gcCurrentCompartment);
    rt->gcCurrentCompartment = comp;

    rt->gcMarkAndSweep = true;
    {
        /* Run the collection proper without holding the GC lock. */
        AutoUnlockGC unlock(rt);

#ifdef JS_THREADSAFE
        /*
         * As we about to purge caches and clear the mark bits we must wait
         * for any background finalization to finish.
         */
        JS_ASSERT(!cx->gcBackgroundFree);
        rt->gcHelperThread.waitBackgroundSweepEnd(rt);
        if (gckind != GC_LAST_CONTEXT && rt->state != JSRTS_LANDING) {
            /* Arrange for finalized things to be freed on the helper thread. */
            cx->gcBackgroundFree = &rt->gcHelperThread;
            cx->gcBackgroundFree->setContext(cx);
        }
#endif
        MarkAndSweep(cx, comp, gckind  GCTIMER_ARG);
    }

#ifdef JS_THREADSAFE
    /* Hand accumulated free work to the helper thread, except at shutdown. */
    if (gckind != GC_LAST_CONTEXT && rt->state != JSRTS_LANDING) {
        JS_ASSERT(cx->gcBackgroundFree == &rt->gcHelperThread);
        cx->gcBackgroundFree = NULL;
        rt->gcHelperThread.startBackgroundSweep(rt);
    } else {
        JS_ASSERT(!cx->gcBackgroundFree);
    }
#endif

    /* Reset per-cycle GC state and record the post-GC heap sizes. */
    rt->gcMarkAndSweep = false;
    rt->gcRegenShapes = false;
    rt->setGCLastBytes(rt->gcBytes);
    rt->gcCurrentCompartment = NULL;
    rt->gcWeakMapList = NULL;

    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->setGCLastBytes((*c)->gcBytes);
}
|
|
|
|
|
2007-03-22 10:30:00 -07:00
|
|
|
/*
 * Public GC entry point. Runs one GC cycle (or, at shutdown, cycles until no
 * new garbage is created). |comp| non-NULL requests a single-compartment GC.
 */
void
js_GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind)
{
    JSRuntime *rt = cx->runtime;

    /*
     * Don't collect garbage if the runtime isn't up, and cx is not the last
     * context in the runtime. The last context must force a GC, and nothing
     * should suppress that final collection or there may be shutdown leaks,
     * or runtime bloat until the next context is created.
     */
    if (rt->state != JSRTS_UP && gckind != GC_LAST_CONTEXT)
        return;

    /* GC while on trace is not allowed; the trace must exit first. */
    if (JS_ON_TRACE(cx)) {
        JS_ASSERT(gckind != GC_LAST_CONTEXT);
        return;
    }

    RecordNativeStackTopForGC(cx);

    GCTIMER_BEGIN(rt, comp);

    do {
        /*
         * Let the API user decide to defer a GC if it wants to (unless this
         * is the last context). Invoke the callback regardless. Sample the
         * callback in case we are freely racing with a JS_SetGCCallback{,RT}
         * on another thread.
         */
        if (JSGCCallback callback = rt->gcCallback) {
            if (!callback(cx, JSGC_BEGIN) && gckind != GC_LAST_CONTEXT)
                return;
        }

        {
#ifdef JS_THREADSAFE
            rt->gcHelperThread.waitBackgroundSweepEnd(rt);
#endif
            /* Lock out other GC allocator and collector invocations. */
            AutoLockGC lock(rt);
            rt->gcPoke = false;
            GCCycle(cx, comp, gckind  GCTIMER_ARG);
        }

        /* We re-sample the callback again as the finalizers can change it. */
        if (JSGCCallback callback = rt->gcCallback)
            (void) callback(cx, JSGC_END);

        /*
         * On shutdown, iterate until finalizers or the JSGC_END callback
         * stop creating garbage.
         */
    } while (gckind == GC_LAST_CONTEXT && rt->gcPoke);
#ifdef JS_GCMETER
    js_DumpGCStats(cx->runtime, stderr);
#endif
    GCTIMER_END(gckind == GC_LAST_CONTEXT);
}
|
2010-05-14 06:55:17 -07:00
|
|
|
|
2010-06-17 15:23:17 -07:00
|
|
|
namespace js {
|
2010-08-30 11:46:18 -07:00
|
|
|
|
2011-05-20 03:38:31 -07:00
|
|
|
class AutoCopyFreeListToArenas {
|
|
|
|
JSRuntime *rt;
|
2010-05-14 06:55:17 -07:00
|
|
|
|
2011-05-20 03:38:31 -07:00
|
|
|
public:
|
|
|
|
AutoCopyFreeListToArenas(JSRuntime *rt)
|
|
|
|
: rt(rt) {
|
|
|
|
for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
|
|
|
|
(*c)->freeLists.copyToArenas();
|
2010-07-19 13:36:49 -07:00
|
|
|
}
|
|
|
|
|
2011-05-20 03:38:31 -07:00
|
|
|
~AutoCopyFreeListToArenas() {
|
|
|
|
for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
|
|
|
|
(*c)->freeLists.clearInArenas();
|
2010-06-04 16:32:10 -07:00
|
|
|
}
|
2011-05-20 03:38:31 -07:00
|
|
|
};
|
2010-09-24 10:54:39 -07:00
|
|
|
|
|
|
|
/*
 * Trace (mark) the entire runtime on behalf of an external tracer. Sets up a
 * GC session when called from a non-GC thread; calls made from inside a GC
 * fall through to a plain MarkRuntime.
 */
void
TraceRuntime(JSTracer *trc)
{
    LeaveTrace(trc->context);

#ifdef JS_THREADSAFE
    {
        JSContext *cx = trc->context;
        JSRuntime *rt = cx->runtime;
        /* Only set up a session if we are not already the GC thread. */
        if (rt->gcThread != cx->thread()) {
            AutoLockGC lock(rt);
            AutoGCSession gcsession(cx);

            /* Let any background finalization finish before touching arenas. */
            rt->gcHelperThread.waitBackgroundSweepEnd(rt, false);
            AutoUnlockGC unlock(rt);

            AutoCopyFreeListToArenas copy(rt);
            RecordNativeStackTopForGC(trc->context);
            MarkRuntime(trc);
            return;
        }
    }
#else
    AutoCopyFreeListToArenas copy(trc->context->runtime);
    RecordNativeStackTopForGC(trc->context);
#endif

    /*
     * Calls from inside a normal GC or a recursive calls are OK and do not
     * require session setup.
     */
    MarkRuntime(trc);
}
|
2010-09-24 10:54:39 -07:00
|
|
|
|
2011-05-19 17:11:16 -07:00
|
|
|
/*
 * Invoke |callback| on every allocated GC thing in |comp| whose trace kind
 * is selected by |traceKindMask| (a mask of 0 selects all kinds). Cells
 * currently on an arena's free list are skipped.
 */
static void
IterateCompartmentCells(JSContext *cx, JSCompartment *comp, uint64 traceKindMask,
                        void *data, IterateCallback callback)
{
    for (unsigned thingKind = 0; thingKind < FINALIZE_LIMIT; thingKind++) {
        size_t traceKind = GetFinalizableTraceKind(thingKind);
        if (traceKindMask && !TraceKindInMask(traceKind, traceKindMask))
            continue;

        size_t thingSize = GCThingSizeMap[thingKind];
        ArenaHeader *aheader = comp->arenas[thingKind].getHead();
        for (; aheader; aheader = aheader->next) {
            Arena *a = aheader->getArena();
            uintptr_t end = a->thingsEnd();
            FreeCell *nextFree = aheader->getFreeList();
            /*
             * Scan the arena linearly, skipping free cells. NOTE(review):
             * this walk assumes the arena free list is address-ordered so
             * each free cell is encountered exactly when the scan reaches
             * it — confirm against the free-list builder.
             */
            for (uintptr_t thing = a->thingsStart(thingSize); thing != end; thing += thingSize) {
                Cell *t = reinterpret_cast<Cell *>(thing);
                if (t == nextFree)
                    nextFree = nextFree->link;
                else
                    (*callback)(cx, data, traceKind, t);
            }
        }
    }
}
|
|
|
|
|
|
|
|
/*
 * Iterate all allocated cells in |comp|, or in every compartment when comp
 * is NULL. Establishes a GC session so the heap cannot mutate during the
 * walk; must not be called while a GC is already running.
 */
void
IterateCells(JSContext *cx, JSCompartment *comp, uint64 traceKindMask,
             void *data, IterateCallback callback)
{
    LeaveTrace(cx);

    JSRuntime *rt = cx->runtime;
    JS_ASSERT(!rt->gcRunning);

    /* Stop the world, then drop the lock for the iteration itself. */
    AutoLockGC lock(rt);
    AutoGCSession gcsession(cx);
#ifdef JS_THREADSAFE
    rt->gcHelperThread.waitBackgroundSweepEnd(rt, false);
#endif
    AutoUnlockGC unlock(rt);

    /* Make the per-compartment free lists visible in the arena headers. */
    AutoCopyFreeListToArenas copy(rt);
    if (comp) {
        IterateCompartmentCells(cx, comp, traceKindMask, data, callback);
    } else {
        for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
            IterateCompartmentCells(cx, *c, traceKindMask, data, callback);
    }
}
|
|
|
|
|
2011-05-22 03:09:28 -07:00
|
|
|
namespace gc {
|
|
|
|
|
|
|
|
/*
 * Allocate and initialize a fresh compartment, register it with the
 * runtime, and return it.  On any failure the partially-built compartment
 * is destroyed and NULL is returned; an out-of-memory report is issued for
 * the append failure (cx->new_ is presumed to report its own OOM — NOTE:
 * confirm against the allocator's contract).
 */
JSCompartment *
NewCompartment(JSContext *cx, JSPrincipals *principals)
{
    JSRuntime *rt = cx->runtime;
    JSCompartment *compartment = cx->new_<JSCompartment>(rt);

    /* Guard clause: bail out if allocation or initialization failed. */
    if (!compartment || !compartment->init()) {
        /* delete_ tolerates NULL, matching the original fall-through. */
        Foreground::delete_(compartment);
        return NULL;
    }

    if (principals) {
        compartment->principals = principals;
        JSPRINCIPALS_HOLD(cx, principals);
    }

    compartment->setGCLastBytes(8192);

    /*
     * |lock| must be released before js_ReportOutOfMemory runs, hence the
     * inner scope around the append.
     */
    {
        AutoLockGC lock(rt);
        if (rt->compartments.append(compartment))
            return compartment;
    }

    js_ReportOutOfMemory(cx);
    Foreground::delete_(compartment);
    return NULL;
}
|
|
|
|
|
2011-06-01 17:48:52 -07:00
|
|
|
void
|
|
|
|
RunDebugGC(JSContext *cx)
|
|
|
|
{
|
|
|
|
#ifdef JS_GC_ZEAL
|
|
|
|
if (IsGCAllowed(cx)) {
|
|
|
|
JSRuntime *rt = cx->runtime;
|
|
|
|
|
|
|
|
/*
|
|
|
|
* If rt->gcDebugCompartmentGC is true, only GC the current
|
|
|
|
* compartment. But don't GC the atoms compartment.
|
|
|
|
*/
|
|
|
|
rt->gcTriggerCompartment = rt->gcDebugCompartmentGC ? cx->compartment : NULL;
|
|
|
|
if (rt->gcTriggerCompartment == rt->atomsCompartment)
|
|
|
|
rt->gcTriggerCompartment = NULL;
|
|
|
|
|
|
|
|
RunLastDitchGC(cx);
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
}
|
|
|
|
|
2011-05-22 03:09:28 -07:00
|
|
|
} /* namespace gc */
|
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
} /* namespace js */
|