2009-06-10 18:29:44 -07:00
|
|
|
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
|
2007-03-22 10:30:00 -07:00
|
|
|
* vim: set ts=8 sw=4 et tw=78:
|
|
|
|
*
|
|
|
|
* ***** BEGIN LICENSE BLOCK *****
|
|
|
|
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
|
|
|
*
|
|
|
|
* The contents of this file are subject to the Mozilla Public License Version
|
|
|
|
* 1.1 (the "License"); you may not use this file except in compliance with
|
|
|
|
* the License. You may obtain a copy of the License at
|
|
|
|
* http://www.mozilla.org/MPL/
|
|
|
|
*
|
|
|
|
* Software distributed under the License is distributed on an "AS IS" basis,
|
|
|
|
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
|
|
|
* for the specific language governing rights and limitations under the
|
|
|
|
* License.
|
|
|
|
*
|
|
|
|
* The Original Code is Mozilla Communicator client code, released
|
|
|
|
* March 31, 1998.
|
|
|
|
*
|
|
|
|
* The Initial Developer of the Original Code is
|
|
|
|
* Netscape Communications Corporation.
|
|
|
|
* Portions created by the Initial Developer are Copyright (C) 1998
|
|
|
|
* the Initial Developer. All Rights Reserved.
|
|
|
|
*
|
|
|
|
* Contributor(s):
|
|
|
|
*
|
|
|
|
* Alternatively, the contents of this file may be used under the terms of
|
|
|
|
* either of the GNU General Public License Version 2 or later (the "GPL"),
|
|
|
|
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
|
|
|
* in which case the provisions of the GPL or the LGPL are applicable instead
|
|
|
|
* of those above. If you wish to allow use of your version of this file only
|
|
|
|
* under the terms of either the GPL or the LGPL, and not to allow others to
|
|
|
|
* use your version of this file under the terms of the MPL, indicate your
|
|
|
|
* decision by deleting the provisions above and replace them with the notice
|
|
|
|
* and other provisions required by the GPL or the LGPL. If you do not delete
|
|
|
|
* the provisions above, a recipient may use your version of this file under
|
|
|
|
* the terms of any one of the MPL, the GPL or the LGPL.
|
|
|
|
*
|
|
|
|
* ***** END LICENSE BLOCK ***** */
|
|
|
|
|
|
|
|
/*
|
|
|
|
* JS Mark-and-Sweep Garbage Collector.
|
|
|
|
*
|
|
|
|
* This GC allocates fixed-sized things with sizes up to GC_NBYTES_MAX (see
|
|
|
|
* jsgc.h). It allocates from a special GC arena pool with each arena allocated
|
|
|
|
* using malloc. It uses an ideally parallel array of flag bytes to hold the
|
|
|
|
* mark bit, finalizer type index, etc.
|
|
|
|
*
|
|
|
|
* XXX swizzle page to freelist for better locality of reference
|
|
|
|
*/
|
2008-05-28 19:07:32 -07:00
|
|
|
#include <math.h>
|
2007-03-22 10:30:00 -07:00
|
|
|
#include <string.h> /* for memset used when DEBUG */
|
|
|
|
#include "jstypes.h"
|
2009-03-18 11:38:16 -07:00
|
|
|
#include "jsstdint.h"
|
2010-10-01 16:46:54 -07:00
|
|
|
#include "jsutil.h"
|
|
|
|
#include "jshash.h"
|
2007-03-22 10:30:00 -07:00
|
|
|
#include "jsbit.h"
|
|
|
|
#include "jsclist.h"
|
2008-05-28 19:07:32 -07:00
|
|
|
#include "jsprf.h"
|
|
|
|
#include "jsapi.h"
|
|
|
|
#include "jsatom.h"
|
2011-06-20 11:44:20 -07:00
|
|
|
#include "jscompartment.h"
|
2011-07-07 17:31:24 -07:00
|
|
|
#include "jscrashreport.h"
|
|
|
|
#include "jscrashformat.h"
|
2007-03-22 10:30:00 -07:00
|
|
|
#include "jscntxt.h"
|
2008-09-05 10:19:17 -07:00
|
|
|
#include "jsversion.h"
|
2007-03-22 10:30:00 -07:00
|
|
|
#include "jsdbgapi.h"
|
|
|
|
#include "jsexn.h"
|
|
|
|
#include "jsfun.h"
|
|
|
|
#include "jsgc.h"
|
2010-04-12 13:59:19 -07:00
|
|
|
#include "jsgcchunk.h"
|
2011-04-15 16:56:08 -07:00
|
|
|
#include "jsgcmark.h"
|
2011-06-20 11:44:20 -07:00
|
|
|
#include "jshashtable.h"
|
2007-03-22 10:30:00 -07:00
|
|
|
#include "jsinterp.h"
|
|
|
|
#include "jsiter.h"
|
|
|
|
#include "jslock.h"
|
|
|
|
#include "jsnum.h"
|
|
|
|
#include "jsobj.h"
|
2007-07-08 02:03:34 -07:00
|
|
|
#include "jsparse.h"
|
2011-06-20 11:44:20 -07:00
|
|
|
#include "jsprobes.h"
|
2010-05-18 19:21:43 -07:00
|
|
|
#include "jsproxy.h"
|
2007-03-22 10:30:00 -07:00
|
|
|
#include "jsscope.h"
|
|
|
|
#include "jsscript.h"
|
2009-01-30 15:40:05 -08:00
|
|
|
#include "jsstaticcheck.h"
|
2011-07-27 15:44:43 -07:00
|
|
|
#include "jswatchpoint.h"
|
2011-06-20 11:44:20 -07:00
|
|
|
#include "jsweakmap.h"
|
2007-03-22 10:30:00 -07:00
|
|
|
#if JS_HAS_XML_SUPPORT
|
|
|
|
#include "jsxml.h"
|
|
|
|
#endif
|
|
|
|
|
2011-06-20 11:44:20 -07:00
|
|
|
#include "methodjit/MethodJIT.h"
|
|
|
|
#include "vm/String.h"
|
2011-07-27 16:03:34 -07:00
|
|
|
#include "vm/Debugger.h"
|
2011-06-20 11:44:20 -07:00
|
|
|
|
2009-12-30 03:06:26 -08:00
|
|
|
#include "jsobjinlines.h"
|
|
|
|
|
2011-06-20 11:44:20 -07:00
|
|
|
#include "vm/String-inl.h"
|
2010-09-24 10:54:39 -07:00
|
|
|
|
2010-06-21 05:22:32 -07:00
|
|
|
#ifdef MOZ_VALGRIND
|
|
|
|
# define JS_VALGRIND
|
|
|
|
#endif
|
|
|
|
#ifdef JS_VALGRIND
|
|
|
|
# include <valgrind/memcheck.h>
|
|
|
|
#endif
|
|
|
|
|
2010-01-22 14:49:18 -08:00
|
|
|
using namespace js;
|
2010-09-24 10:54:39 -07:00
|
|
|
using namespace js::gc;
|
2010-01-22 14:49:18 -08:00
|
|
|
|
2009-03-05 03:12:50 -08:00
|
|
|
/*
 * Check that JSTRACE_XML follows JSTRACE_OBJECT and JSTRACE_STRING.
 * The conservative scanner and mark paths rely on these exact numeric
 * values for the trace-kind enumeration.
 */
JS_STATIC_ASSERT(JSTRACE_OBJECT == 0);
JS_STATIC_ASSERT(JSTRACE_STRING == 1);
JS_STATIC_ASSERT(JSTRACE_SHAPE == 2);
JS_STATIC_ASSERT(JSTRACE_XML == 3);

/*
 * JS_IS_VALID_TRACE_KIND assumes that JSTRACE_SHAPE is the last non-xml
 * trace kind when JS_HAS_XML_SUPPORT is false.
 */
JS_STATIC_ASSERT(JSTRACE_SHAPE + 1 == JSTRACE_XML);
|
2009-03-05 03:12:50 -08:00
|
|
|
|
2011-03-23 11:57:37 -07:00
|
|
|
namespace js {
|
|
|
|
namespace gc {
|
2010-01-14 00:27:32 -08:00
|
|
|
|
2010-10-13 11:49:22 -07:00
|
|
|
/* This array should be const, but that doesn't link right under GCC. */
/*
 * Map a fixed-slot count (the array index, 0..16) to the smallest object
 * finalize kind whose objects can hold at least that many slots.  Indexed
 * by the number of slots requested; length checked below against
 * SLOTS_TO_THING_KIND_LIMIT.
 */
FinalizeKind slotsToThingKind[] = {
    /* 0 */ FINALIZE_OBJECT0, FINALIZE_OBJECT2, FINALIZE_OBJECT2, FINALIZE_OBJECT4,
    /* 4 */ FINALIZE_OBJECT4, FINALIZE_OBJECT8, FINALIZE_OBJECT8, FINALIZE_OBJECT8,
    /* 8 */ FINALIZE_OBJECT8, FINALIZE_OBJECT12, FINALIZE_OBJECT12, FINALIZE_OBJECT12,
    /* 12 */ FINALIZE_OBJECT12, FINALIZE_OBJECT16, FINALIZE_OBJECT16, FINALIZE_OBJECT16,
    /* 16 */ FINALIZE_OBJECT16
};

/* The table must cover every slot count up to SLOTS_TO_THING_KIND_LIMIT - 1. */
JS_STATIC_ASSERT(JS_ARRAY_LENGTH(slotsToThingKind) == SLOTS_TO_THING_KIND_LIMIT);
|
|
|
|
|
2011-04-19 22:30:10 -07:00
|
|
|
/*
 * Byte size of the GC thing allocated for each finalize kind, indexed by
 * the FINALIZE_* enumerators in declaration order.  The XML entry is
 * conditionally present, matching the conditional enumerator; the assert
 * below keeps the table in sync with FINALIZE_LIMIT.
 */
const uint8 GCThingSizeMap[] = {
    sizeof(JSObject),           /* FINALIZE_OBJECT0             */
    sizeof(JSObject),           /* FINALIZE_OBJECT0_BACKGROUND  */
    sizeof(JSObject_Slots2),    /* FINALIZE_OBJECT2             */
    sizeof(JSObject_Slots2),    /* FINALIZE_OBJECT2_BACKGROUND  */
    sizeof(JSObject_Slots4),    /* FINALIZE_OBJECT4             */
    sizeof(JSObject_Slots4),    /* FINALIZE_OBJECT4_BACKGROUND  */
    sizeof(JSObject_Slots8),    /* FINALIZE_OBJECT8             */
    sizeof(JSObject_Slots8),    /* FINALIZE_OBJECT8_BACKGROUND  */
    sizeof(JSObject_Slots12),   /* FINALIZE_OBJECT12            */
    sizeof(JSObject_Slots12),   /* FINALIZE_OBJECT12_BACKGROUND */
    sizeof(JSObject_Slots16),   /* FINALIZE_OBJECT16            */
    sizeof(JSObject_Slots16),   /* FINALIZE_OBJECT16_BACKGROUND */
    sizeof(JSFunction),         /* FINALIZE_FUNCTION            */
    sizeof(Shape),              /* FINALIZE_SHAPE               */
#if JS_HAS_XML_SUPPORT
    sizeof(JSXML),              /* FINALIZE_XML                 */
#endif
    sizeof(JSShortString),      /* FINALIZE_SHORT_STRING        */
    sizeof(JSString),           /* FINALIZE_STRING              */
    sizeof(JSExternalString),   /* FINALIZE_EXTERNAL_STRING     */
};

/* One entry per finalize kind — keep the table and the enum in lockstep. */
JS_STATIC_ASSERT(JS_ARRAY_LENGTH(GCThingSizeMap) == FINALIZE_LIMIT);
|
2010-04-12 10:15:30 -07:00
|
|
|
|
2011-05-11 05:46:33 -07:00
|
|
|
#ifdef DEBUG
|
|
|
|
void
|
|
|
|
ArenaHeader::checkSynchronizedWithFreeList() const
|
2011-05-20 03:38:31 -07:00
|
|
|
{
|
|
|
|
/*
|
|
|
|
* Do not allow to access the free list when its real head is still stored
|
|
|
|
* in FreeLists and is not synchronized with this one.
|
|
|
|
*/
|
2011-07-24 09:14:10 -07:00
|
|
|
JS_ASSERT(allocated());
|
2011-05-11 05:46:33 -07:00
|
|
|
|
|
|
|
/*
|
|
|
|
* We can be called from the background finalization thread when the free
|
|
|
|
* list in the compartment can mutate at any moment. We cannot do any
|
|
|
|
* checks in this case.
|
|
|
|
*/
|
|
|
|
if (!compartment->rt->gcRunning)
|
|
|
|
return;
|
|
|
|
|
2011-08-05 09:43:59 -07:00
|
|
|
FreeSpan firstSpan = FreeSpan::decodeOffsets(arenaAddress(), firstFreeSpanOffsets);
|
2011-05-11 05:46:33 -07:00
|
|
|
if (firstSpan.isEmpty())
|
|
|
|
return;
|
|
|
|
FreeSpan *list = &compartment->freeLists.lists[getThingKind()];
|
|
|
|
if (list->isEmpty() || firstSpan.arenaAddress() != list->arenaAddress())
|
|
|
|
return;
|
|
|
|
|
|
|
|
/*
|
|
|
|
* Here this arena has free things, FreeList::lists[thingKind] is not
|
|
|
|
* empty and also points to this arena. Thus they must the same.
|
|
|
|
*/
|
2011-08-05 09:43:59 -07:00
|
|
|
JS_ASSERT(firstSpan.isSameNonEmptySpan(list));
|
2011-05-20 03:38:31 -07:00
|
|
|
}
|
2011-05-11 05:46:33 -07:00
|
|
|
#endif
|
2011-05-20 03:38:31 -07:00
|
|
|
|
2011-04-13 13:43:33 -07:00
|
|
|
/*
 * Sweep one arena of things of type T: finalize every unmarked thing,
 * rebuild the arena's free-span list, and report whether the whole arena
 * became empty.
 *
 * Returns true when no thing in the arena is marked (the caller may then
 * release the whole arena); returns false otherwise, after storing the
 * freshly computed free-span list into the arena header.
 */
template<typename T>
inline bool
Arena::finalize(JSContext *cx)
{
    JS_ASSERT(aheader.allocated());
    JS_ASSERT(!aheader.getMarkingDelay()->link);

    /* Iterate over every thing slot from the first thing to the arena end. */
    uintptr_t thing = thingsStart(sizeof(T));
    uintptr_t lastByte = thingsEnd() - 1;

    /* The existing free spans; things inside them are already dead. */
    FreeSpan nextFree(aheader.getFirstFreeSpan());
    nextFree.checkSpan();

    /* Accumulator for the new free-span list built during the sweep. */
    FreeSpan newListHead;
    FreeSpan *newListTail = &newListHead;
    uintptr_t newFreeSpanStart = 0;     /* 0 means "no open span" */
    bool allClear = true;
#ifdef DEBUG
    size_t nmarked = 0;
#endif
    for (;; thing += sizeof(T)) {
        JS_ASSERT(thing <= lastByte + 1);
        if (thing == nextFree.first) {
            /* Entered an existing free span: skip over it wholesale. */
            JS_ASSERT(nextFree.last <= lastByte);
            if (nextFree.last == lastByte)
                break;          /* the final span reaches the arena end */
            JS_ASSERT(Arena::isAligned(nextFree.last, sizeof(T)));
            if (!newFreeSpanStart)
                newFreeSpanStart = thing;
            thing = nextFree.last;
            nextFree = *nextFree.nextSpan();
            nextFree.checkSpan();
        } else {
            T *t = reinterpret_cast<T *>(thing);
            if (t->isMarked()) {
                /* Live thing: close any open free span just before it. */
                allClear = false;
#ifdef DEBUG
                nmarked++;
#endif
                if (newFreeSpanStart) {
                    JS_ASSERT(thing >= thingsStart(sizeof(T)) + sizeof(T));
                    newListTail->first = newFreeSpanStart;
                    newListTail->last = thing - sizeof(T);
                    newListTail = newListTail->nextSpanUnchecked(sizeof(T));
                    newFreeSpanStart = 0;
                }
            } else {
                /* Dead thing: finalize it and extend/open a free span. */
                if (!newFreeSpanStart)
                    newFreeSpanStart = thing;
                t->finalize(cx);
                JS_POISON(t, JS_FREE_PATTERN, sizeof(T));
            }
        }
    }

    if (allClear) {
        /* Nothing survived; the single open span must cover the arena. */
        JS_ASSERT(newListTail == &newListHead);
        JS_ASSERT(newFreeSpanStart == thingsStart(sizeof(T)));
        return true;
    }

    /* Terminate the list with a span that runs to the end of the arena. */
    newListTail->first = newFreeSpanStart ? newFreeSpanStart : nextFree.first;
    JS_ASSERT(Arena::isAligned(newListTail->first, sizeof(T)));
    newListTail->last = lastByte;

#ifdef DEBUG
    /* Cross-check: free + marked things must account for the whole arena. */
    size_t nfree = 0;
    for (const FreeSpan *span = &newListHead; span != newListTail; span = span->nextSpan()) {
        span->checkSpan();
        JS_ASSERT(Arena::isAligned(span->first, sizeof(T)));
        JS_ASSERT(Arena::isAligned(span->last, sizeof(T)));
        nfree += (span->last - span->first) / sizeof(T) + 1;
        JS_ASSERT(nfree + nmarked <= thingsPerArena(sizeof(T)));
    }
    nfree += (newListTail->last + 1 - newListTail->first) / sizeof(T);
    JS_ASSERT(nfree + nmarked == thingsPerArena(sizeof(T)));
#endif
    aheader.setFirstFreeSpan(&newListHead);

    return false;
}
|
|
|
|
|
2011-04-25 13:05:30 -07:00
|
|
|
/*
|
|
|
|
* Finalize arenas from the list. On return listHeadp points to the list of
|
|
|
|
* non-empty arenas.
|
|
|
|
*/
|
|
|
|
template<typename T>
|
|
|
|
static void
|
|
|
|
FinalizeArenas(JSContext *cx, ArenaHeader **listHeadp)
|
|
|
|
{
|
|
|
|
ArenaHeader **ap = listHeadp;
|
|
|
|
while (ArenaHeader *aheader = *ap) {
|
2011-05-19 12:01:08 -07:00
|
|
|
bool allClear = aheader->getArena()->finalize<T>(cx);
|
2011-04-25 13:05:30 -07:00
|
|
|
if (allClear) {
|
|
|
|
*ap = aheader->next;
|
|
|
|
aheader->chunk()->releaseArena(aheader);
|
|
|
|
} else {
|
|
|
|
ap = &aheader->next;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
#ifdef DEBUG
/* Return true iff no arena list of |comp| contains any marked GC thing. */
bool
checkArenaListAllUnmarked(JSCompartment *comp)
{
    for (unsigned kind = 0; kind != FINALIZE_LIMIT; ++kind) {
        if (comp->arenas[kind].markedThingsInArenaList())
            return false;
    }
    return true;
}
#endif
|
2010-05-18 03:01:33 -07:00
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
} /* namespace gc */
|
|
|
|
} /* namespace js */
|
2010-04-12 13:59:19 -07:00
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
void
|
|
|
|
JSCompartment::finishArenaLists()
|
2010-01-14 00:27:32 -08:00
|
|
|
{
|
2011-04-19 22:30:10 -07:00
|
|
|
for (unsigned i = 0; i < FINALIZE_LIMIT; i++)
|
|
|
|
arenas[i].releaseAll(i);
|
2008-02-26 13:01:42 -08:00
|
|
|
}
|
|
|
|
|
2011-04-25 13:05:30 -07:00
|
|
|
void
|
2010-09-24 10:54:39 -07:00
|
|
|
Chunk::init(JSRuntime *rt)
|
2010-04-12 10:15:30 -07:00
|
|
|
{
|
2010-09-24 10:54:39 -07:00
|
|
|
info.runtime = rt;
|
|
|
|
info.age = 0;
|
|
|
|
info.numFree = ArenasPerChunk;
|
2011-07-24 09:14:10 -07:00
|
|
|
|
|
|
|
/* Assemble all arenas into a linked list and mark them as not allocated. */
|
|
|
|
ArenaHeader **prevp = &info.emptyArenaListHead;
|
|
|
|
Arena *end = &arenas[JS_ARRAY_LENGTH(arenas)];
|
|
|
|
for (Arena *a = &arenas[0]; a != end; ++a) {
|
|
|
|
#ifdef DEBUG
|
|
|
|
memset(a, ArenaSize, JS_FREE_PATTERN);
|
|
|
|
#endif
|
|
|
|
*prevp = &a->aheader;
|
|
|
|
a->aheader.setAsNotAllocated();
|
|
|
|
prevp = &a->aheader.next;
|
|
|
|
}
|
|
|
|
*prevp = NULL;
|
|
|
|
|
2011-04-19 22:30:10 -07:00
|
|
|
for (size_t i = 0; i != JS_ARRAY_LENGTH(markingDelay); ++i)
|
|
|
|
markingDelay[i].init();
|
2011-07-26 00:55:23 -07:00
|
|
|
|
|
|
|
/*
|
|
|
|
* The rest of info fields is initailzied in PickChunk. We do not clear
|
|
|
|
* the mark bitmap as that is done at the start of the next GC.
|
|
|
|
*/
|
2010-04-12 10:15:30 -07:00
|
|
|
}
|
|
|
|
|
2011-07-26 00:55:23 -07:00
|
|
|
/*
 * Select the runtime's available-chunk list that chunks belonging to |comp|
 * live on: system and user compartments use separate lists.
 */
inline Chunk **
GetAvailableChunkList(JSCompartment *comp)
{
    JSRuntime *rt = comp->rt;
    if (comp->isSystemCompartment)
        return &rt->gcSystemAvailableChunkListHead;
    return &rt->gcUserAvailableChunkListHead;
}
|
2010-07-15 17:58:36 -07:00
|
|
|
|
2011-07-26 00:55:23 -07:00
|
|
|
inline void
|
|
|
|
Chunk::addToAvailableList(JSCompartment *comp)
|
2010-09-24 10:54:39 -07:00
|
|
|
{
|
2011-07-26 00:55:23 -07:00
|
|
|
Chunk **listHeadp = GetAvailableChunkList(comp);
|
|
|
|
JS_ASSERT(!info.prevp);
|
|
|
|
JS_ASSERT(!info.next);
|
|
|
|
info.prevp = listHeadp;
|
|
|
|
Chunk *head = *listHeadp;
|
|
|
|
if (head) {
|
|
|
|
JS_ASSERT(head->info.prevp == listHeadp);
|
|
|
|
head->info.prevp = &info.next;
|
|
|
|
}
|
|
|
|
info.next = head;
|
|
|
|
*listHeadp = this;
|
2010-04-12 10:15:30 -07:00
|
|
|
}
|
|
|
|
|
2011-07-26 00:55:23 -07:00
|
|
|
inline void
|
|
|
|
Chunk::removeFromAvailableList()
|
2010-04-12 13:59:19 -07:00
|
|
|
{
|
2011-07-26 00:55:23 -07:00
|
|
|
JS_ASSERT(info.prevp);
|
|
|
|
*info.prevp = info.next;
|
|
|
|
if (info.next) {
|
|
|
|
JS_ASSERT(info.next->info.prevp == &info.next);
|
|
|
|
info.next->info.prevp = info.prevp;
|
|
|
|
}
|
|
|
|
info.prevp = NULL;
|
|
|
|
info.next = NULL;
|
2010-04-12 13:59:19 -07:00
|
|
|
}
|
|
|
|
|
2011-05-19 12:01:08 -07:00
|
|
|
/*
 * Take one arena off this chunk's empty-arena list, initialize it for the
 * given thing kind/size, and charge the arena against the runtime and
 * compartment GC-bytes counters.  The chunk must have an available arena.
 */
template <size_t thingSize>
ArenaHeader *
Chunk::allocateArena(JSContext *cx, unsigned thingKind)
{
    JSCompartment *comp = cx->compartment;
    JS_ASSERT(hasAvailableArenas());
    /* Pop the head of the empty list and set it up for |thingKind|. */
    ArenaHeader *aheader = info.emptyArenaListHead;
    info.emptyArenaListHead = aheader->next;
    aheader->init(comp, thingKind, thingSize);
    --info.numFree;

    /* A now-full chunk must leave the available list so PickChunk skips it. */
    if (!hasAvailableArenas())
        removeFromAvailableList();

    JSRuntime *rt = info.runtime;
    Probes::resizeHeap(comp, rt->gcBytes, rt->gcBytes + ArenaSize);
    JS_ATOMIC_ADD(&rt->gcBytes, ArenaSize);
    JS_ATOMIC_ADD(&comp->gcBytes, ArenaSize);
    /* Request a compartment GC once its heap crosses the trigger threshold. */
    if (comp->gcBytes >= comp->gcTriggerBytes)
        TriggerCompartmentGC(comp);

    return aheader;
}
|
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
/*
 * Return a finalized arena to this chunk: undo its GC-bytes accounting,
 * push it back on the empty-arena list, and update the chunk's position on
 * the available/empty chunk lists depending on how full it now is.  May be
 * called from the background sweeping thread, in which case the GC lock is
 * taken for the duration.
 */
void
Chunk::releaseArena(ArenaHeader *aheader)
{
    JS_ASSERT(aheader->allocated());
    JSRuntime *rt = info.runtime;
#ifdef JS_THREADSAFE
    /* Only lock when racing with the background sweeping thread. */
    Maybe<AutoLockGC> maybeLock;
    if (rt->gcHelperThread.sweeping)
        maybeLock.construct(info.runtime);
#endif
    JSCompartment *comp = aheader->compartment;

    Probes::resizeHeap(comp, rt->gcBytes, rt->gcBytes - ArenaSize);
    JS_ASSERT(size_t(rt->gcBytes) >= ArenaSize);
    JS_ASSERT(size_t(comp->gcBytes) >= ArenaSize);
#ifdef JS_THREADSAFE
    if (rt->gcHelperThread.sweeping) {
        /* Shrink the GC triggers to match the memory being released. */
        rt->reduceGCTriggerBytes(GC_HEAP_GROWTH_FACTOR * ArenaSize);
        comp->reduceGCTriggerBytes(GC_HEAP_GROWTH_FACTOR * ArenaSize);
    }
#endif
    JS_ATOMIC_ADD(&rt->gcBytes, -int32(ArenaSize));
    JS_ATOMIC_ADD(&comp->gcBytes, -int32(ArenaSize));

    aheader->setAsNotAllocated();
    aheader->next = info.emptyArenaListHead;
    info.emptyArenaListHead = aheader;
    ++info.numFree;
    if (info.numFree == 1) {
        /* The chunk was full; it becomes available again. */
        JS_ASSERT(!info.prevp);
        JS_ASSERT(!info.next);
        addToAvailableList(aheader->compartment);
    } else if (!unused()) {
        /* Still partially used: it is already on an available list. */
        JS_ASSERT(info.prevp);
    } else {
        /* Fully empty: move it from the chunk set to the empty list. */
        rt->gcChunkSet.remove(this);
        removeFromAvailableList();

        /*
         * We keep empty chunks until we are done with finalization to allow
         * calling IsAboutToBeFinalized/Cell::isMarked for finalized GC things
         * in empty chunks. So we add the chunk to the empty set even during
         * GC_SHRINK.
         */
        info.age = 0;
        info.next = rt->gcEmptyChunkListHead;
        rt->gcEmptyChunkListHead = this;
        rt->gcEmptyChunkCount++;
    }
}
|
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
/* Obtain a raw chunk from the runtime's chunk allocator; NULL on OOM. */
inline Chunk *
AllocateGCChunk(JSRuntime *rt)
{
    Chunk *chunk = (Chunk *) rt->gcChunkAllocator->alloc();
#ifdef MOZ_GCTIMER
    if (chunk)
        JS_ATOMIC_INCREMENT(&newChunkCount);
#endif
    return chunk;
}
|
|
|
|
|
|
|
|
/* Hand a chunk's memory back to the runtime's chunk allocator. */
inline void
ReleaseGCChunk(JSRuntime *rt, Chunk *p)
{
    JS_ASSERT(p);
#ifdef MOZ_GCTIMER
    /* Bookkeeping for the GC timer instrumentation build. */
    JS_ATOMIC_INCREMENT(&destroyChunkCount);
#endif
    rt->gcChunkAllocator->free_(p);
}
|
|
|
|
|
2011-04-25 13:05:30 -07:00
|
|
|
/*
 * Find a chunk with a free arena for the current compartment.  Preference
 * order: an already-available chunk for this compartment's list, then a
 * cached empty chunk, then a freshly allocated one.  A chunk taken from
 * the empty list or newly allocated is registered in the runtime's chunk
 * set and linked onto the compartment's available list.  Returns NULL on
 * allocation failure.
 */
inline Chunk *
PickChunk(JSContext *cx)
{
    JSCompartment *comp = cx->compartment;
    JSRuntime *rt = comp->rt;
    Chunk **listHeadp = GetAvailableChunkList(comp);
    Chunk *chunk = *listHeadp;
    if (chunk)
        return chunk;

    /*
     * We do not have available chunks, either get one from the empty set or
     * allocate one.
     */
    chunk = rt->gcEmptyChunkListHead;
    if (chunk) {
        /* Empty chunks are not in the chunk set; re-register below. */
        JS_ASSERT(chunk->unused());
        JS_ASSERT(!rt->gcChunkSet.has(chunk));
        JS_ASSERT(rt->gcEmptyChunkCount >= 1);
        rt->gcEmptyChunkListHead = chunk->info.next;
        rt->gcEmptyChunkCount--;
    } else {
        chunk = AllocateGCChunk(rt);
        if (!chunk)
            return NULL;

        chunk->init(rt);
        rt->gcChunkAllocationSinceLastGC = true;
    }

    /*
     * FIXME bug 583732 - chunk is newly allocated and cannot be present in
     * the table so using ordinary lookupForAdd is suboptimal here.
     */
    GCChunkSet::AddPtr p = rt->gcChunkSet.lookupForAdd(chunk);
    JS_ASSERT(!p);
    if (!rt->gcChunkSet.add(p, chunk)) {
        /* Table OOM: give the chunk back rather than leak it. */
        ReleaseGCChunk(rt, chunk);
        return NULL;
    }

    chunk->info.prevp = NULL;
    chunk->info.next = NULL;
    chunk->addToAvailableList(comp);

    return chunk;
}
|
2010-04-12 13:59:19 -07:00
|
|
|
|
2010-04-22 23:58:44 -07:00
|
|
|
static void
|
2011-06-20 14:44:26 -07:00
|
|
|
ExpireGCChunks(JSRuntime *rt, JSGCInvocationKind gckind)
|
2010-04-22 23:58:44 -07:00
|
|
|
{
|
2010-09-24 10:54:39 -07:00
|
|
|
AutoLockGC lock(rt);
|
2010-04-12 13:59:19 -07:00
|
|
|
|
2011-07-26 00:55:23 -07:00
|
|
|
/* Return old empty chunks to the system. */
|
|
|
|
for (Chunk **chunkp = &rt->gcEmptyChunkListHead; *chunkp; ) {
|
|
|
|
JS_ASSERT(rt->gcEmptyChunkCount);
|
|
|
|
Chunk *chunk = *chunkp;
|
|
|
|
JS_ASSERT(chunk->unused());
|
|
|
|
JS_ASSERT(!rt->gcChunkSet.has(chunk));
|
|
|
|
JS_ASSERT(chunk->info.age <= MAX_EMPTY_CHUNK_AGE);
|
|
|
|
if (gckind == GC_SHRINK || chunk->info.age == MAX_EMPTY_CHUNK_AGE) {
|
|
|
|
*chunkp = chunk->info.next;
|
|
|
|
--rt->gcEmptyChunkCount;
|
|
|
|
ReleaseGCChunk(rt, chunk);
|
|
|
|
} else {
|
|
|
|
/* Keep the chunk but increase its age. */
|
|
|
|
++chunk->info.age;
|
|
|
|
chunkp = &chunk->info.next;
|
2011-07-04 21:14:33 -07:00
|
|
|
}
|
|
|
|
}
|
2010-04-22 23:58:44 -07:00
|
|
|
}
|
2009-09-17 15:40:37 -07:00
|
|
|
|
2010-07-15 17:58:36 -07:00
|
|
|
JS_FRIEND_API(bool)
|
2011-03-23 11:57:15 -07:00
|
|
|
IsAboutToBeFinalized(JSContext *cx, const void *thing)
|
2007-03-22 10:30:00 -07:00
|
|
|
{
|
2011-03-14 13:59:53 -07:00
|
|
|
if (JSAtom::isStatic(thing))
|
2009-12-22 12:50:44 -08:00
|
|
|
return false;
|
2011-01-07 23:44:57 -08:00
|
|
|
JS_ASSERT(cx);
|
|
|
|
|
2011-03-23 11:57:15 -07:00
|
|
|
JSCompartment *thingCompartment = reinterpret_cast<const Cell *>(thing)->compartment();
|
2011-01-07 23:44:57 -08:00
|
|
|
JSRuntime *rt = cx->runtime;
|
2011-01-08 20:06:29 -08:00
|
|
|
JS_ASSERT(rt == thingCompartment->rt);
|
2011-01-07 23:44:57 -08:00
|
|
|
if (rt->gcCurrentCompartment != NULL && rt->gcCurrentCompartment != thingCompartment)
|
|
|
|
return false;
|
2009-12-22 12:50:44 -08:00
|
|
|
|
2011-03-23 11:57:15 -07:00
|
|
|
return !reinterpret_cast<const Cell *>(thing)->isMarked();
|
2007-03-22 10:30:00 -07:00
|
|
|
}
|
|
|
|
|
2010-07-15 17:58:36 -07:00
|
|
|
JS_FRIEND_API(bool)
|
2011-02-16 12:47:08 -08:00
|
|
|
js_GCThingIsMarked(void *thing, uintN color = BLACK)
|
2010-07-15 17:58:36 -07:00
|
|
|
{
|
2010-09-24 10:54:39 -07:00
|
|
|
JS_ASSERT(thing);
|
|
|
|
AssertValidColor(thing, color);
|
2011-05-04 09:26:44 -07:00
|
|
|
JS_ASSERT(!JSAtom::isStatic(thing));
|
2010-09-24 10:54:39 -07:00
|
|
|
return reinterpret_cast<Cell *>(thing)->isMarked(color);
|
2010-07-15 17:58:36 -07:00
|
|
|
}
|
|
|
|
|
2010-12-17 16:33:04 -08:00
|
|
|
/*
 * 1/8 life for JIT code. After this number of microseconds have passed, 1/8 of all
 * JIT code is discarded in inactive compartments, regardless of how often that
 * code runs.
 */
/* 120 seconds, expressed in microseconds (PRMJ_Now units). */
static const int64 JIT_SCRIPT_EIGHTH_LIFETIME = 120 * 1000 * 1000;
|
|
|
|
|
2007-03-22 10:30:00 -07:00
|
|
|
/*
 * One-time GC initialization for a runtime: set up the chunk set, the
 * roots/locks hash tables, the GC lock, condition variables and helper
 * thread (threadsafe builds), and the initial byte limits and triggers.
 * Returns false on any allocation/initialization failure.
 */
JSBool
js_InitGC(JSRuntime *rt, uint32 maxbytes)
{
    if (!rt->gcChunkSet.init(INITIAL_CHUNK_CAPACITY))
        return false;

    if (!rt->gcRootsHash.init(256))
        return false;

    if (!rt->gcLocksHash.init(256))
        return false;

#ifdef JS_THREADSAFE
    /* The condvars below attach to gcLock, so it must be created first. */
    rt->gcLock = JS_NEW_LOCK();
    if (!rt->gcLock)
        return false;
    rt->gcDone = JS_NEW_CONDVAR(rt->gcLock);
    if (!rt->gcDone)
        return false;
    rt->requestDone = JS_NEW_CONDVAR(rt->gcLock);
    if (!rt->requestDone)
        return false;
    if (!rt->gcHelperThread.init(rt))
        return false;
#endif

    /*
     * Separate gcMaxMallocBytes from gcMaxBytes but initialize to maxbytes
     * for default backward API compatibility.
     */
    rt->gcMaxBytes = maxbytes;
    rt->setGCMaxMallocBytes(maxbytes);
    rt->gcEmptyArenaPoolLifespan = 30000;

    /*
     * The assigned value prevents GC from running when GC memory is too low
     * (during JS engine start).
     */
    rt->setGCLastBytes(8192, GC_NORMAL);

    /* Schedule the first periodic JIT-code release. */
    rt->gcJitReleaseTime = PRMJ_Now() + JIT_SCRIPT_EIGHTH_LIFETIME;
    return true;
}
|
|
|
|
|
2010-06-04 07:22:28 -07:00
|
|
|
namespace js {
|
|
|
|
|
2011-04-19 22:30:10 -07:00
|
|
|
/*
 * Test whether |addr| (an aligned thing address inside the arena) lies on
 * the arena's free-span list, i.e. whether that thing slot is currently
 * free rather than holding a live or newly allocated thing.
 */
inline bool
InFreeList(ArenaHeader *aheader, uintptr_t addr)
{
    if (!aheader->hasFreeThings())
        return false;

    FreeSpan firstSpan(aheader->getFirstFreeSpan());

    /* Spans are sorted by address, so we can walk them in order. */
    for (const FreeSpan *span = &firstSpan;;) {
        /* If the thing comes before the current span, it's not free. */
        if (addr < span->first)
            return false;

        /*
         * If we find it inside the span, it's dead. We use here "<=" and not
         * "<" even for the last span as we know that thing is inside the
         * arena. Thus for the last span thing < span->end.
         */
        if (addr <= span->last)
            return true;

        /*
         * The last possible empty span is at the end of the arena. Here
         * span->end < thing < thingsEnd and so we must have more spans.
         */
        span = span->nextSpan();
    }
}
|
|
|
|
|
2010-10-13 11:49:22 -07:00
|
|
|
/*
 * Conservatively mark a candidate pointer |addr| that falls inside the
 * arena described by |aheader| and holds things of type T.  The address is
 * aligned down to a thing boundary, rejected if it precedes the first
 * thing or lies on the free list, and otherwise marked as a root.
 */
template <typename T>
inline ConservativeGCTest
MarkArenaPtrConservatively(JSTracer *trc, ArenaHeader *aheader, uintptr_t addr)
{
    JS_ASSERT(aheader->allocated());
    JS_ASSERT(sizeof(T) == aheader->getThingSize());

    /* Reject addresses pointing into the arena header area. */
    uintptr_t offset = addr & ArenaMask;
    uintptr_t minOffset = Arena::thingsStartOffset(sizeof(T));
    if (offset < minOffset)
        return CGCT_NOTARENA;

    /* addr can point inside the thing so we must align the address. */
    uintptr_t shift = (offset - minOffset) % sizeof(T);
    addr -= shift;

    /*
     * Check if the thing is free. We must use the list of free spans as at
     * this point we no longer have the mark bits from the previous GC run and
     * we must account for newly allocated things.
     */
    if (InFreeList(aheader, addr))
        return CGCT_NOTLIVE;

    T *thing = reinterpret_cast<T *>(addr);
    MarkRoot(trc, thing, "machine stack");

#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    /* Record statistics about conservative roots for dump builds. */
    if (IS_GC_MARKING_TRACER(trc)) {
        GCMarker *marker = static_cast<GCMarker *>(trc);
        if (marker->conservativeDumpFileName)
            marker->conservativeRoots.append(thing);
        if (shift)
            marker->conservativeStats.unaligned++;
    }
#endif
    return CGCT_VALID;
}
|
|
|
|
|
2010-08-05 05:16:56 -07:00
|
|
|
/*
 * Returns CGCT_VALID and marks the thing if |w| can be a live GC thing;
 * otherwise returns the reason for rejection. The thing kind is read from
 * the arena header and used to dispatch to the size-templated marker.
 */
inline ConservativeGCTest
MarkIfGCThingWord(JSTracer *trc, jsuword w)
{
    /*
     * We assume that the compiler never uses sub-word alignment to store
     * pointers and does not tag pointers on its own. Additionally, the value
     * representation for all values and the jsid representation for GC-things
     * do not touch the low two bits. Thus any word with the low two bits set
     * is not a valid GC-thing.
     */
    JS_STATIC_ASSERT(JSID_TYPE_STRING == 0 && JSID_TYPE_OBJECT == 4);
    if (w & 0x3)
        return CGCT_LOWBITSET;

    /*
     * An object jsid has its low bits tagged. In the value representation on
     * 64-bit, the high bits are tagged.
     */
    const jsuword JSID_PAYLOAD_MASK = ~jsuword(JSID_TYPE_MASK);
#if JS_BITS_PER_WORD == 32
    jsuword addr = w & JSID_PAYLOAD_MASK;
#elif JS_BITS_PER_WORD == 64
    jsuword addr = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK;
#endif

    Chunk *chunk = Chunk::fromAddress(addr);

    /* Only addresses inside a chunk this runtime owns can be GC things. */
    if (!trc->context->runtime->gcChunkSet.has(chunk))
        return CGCT_NOTCHUNK;

    /*
     * We query for pointers outside the arena array after checking for an
     * allocated chunk. Such pointers are rare and we want to reject them
     * after doing more likely rejections.
     */
    if (!Chunk::withinArenasRange(addr))
        return CGCT_NOTARENA;

    ArenaHeader *aheader = &chunk->arenas[Chunk::arenaIndex(addr)].aheader;

    if (!aheader->allocated())
        return CGCT_FREEARENA;

    ConservativeGCTest test;
    unsigned thingKind = aheader->getThingKind();

    /* Dispatch on the thing kind so the marker knows the thing size. */
    switch (thingKind) {
      case FINALIZE_OBJECT0:
      case FINALIZE_OBJECT0_BACKGROUND:
        test = MarkArenaPtrConservatively<JSObject>(trc, aheader, addr);
        break;
      case FINALIZE_OBJECT2:
      case FINALIZE_OBJECT2_BACKGROUND:
        test = MarkArenaPtrConservatively<JSObject_Slots2>(trc, aheader, addr);
        break;
      case FINALIZE_OBJECT4:
      case FINALIZE_OBJECT4_BACKGROUND:
        test = MarkArenaPtrConservatively<JSObject_Slots4>(trc, aheader, addr);
        break;
      case FINALIZE_OBJECT8:
      case FINALIZE_OBJECT8_BACKGROUND:
        test = MarkArenaPtrConservatively<JSObject_Slots8>(trc, aheader, addr);
        break;
      case FINALIZE_OBJECT12:
      case FINALIZE_OBJECT12_BACKGROUND:
        test = MarkArenaPtrConservatively<JSObject_Slots12>(trc, aheader, addr);
        break;
      case FINALIZE_OBJECT16:
      case FINALIZE_OBJECT16_BACKGROUND:
        test = MarkArenaPtrConservatively<JSObject_Slots16>(trc, aheader, addr);
        break;
      case FINALIZE_STRING:
        test = MarkArenaPtrConservatively<JSString>(trc, aheader, addr);
        break;
      case FINALIZE_EXTERNAL_STRING:
        test = MarkArenaPtrConservatively<JSExternalString>(trc, aheader, addr);
        break;
      case FINALIZE_SHORT_STRING:
        test = MarkArenaPtrConservatively<JSShortString>(trc, aheader, addr);
        break;
      case FINALIZE_FUNCTION:
        test = MarkArenaPtrConservatively<JSFunction>(trc, aheader, addr);
        break;
      case FINALIZE_SHAPE:
        test = MarkArenaPtrConservatively<Shape>(trc, aheader, addr);
        break;
#if JS_HAS_XML_SUPPORT
      case FINALIZE_XML:
        test = MarkArenaPtrConservatively<JSXML>(trc, aheader, addr);
        break;
#endif
      default:
        test = CGCT_WRONGTAG;
        JS_NOT_REACHED("wrong tag");
    }

    return test;
}
|
|
|
|
|
|
|
|
/* Conservatively mark a single machine word as a potential GC-thing pointer. */
static void
MarkWordConservatively(JSTracer *trc, jsuword w)
{
    /*
     * The conservative scanner may access words that valgrind considers as
     * undefined. To avoid false positives and not to alter valgrind view of
     * the memory we make as memcheck-defined the argument, a copy of the
     * original word. See bug 572678.
     */
#ifdef JS_VALGRIND
    VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w));
#endif

    MarkIfGCThingWord(trc, w);
}
|
|
|
|
|
2010-08-05 05:16:56 -07:00
|
|
|
static void
|
2010-11-08 14:35:06 -08:00
|
|
|
MarkRangeConservatively(JSTracer *trc, const jsuword *begin, const jsuword *end)
|
2010-06-04 07:22:28 -07:00
|
|
|
{
|
|
|
|
JS_ASSERT(begin <= end);
|
2010-11-08 14:35:06 -08:00
|
|
|
for (const jsuword *i = begin; i != end; ++i)
|
2010-08-05 05:16:56 -07:00
|
|
|
MarkWordConservatively(trc, *i);
|
2010-06-04 07:22:28 -07:00
|
|
|
}
|
|
|
|
|
2010-08-30 11:46:18 -07:00
|
|
|
/*
 * Conservatively scan one thread's native stack (between the recorded base
 * and top) and its saved register snapshot.
 */
static void
MarkThreadDataConservatively(JSTracer *trc, ThreadData *td)
{
    ConservativeGCThreadData *ctd = &td->conservativeGC;
    JS_ASSERT(ctd->hasStackToScan());
    jsuword *stackMin, *stackEnd;
    /* Orient the [stackMin, stackEnd) range by the platform's growth direction. */
#if JS_STACK_GROWTH_DIRECTION > 0
    stackMin = td->nativeStackBase;
    stackEnd = ctd->nativeStackTop;
#else
    stackMin = ctd->nativeStackTop + 1;
    stackEnd = td->nativeStackBase;
#endif
    JS_ASSERT(stackMin <= stackEnd);
    MarkRangeConservatively(trc, stackMin, stackEnd);
    /* Registers may also hold the only reference to a GC thing. */
    MarkRangeConservatively(trc, ctd->registerSnapshot.words,
                            JS_ARRAY_END(ctd->registerSnapshot.words));
}
|
|
|
|
|
2010-07-29 17:13:08 -07:00
|
|
|
void
|
2010-08-05 15:57:34 -07:00
|
|
|
MarkStackRangeConservatively(JSTracer *trc, Value *beginv, Value *endv)
|
2010-07-29 17:13:08 -07:00
|
|
|
{
|
2010-11-08 14:35:06 -08:00
|
|
|
const jsuword *begin = beginv->payloadWord();
|
|
|
|
const jsuword *end = endv->payloadWord();;
|
2010-08-05 15:57:34 -07:00
|
|
|
#ifdef JS_NUNBOX32
|
|
|
|
/*
|
|
|
|
* With 64-bit jsvals on 32-bit systems, we can optimize a bit by
|
|
|
|
* scanning only the payloads.
|
|
|
|
*/
|
|
|
|
JS_ASSERT(begin <= end);
|
2010-11-08 14:35:06 -08:00
|
|
|
for (const jsuword *i = begin; i != end; i += sizeof(Value)/sizeof(jsuword))
|
2010-08-05 15:57:34 -07:00
|
|
|
MarkWordConservatively(trc, *i);
|
|
|
|
#else
|
|
|
|
MarkRangeConservatively(trc, begin, end);
|
|
|
|
#endif
|
2010-07-29 17:13:08 -07:00
|
|
|
}
|
|
|
|
|
2010-06-04 07:22:28 -07:00
|
|
|
/*
 * Conservatively scan the native stacks of every thread that recorded one.
 * Threads without a recorded stack must be outside a request (or suspended),
 * which the assertions check.
 */
void
MarkConservativeStackRoots(JSTracer *trc)
{
#ifdef JS_THREADSAFE
    for (JSThread::Map::Range r = trc->context->runtime->threads.all(); !r.empty(); r.popFront()) {
        JSThread *thread = r.front().value;
        ConservativeGCThreadData *ctd = &thread->data.conservativeGC;
        if (ctd->hasStackToScan()) {
            /* A scannable stack with no active request implies a suspended thread. */
            JS_ASSERT_IF(!thread->data.requestDepth, thread->suspendCount);
            MarkThreadDataConservatively(trc, &thread->data);
        } else {
            JS_ASSERT(!thread->suspendCount);
            JS_ASSERT(thread->data.requestDepth <= ctd->requestThreshold);
        }
    }
#else
    /* Single-threaded build: only the runtime's one thread-data to scan. */
    MarkThreadDataConservatively(trc, &trc->context->runtime->threadData);
#endif
}
|
|
|
|
|
2010-08-30 11:46:18 -07:00
|
|
|
/*
 * Record the current native stack pointer and a snapshot of the CPU
 * registers so the conservative scanner can later cover both. Never
 * inlined so the address of the local reflects the true stack top.
 */
JS_NEVER_INLINE void
ConservativeGCThreadData::recordStackTop()
{
    /* Update the native stack pointer if it points to a bigger stack. */
    jsuword dummy;
    nativeStackTop = &dummy;

    /*
     * To record and update the register snapshot for the conservative
     * scanning with the latest values we use setjmp.
     */
#if defined(_MSC_VER)
# pragma warning(push)
# pragma warning(disable: 4611)
#endif
    (void) setjmp(registerSnapshot.jmpbuf);
#if defined(_MSC_VER)
# pragma warning(pop)
#endif
}
|
|
|
|
|
2010-08-30 11:46:18 -07:00
|
|
|
/*
 * Record the stack top for cx's thread ahead of a GC, but only when the
 * thread is inside a request (otherwise there is nothing live to scan).
 */
static inline void
RecordNativeStackTopForGC(JSContext *cx)
{
    ConservativeGCThreadData *ctd = &JS_THREAD_DATA(cx)->conservativeGC;

#ifdef JS_THREADSAFE
    /* Record the stack top here only if we are called from a request. */
    JS_ASSERT(cx->thread()->data.requestDepth >= ctd->requestThreshold);
    if (cx->thread()->data.requestDepth == ctd->requestThreshold)
        return;
#endif
    ctd->recordStackTop();
}
|
|
|
|
|
|
|
|
} /* namespace js */
|
|
|
|
|
2007-03-22 10:30:00 -07:00
|
|
|
#ifdef DEBUG
|
|
|
|
static void
|
|
|
|
CheckLeakedRoots(JSRuntime *rt);
|
|
|
|
#endif
|
|
|
|
|
|
|
|
/*
 * Tear down all GC state for a dying runtime: compartments first, then the
 * chunk lists, the helper thread, and finally the root tables (after
 * warning about any leaked roots in DEBUG builds).
 */
void
js_FinishGC(JSRuntime *rt)
{
    /* Delete all remaining Compartments. */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c) {
        JSCompartment *comp = *c;
        comp->finishArenaLists();
        Foreground::delete_(comp);
    }
    rt->compartments.clear();
    rt->atomsCompartment = NULL;

    /* Release every chunk: the allocated set first, then the empty list. */
    rt->gcSystemAvailableChunkListHead = NULL;
    rt->gcUserAvailableChunkListHead = NULL;
    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
        ReleaseGCChunk(rt, r.front());
    rt->gcChunkSet.clear();
    for (Chunk *chunk = rt->gcEmptyChunkListHead; chunk; ) {
        Chunk *next = chunk->info.next;
        ReleaseGCChunk(rt, chunk);
        chunk = next;
    }
    rt->gcEmptyChunkListHead = NULL;
    rt->gcEmptyChunkCount = 0;

#ifdef JS_THREADSAFE
    rt->gcHelperThread.finish(rt);
#endif

#ifdef DEBUG
    /* Warn about roots the embedding never removed. */
    if (!rt->gcRootsHash.empty())
        CheckLeakedRoots(rt);
#endif
    rt->gcRootsHash.clear();
    rt->gcLocksHash.clear();
}
|
|
|
|
|
|
|
|
/* Register *vp as a GC root, reporting out-of-memory on cx if it fails. */
JSBool
js_AddRoot(JSContext *cx, Value *vp, const char *name)
{
    if (js_AddRootRT(cx->runtime, Jsvalify(vp), name))
        return JS_TRUE;
    JS_ReportOutOfMemory(cx);
    return JS_FALSE;
}
|
|
|
|
|
|
|
|
/* Register *rp as a GC-thing root, reporting out-of-memory on cx if it fails. */
JSBool
js_AddGCThingRoot(JSContext *cx, void **rp, const char *name)
{
    if (js_AddGCThingRootRT(cx->runtime, rp, name))
        return JS_TRUE;
    JS_ReportOutOfMemory(cx);
    return JS_FALSE;
}
|
|
|
|
|
2010-07-14 23:19:36 -07:00
|
|
|
/*
 * Add a value root directly on the runtime. API users have historically
 * relied on JS_AddRoot locking correctly against a racing GC even when
 * called outside a request (rt->gcLock once covered the whole GC); keep
 * that contract by taking the lock and waiting for any GC in progress
 * before touching the root table.
 */
JS_FRIEND_API(JSBool)
js_AddRootRT(JSRuntime *rt, jsval *vp, const char *name)
{
    AutoLockGC lock(rt);
    js_WaitForGC(rt);

    bool added = !!rt->gcRootsHash.put((void *)vp,
                                       RootInfo(name, JS_GC_ROOT_VALUE_PTR));
    return added ? JS_TRUE : JS_FALSE;
}
|
|
|
|
|
|
|
|
/*
 * Add a GC-thing root directly on the runtime. Same synchronization drill
 * as js_AddRootRT: the API must lock correctly against a racing GC even
 * when invoked outside a request, so take the GC lock and wait for any
 * active GC before mutating the root table.
 */
JS_FRIEND_API(JSBool)
js_AddGCThingRootRT(JSRuntime *rt, void **rp, const char *name)
{
    AutoLockGC lock(rt);
    js_WaitForGC(rt);

    bool added = !!rt->gcRootsHash.put((void *)rp,
                                       RootInfo(name, JS_GC_ROOT_GCTHING_PTR));
    return added ? JS_TRUE : JS_FALSE;
}
|
|
|
|
|
|
|
|
/*
 * Unregister a root. The JS_RemoveRootRT API may be called outside of a
 * request, so use the same lock-and-wait protocol as js_AddRootRT. Poking
 * the GC lets the next cycle collect anything the root kept alive.
 */
JS_FRIEND_API(JSBool)
js_RemoveRoot(JSRuntime *rt, void *rp)
{
    AutoLockGC lock(rt);
    js_WaitForGC(rt);
    rt->gcRootsHash.remove(rp);
    rt->gcPoke = JS_TRUE;
    return JS_TRUE;
}
|
|
|
|
|
2010-07-14 23:19:36 -07:00
|
|
|
typedef RootedValueMap::Range RootRange;
|
|
|
|
typedef RootedValueMap::Entry RootEntry;
|
|
|
|
typedef RootedValueMap::Enum RootEnum;
|
|
|
|
|
2007-03-22 10:30:00 -07:00
|
|
|
#ifdef DEBUG
|
|
|
|
|
|
|
|
/*
 * DEBUG-only: print a warning for every root still registered when the
 * runtime dies. Leaked roots may point to freed memory, so we warn rather
 * than assert.
 */
static void
CheckLeakedRoots(JSRuntime *rt)
{
    uint32 leakedroots = 0;

    /* Warn (but don't assert) debug builds of any remaining roots. */
    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
        RootEntry &entry = r.front();
        leakedroots++;
        fprintf(stderr,
                "JS engine warning: leaking GC root \'%s\' at %p\n",
                entry.value.name ? entry.value.name : "", entry.key);
    }

    /* Summarize, with singular/plural phrasing chosen by the count. */
    if (leakedroots > 0) {
        if (leakedroots == 1) {
            fprintf(stderr,
"JS engine warning: 1 GC root remains after destroying the JSRuntime at %p.\n"
" This root may point to freed memory. Objects reachable\n"
" through it have not been finalized.\n",
                    (void *) rt);
        } else {
            fprintf(stderr,
"JS engine warning: %lu GC roots remain after destroying the JSRuntime at %p.\n"
" These roots may point to freed memory. Objects reachable\n"
" through them have not been finalized.\n",
                    (unsigned long) leakedroots, (void *) rt);
        }
    }
}
|
|
|
|
|
|
|
|
void
|
|
|
|
js_DumpNamedRoots(JSRuntime *rt,
|
2010-07-14 23:19:36 -07:00
|
|
|
void (*dump)(const char *name, void *rp, JSGCRootType type, void *data),
|
2007-03-22 10:30:00 -07:00
|
|
|
void *data)
|
|
|
|
{
|
2010-07-14 23:19:36 -07:00
|
|
|
for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
|
|
|
|
RootEntry &entry = r.front();
|
|
|
|
if (const char *name = entry.value.name)
|
|
|
|
dump(name, entry.key, entry.value.type, data);
|
2010-05-20 13:50:08 -07:00
|
|
|
}
|
2007-03-22 10:30:00 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
#endif /* DEBUG */
|
|
|
|
|
|
|
|
/*
 * Walk every registered root under the GC lock, invoking |map| on each
 * entry. The callback's flags may remove the current entry and/or stop
 * the iteration. Returns the number of entries visited.
 */
uint32
js_MapGCRoots(JSRuntime *rt, JSGCRootMapFun map, void *data)
{
    AutoLockGC lock(rt);
    /* Use uint32 for the counter to match the return type (was a signed int). */
    uint32 ct = 0;
    for (RootEnum e(rt->gcRootsHash); !e.empty(); e.popFront()) {
        RootEntry &entry = e.front();

        ct++;
        intN mapflags = map(entry.key, entry.value.type, entry.value.name, data);

        if (mapflags & JS_MAP_GCROOT_REMOVE)
            e.removeFront();
        if (mapflags & JS_MAP_GCROOT_STOP)
            break;
    }

    return ct;
}
|
|
|
|
|
2009-08-25 14:42:42 -07:00
|
|
|
void
|
2011-06-20 14:44:26 -07:00
|
|
|
JSRuntime::setGCLastBytes(size_t lastBytes, JSGCInvocationKind gckind)
|
2009-08-25 14:42:42 -07:00
|
|
|
{
|
|
|
|
gcLastBytes = lastBytes;
|
2011-06-20 14:44:26 -07:00
|
|
|
|
2011-07-25 04:04:02 -07:00
|
|
|
size_t base = gckind == GC_SHRINK ? lastBytes : Max(lastBytes, GC_ALLOCATION_THRESHOLD);
|
2011-06-20 14:44:26 -07:00
|
|
|
float trigger = float(base) * GC_HEAP_GROWTH_FACTOR;
|
2011-05-10 22:37:11 -07:00
|
|
|
gcTriggerBytes = size_t(Min(float(gcMaxBytes), trigger));
|
2009-08-25 14:42:42 -07:00
|
|
|
}
|
|
|
|
|
2011-04-13 13:43:33 -07:00
|
|
|
void
|
|
|
|
JSRuntime::reduceGCTriggerBytes(uint32 amount) {
|
|
|
|
JS_ASSERT(amount > 0);
|
2011-06-20 14:44:26 -07:00
|
|
|
JS_ASSERT(gcTriggerBytes - amount >= 0);
|
2011-07-25 04:04:02 -07:00
|
|
|
if (gcTriggerBytes - amount < GC_ALLOCATION_THRESHOLD * GC_HEAP_GROWTH_FACTOR)
|
2011-04-13 13:43:33 -07:00
|
|
|
return;
|
|
|
|
gcTriggerBytes -= amount;
|
|
|
|
}
|
|
|
|
|
2011-01-07 23:44:57 -08:00
|
|
|
void
|
2011-06-20 14:44:26 -07:00
|
|
|
JSCompartment::setGCLastBytes(size_t lastBytes, JSGCInvocationKind gckind)
|
2011-01-07 23:44:57 -08:00
|
|
|
{
|
|
|
|
gcLastBytes = lastBytes;
|
2011-06-20 14:44:26 -07:00
|
|
|
|
2011-07-25 04:04:02 -07:00
|
|
|
size_t base = gckind == GC_SHRINK ? lastBytes : Max(lastBytes, GC_ALLOCATION_THRESHOLD);
|
2011-06-20 14:44:26 -07:00
|
|
|
float trigger = float(base) * GC_HEAP_GROWTH_FACTOR;
|
2011-05-10 22:37:11 -07:00
|
|
|
gcTriggerBytes = size_t(Min(float(rt->gcMaxBytes), trigger));
|
2011-01-07 23:44:57 -08:00
|
|
|
}
|
|
|
|
|
2011-04-13 13:43:33 -07:00
|
|
|
void
|
|
|
|
JSCompartment::reduceGCTriggerBytes(uint32 amount) {
|
|
|
|
JS_ASSERT(amount > 0);
|
2011-06-20 14:44:26 -07:00
|
|
|
JS_ASSERT(gcTriggerBytes - amount >= 0);
|
2011-07-25 04:04:02 -07:00
|
|
|
if (gcTriggerBytes - amount < GC_ALLOCATION_THRESHOLD * GC_HEAP_GROWTH_FACTOR)
|
2011-04-13 13:43:33 -07:00
|
|
|
return;
|
|
|
|
gcTriggerBytes -= amount;
|
|
|
|
}
|
|
|
|
|
2011-04-25 13:05:30 -07:00
|
|
|
namespace js {
|
|
|
|
namespace gc {
|
|
|
|
|
|
|
|
/*
 * Advance the cursor through the list until an arena with free cells is
 * found; return it, or NULL if none remains. The cursor ends up just past
 * the returned arena so the next search resumes there.
 */
inline ArenaHeader *
ArenaList::searchForFreeArena()
{
    for (ArenaHeader *aheader = *cursor; aheader; aheader = *cursor) {
        cursor = &aheader->next;
        if (aheader->hasFreeThings())
            return aheader;
    }
    return NULL;
}
|
|
|
|
|
2011-05-19 12:01:08 -07:00
|
|
|
/*
 * Return an arena with free cells for allocation, reusing one from the list
 * when possible and otherwise carving a fresh arena out of a chunk. Returns
 * NULL (after triggering a GC) if no chunk can be obtained. In threadsafe
 * builds this coordinates with the background finalization state machine.
 */
template <size_t thingSize>
inline ArenaHeader *
ArenaList::getArenaWithFreeList(JSContext *cx, unsigned thingKind)
{
    Chunk *chunk;

#ifdef JS_THREADSAFE
    /*
     * We cannot search the arena list for free things while the
     * background finalization runs and can modify head or cursor at any
     * moment.
     */
    if (backgroundFinalizeState == BFS_DONE) {
      check_arena_list:
        if (ArenaHeader *aheader = searchForFreeArena())
            return aheader;
    }

    AutoLockGC lock(cx->runtime);

    for (;;) {
        if (backgroundFinalizeState == BFS_JUST_FINISHED) {
            /*
             * Before we took the GC lock or while waiting for the background
             * finalization to finish the latter added new arenas to the list.
             * Check the list again for free things outside the GC lock.
             */
            JS_ASSERT(*cursor);
            backgroundFinalizeState = BFS_DONE;
            goto check_arena_list;
        }

        JS_ASSERT(!*cursor);
        chunk = PickChunk(cx);
        if (chunk || backgroundFinalizeState == BFS_DONE)
            break;

        /*
         * If the background finalization still runs, wait for it to
         * finish and retry to check if it populated the arena list or
         * added new empty arenas.
         */
        JS_ASSERT(backgroundFinalizeState == BFS_RUN);
        cx->runtime->gcHelperThread.waitBackgroundSweepEnd(cx->runtime, false);
        JS_ASSERT(backgroundFinalizeState == BFS_JUST_FINISHED ||
                  backgroundFinalizeState == BFS_DONE);
    }

#else /* !JS_THREADSAFE */

    /* No background finalization: a plain search then a chunk pick suffices. */
    if (ArenaHeader *aheader = searchForFreeArena())
        return aheader;
    chunk = PickChunk(cx);

#endif /* !JS_THREADSAFE */

    if (!chunk) {
        GCREASON(CHUNK);
        TriggerGC(cx->runtime);
        return NULL;
    }

    /*
     * While we still hold the GC lock get the arena from the chunk and add it
     * to the head of the list before the cursor to prevent checking the arena
     * for the free things.
     */
    ArenaHeader *aheader = chunk->allocateArena<thingSize>(cx, thingKind);
    aheader->next = head;
    if (cursor == &head)
        cursor = &aheader->next;
    head = aheader;
    return aheader;
}
|
|
|
|
|
2011-04-25 13:05:30 -07:00
|
|
|
/*
 * Finalize every arena in this list synchronously on the calling thread,
 * then reset the cursor to the list head.
 */
template<typename T>
void
ArenaList::finalizeNow(JSContext *cx)
{
#ifdef JS_THREADSAFE
    /* Must not race with a background finalization of the same list. */
    JS_ASSERT(backgroundFinalizeState == BFS_DONE);
#endif
    FinalizeArenas<T>(cx, &head);
    cursor = &head;
}
|
|
|
|
|
2011-04-25 13:05:30 -07:00
|
|
|
#ifdef JS_THREADSAFE
|
|
|
|
/*
 * Hand this list's arenas to the background sweeping thread when possible;
 * fall back to synchronous finalization if there is no background-free
 * context or the append fails.
 */
template<typename T>
inline void
ArenaList::finalizeLater(JSContext *cx)
{
    /* Only kinds eligible for background finalization may come here. */
    JS_ASSERT_IF(head,
                 head->getThingKind() == FINALIZE_OBJECT0_BACKGROUND ||
                 head->getThingKind() == FINALIZE_OBJECT2_BACKGROUND ||
                 head->getThingKind() == FINALIZE_OBJECT4_BACKGROUND ||
                 head->getThingKind() == FINALIZE_OBJECT8_BACKGROUND ||
                 head->getThingKind() == FINALIZE_OBJECT12_BACKGROUND ||
                 head->getThingKind() == FINALIZE_OBJECT16_BACKGROUND ||
                 head->getThingKind() == FINALIZE_SHORT_STRING ||
                 head->getThingKind() == FINALIZE_STRING);
    JS_ASSERT(!cx->runtime->gcHelperThread.sweeping);

    /*
     * The state can be just-finished if we have not allocated any GC things
     * from the arena list after the previous background finalization.
     */
    JS_ASSERT(backgroundFinalizeState == BFS_DONE ||
              backgroundFinalizeState == BFS_JUST_FINISHED);

    if (head && cx->gcBackgroundFree && cx->gcBackgroundFree->finalizeVector.append(head)) {
        /* Detach the list; backgroundFinalize will splice survivors back in. */
        head = NULL;
        cursor = &head;
        backgroundFinalizeState = BFS_RUN;
    } else {
        JS_ASSERT_IF(!head, cursor == &head);
        backgroundFinalizeState = BFS_DONE;
        finalizeNow<T>(cx);
    }
}
|
|
|
|
|
|
|
|
/*
 * Runs on the GC helper thread: finalize a detached arena list, dispatching
 * on its thing kind, then splice any surviving arenas back into the owning
 * compartment's list under the GC lock and update the list's state.
 */
/*static*/ void
ArenaList::backgroundFinalize(JSContext *cx, ArenaHeader *listHead)
{
    JS_ASSERT(listHead);
    unsigned thingKind = listHead->getThingKind();
    JSCompartment *comp = listHead->compartment;
    ArenaList *al = &comp->arenas[thingKind];

    /* FinalizeArenas needs the static thing type, so dispatch on the kind. */
    switch (thingKind) {
      default:
        JS_NOT_REACHED("wrong kind");
        break;
      case FINALIZE_OBJECT0_BACKGROUND:
        FinalizeArenas<JSObject>(cx, &listHead);
        break;
      case FINALIZE_OBJECT2_BACKGROUND:
        FinalizeArenas<JSObject_Slots2>(cx, &listHead);
        break;
      case FINALIZE_OBJECT4_BACKGROUND:
        FinalizeArenas<JSObject_Slots4>(cx, &listHead);
        break;
      case FINALIZE_OBJECT8_BACKGROUND:
        FinalizeArenas<JSObject_Slots8>(cx, &listHead);
        break;
      case FINALIZE_OBJECT12_BACKGROUND:
        FinalizeArenas<JSObject_Slots12>(cx, &listHead);
        break;
      case FINALIZE_OBJECT16_BACKGROUND:
        FinalizeArenas<JSObject_Slots16>(cx, &listHead);
        break;
      case FINALIZE_STRING:
        FinalizeArenas<JSString>(cx, &listHead);
        break;
      case FINALIZE_SHORT_STRING:
        FinalizeArenas<JSShortString>(cx, &listHead);
        break;
    }

    /*
     * After we finish the finalization al->cursor must point to the end of
     * the head list as we emptied the list before the background finalization
     * and the allocation adds new arenas before the cursor.
     */
    AutoLockGC lock(cx->runtime);
    JS_ASSERT(al->backgroundFinalizeState == BFS_RUN);
    JS_ASSERT(!*al->cursor);
    if (listHead) {
        *al->cursor = listHead;
        al->backgroundFinalizeState = BFS_JUST_FINISHED;
    } else {
        al->backgroundFinalizeState = BFS_DONE;
    }
}
|
|
|
|
|
|
|
|
#endif /* JS_THREADSAFE */
|
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
#ifdef DEBUG
|
|
|
|
/*
 * DEBUG-only sanity check used at allocation sites: the caller must be on a
 * registered thread and no GC may be running.
 */
bool
CheckAllocation(JSContext *cx)
{
#ifdef JS_THREADSAFE
    JS_ASSERT(cx->thread());
#endif
    JS_ASSERT(!cx->runtime->gcRunning);
    return true;
}
|
2009-10-15 23:10:54 -07:00
|
|
|
#endif
|
|
|
|
|
2010-10-07 13:43:52 -07:00
|
|
|
inline bool
|
|
|
|
NeedLastDitchGC(JSContext *cx)
|
|
|
|
{
|
|
|
|
JSRuntime *rt = cx->runtime;
|
2011-01-07 23:44:57 -08:00
|
|
|
return rt->gcIsNeeded;
|
2010-10-07 13:43:52 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
/*
 * Run a GC as a last resort before reporting OOM.
 * Return false only if the GC run but could not bring its memory usage under
 * JSRuntime::gcMaxBytes.
 */
static bool
RunLastDitchGC(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
#ifdef JS_THREADSAFE
    /* Drop the atoms-compartment lock for the GC's duration if we hold it. */
    Maybe<AutoUnlockAtomsCompartment> maybeUnlockAtomsCompartment;
    if (cx->compartment == rt->atomsCompartment && rt->atomsCompartmentIsLocked)
        maybeUnlockAtomsCompartment.construct(cx);
#endif
    /* The last ditch GC preserves all atoms. */
    AutoKeepAtoms keep(rt);
    GCREASON(LASTDITCH);
    js_GC(cx, rt->gcTriggerCompartment, GC_NORMAL);

#ifdef JS_THREADSAFE
    /* Still over budget: let background sweeping release memory first. */
    if (rt->gcBytes >= rt->gcMaxBytes)
        cx->runtime->gcHelperThread.waitBackgroundSweepEnd(cx->runtime);
#endif

    return rt->gcBytes < rt->gcMaxBytes;
}
|
|
|
|
|
2011-06-01 17:48:52 -07:00
|
|
|
static inline bool
|
|
|
|
IsGCAllowed(JSContext *cx)
|
|
|
|
{
|
|
|
|
return !JS_ON_TRACE(cx) && !JS_THREAD_DATA(cx)->waiveGCQuota;
|
|
|
|
}
|
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
/*
 * Refill the compartment's empty free list for thingKind with cells of type
 * T, running a last-ditch GC when needed. Returns the first free cell, or
 * NULL (after reporting OOM) when allocation is impossible.
 */
template <typename T>
inline void *
RefillTypedFreeList(JSContext *cx, unsigned thingKind)
{
    JS_ASSERT(!cx->runtime->gcRunning);

    /*
     * For compatibility with older code we tolerate calling the allocator
     * during the GC in optimized builds.
     */
    if (cx->runtime->gcRunning)
        return NULL;

    JSCompartment *compartment = cx->compartment;
    JS_ASSERT(compartment->freeLists.lists[thingKind].isEmpty());

    bool canGC = IsGCAllowed(cx);
    bool runGC = canGC && JS_UNLIKELY(NeedLastDitchGC(cx));
    for (;;) {
        if (runGC) {
            if (!RunLastDitchGC(cx))
                break;

            /*
             * The JSGC_END callback can legitimately allocate new GC
             * things and populate the free list. If that happens, just
             * return that list head.
             */
            if (void *thing = compartment->freeLists.getNext(thingKind, sizeof(T)))
                return thing;
        }
        ArenaHeader *aheader =
            compartment->arenas[thingKind].getArenaWithFreeList<sizeof(T)>(cx, thingKind);
        if (aheader) {
            JS_ASSERT(sizeof(T) == aheader->getThingSize());
            return compartment->freeLists.populate(aheader, thingKind, sizeof(T));
        }

        /*
         * We failed to allocate any arena. Run the GC if we can unless we
         * have done it already.
         */
        if (!canGC || runGC)
            break;
        runGC = true;
    }

    js_ReportOutOfMemory(cx);
    return NULL;
}
|
|
|
|
|
2011-08-05 09:43:59 -07:00
|
|
|
void *
|
2010-10-13 11:49:22 -07:00
|
|
|
RefillFinalizableFreeList(JSContext *cx, unsigned thingKind)
|
|
|
|
{
|
|
|
|
switch (thingKind) {
|
|
|
|
case FINALIZE_OBJECT0:
|
2011-04-13 13:43:33 -07:00
|
|
|
case FINALIZE_OBJECT0_BACKGROUND:
|
2010-10-13 11:49:22 -07:00
|
|
|
return RefillTypedFreeList<JSObject>(cx, thingKind);
|
|
|
|
case FINALIZE_OBJECT2:
|
2011-04-13 13:43:33 -07:00
|
|
|
case FINALIZE_OBJECT2_BACKGROUND:
|
2010-10-13 11:49:22 -07:00
|
|
|
return RefillTypedFreeList<JSObject_Slots2>(cx, thingKind);
|
|
|
|
case FINALIZE_OBJECT4:
|
2011-04-13 13:43:33 -07:00
|
|
|
case FINALIZE_OBJECT4_BACKGROUND:
|
2010-10-13 11:49:22 -07:00
|
|
|
return RefillTypedFreeList<JSObject_Slots4>(cx, thingKind);
|
|
|
|
case FINALIZE_OBJECT8:
|
2011-04-13 13:43:33 -07:00
|
|
|
case FINALIZE_OBJECT8_BACKGROUND:
|
2010-10-13 11:49:22 -07:00
|
|
|
return RefillTypedFreeList<JSObject_Slots8>(cx, thingKind);
|
|
|
|
case FINALIZE_OBJECT12:
|
2011-04-13 13:43:33 -07:00
|
|
|
case FINALIZE_OBJECT12_BACKGROUND:
|
2010-10-13 11:49:22 -07:00
|
|
|
return RefillTypedFreeList<JSObject_Slots12>(cx, thingKind);
|
|
|
|
case FINALIZE_OBJECT16:
|
2011-04-13 13:43:33 -07:00
|
|
|
case FINALIZE_OBJECT16_BACKGROUND:
|
2010-10-13 11:49:22 -07:00
|
|
|
return RefillTypedFreeList<JSObject_Slots16>(cx, thingKind);
|
|
|
|
case FINALIZE_STRING:
|
|
|
|
return RefillTypedFreeList<JSString>(cx, thingKind);
|
2010-11-15 12:39:00 -08:00
|
|
|
case FINALIZE_EXTERNAL_STRING:
|
|
|
|
return RefillTypedFreeList<JSExternalString>(cx, thingKind);
|
2010-10-13 11:49:22 -07:00
|
|
|
case FINALIZE_SHORT_STRING:
|
|
|
|
return RefillTypedFreeList<JSShortString>(cx, thingKind);
|
|
|
|
case FINALIZE_FUNCTION:
|
|
|
|
return RefillTypedFreeList<JSFunction>(cx, thingKind);
|
2011-03-23 11:57:44 -07:00
|
|
|
case FINALIZE_SHAPE:
|
|
|
|
return RefillTypedFreeList<Shape>(cx, thingKind);
|
2010-09-24 10:54:39 -07:00
|
|
|
#if JS_HAS_XML_SUPPORT
|
2010-10-13 11:49:22 -07:00
|
|
|
case FINALIZE_XML:
|
|
|
|
return RefillTypedFreeList<JSXML>(cx, thingKind);
|
2010-09-24 10:54:39 -07:00
|
|
|
#endif
|
2010-10-13 11:49:22 -07:00
|
|
|
default:
|
|
|
|
JS_NOT_REACHED("bad finalize kind");
|
2011-08-05 09:43:59 -07:00
|
|
|
return 0;
|
2010-10-13 11:49:22 -07:00
|
|
|
}
|
|
|
|
}
|
2010-09-24 10:54:39 -07:00
|
|
|
|
2011-04-21 15:20:12 -07:00
|
|
|
} /* namespace gc */
|
|
|
|
} /* namespace js */
|
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
/* Public wrapper: return the trace kind of an arbitrary GC thing. */
uint32
js_GetGCThingTraceKind(void *thing)
{
    return GetGCThingTraceKind(thing);
}
|
|
|
|
|
2007-03-22 10:30:00 -07:00
|
|
|
/*
 * Pin |thing| against collection by incrementing its count in
 * rt->gcLocksHash. A null thing is trivially "locked". Returns false only
 * when the lock table cannot be grown (OOM).
 */
JSBool
js_LockGCThingRT(JSRuntime *rt, void *thing)
{
    if (!thing)
        return true;

    AutoLockGC lock(rt);
    GCLocks::Ptr entry = rt->gcLocksHash.lookupWithDefault(thing, 0);
    if (!entry)
        return false;

    entry->value++;
    return true;
}
|
|
|
|
|
2009-12-13 23:55:17 -08:00
|
|
|
void
|
2007-03-22 10:30:00 -07:00
|
|
|
js_UnlockGCThingRT(JSRuntime *rt, void *thing)
|
|
|
|
{
|
2009-09-10 04:13:59 -07:00
|
|
|
if (!thing)
|
2009-12-13 23:55:17 -08:00
|
|
|
return;
|
2007-03-22 10:30:00 -07:00
|
|
|
|
2010-04-08 05:54:18 -07:00
|
|
|
AutoLockGC lock(rt);
|
2010-05-20 13:50:08 -07:00
|
|
|
GCLocks::Ptr p = rt->gcLocksHash.lookup(thing);
|
|
|
|
|
|
|
|
if (p) {
|
2009-12-24 01:31:07 -08:00
|
|
|
rt->gcPoke = true;
|
2010-05-20 13:50:08 -07:00
|
|
|
if (--p->value == 0)
|
|
|
|
rt->gcLocksHash.remove(p);
|
2007-03-22 10:30:00 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2010-07-26 11:44:04 -07:00
|
|
|
namespace js {
|
|
|
|
|
2007-03-22 10:30:00 -07:00
|
|
|
/*
|
2009-12-13 23:55:17 -08:00
|
|
|
* When the native stack is low, the GC does not call JS_TraceChildren to mark
|
|
|
|
* the reachable "children" of the thing. Rather the thing is put aside and
|
|
|
|
* JS_TraceChildren is called later with more space on the C stack.
|
|
|
|
*
|
|
|
|
* To implement such delayed marking of the children with minimal overhead for
|
2010-09-24 10:54:39 -07:00
|
|
|
* the normal case of sufficient native stack, the code adds a field per
|
2011-06-14 19:21:47 -07:00
|
|
|
* arena. The field markingDelay->link links all arenas with delayed things
|
2010-10-07 13:43:52 -07:00
|
|
|
* into a stack list with the pointer to stack top in
|
2010-09-24 10:54:39 -07:00
|
|
|
* GCMarker::unmarkedArenaStackTop. delayMarkingChildren adds
|
2010-07-26 11:44:04 -07:00
|
|
|
* arenas to the stack as necessary while markDelayedChildren pops the arenas
|
2010-04-12 13:59:19 -07:00
|
|
|
* from the stack until it empties.
|
2007-03-22 10:30:00 -07:00
|
|
|
*/
|
|
|
|
|
2010-08-05 05:16:56 -07:00
|
|
|
/*
 * Set up a marker over the runtime's preallocated mark-stack buffers and
 * initialize the delayed-marking stack to its sentinel bottom.
 */
GCMarker::GCMarker(JSContext *cx)
  : color(0),
    unmarkedArenaStackTop(MarkingDelay::stackBottom()),
    objStack(cx->runtime->gcMarkStackObjs, sizeof(cx->runtime->gcMarkStackObjs)),
    ropeStack(cx->runtime->gcMarkStackRopes, sizeof(cx->runtime->gcMarkStackRopes)),
    xmlStack(cx->runtime->gcMarkStackXMLs, sizeof(cx->runtime->gcMarkStackXMLs)),
    largeStack(cx->runtime->gcMarkStackLarges, sizeof(cx->runtime->gcMarkStackLarges))
{
    JS_TRACER_INIT(this, cx, NULL);
#ifdef DEBUG
    markLaterArenas = 0;
#endif
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    /* Optional root-dump file name comes from the environment. */
    conservativeDumpFileName = getenv("JS_DUMP_CONSERVATIVE_GC_ROOTS");
    memset(&conservativeStats, 0, sizeof(conservativeStats));
#endif
}
|
|
|
|
|
|
|
|
/* On destruction, flush the conservative-root dump when it was requested. */
GCMarker::~GCMarker()
{
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    dumpConservativeRoots();
#endif
}
|
|
|
|
|
2010-07-26 11:44:04 -07:00
|
|
|
/*
 * Record that |thing|'s children could not be marked now (native stack too
 * low): push its whole arena onto the delayed-marking stack so
 * markDelayedChildren can revisit it with more stack available.
 */
void
GCMarker::delayMarkingChildren(const void *thing)
{
    const Cell *cell = reinterpret_cast<const Cell *>(thing);
    ArenaHeader *aheader = cell->arenaHeader();
    if (aheader->getMarkingDelay()->link) {
        /* Arena already scheduled to be marked later */
        return;
    }
    aheader->getMarkingDelay()->link = unmarkedArenaStackTop;
    unmarkedArenaStackTop = aheader;
#ifdef DEBUG
    markLaterArenas++;
#endif
}
|
|
|
|
|
2011-04-19 22:30:10 -07:00
|
|
|
/*
 * Trace the children of every already-marked thing in |aheader|'s arena.
 * Unmarked things are skipped; re-delays are handled by the caller's loop.
 */
static void
MarkDelayedChildren(JSTracer *trc, ArenaHeader *aheader)
{
    unsigned traceKind = GetFinalizableTraceKind(aheader->getThingKind());
    size_t thingSize = aheader->getThingSize();
    Arena *a = aheader->getArena();
    uintptr_t end = a->thingsEnd();
    for (uintptr_t thing = a->thingsStart(thingSize); thing != end; thing += thingSize) {
        Cell *t = reinterpret_cast<Cell *>(thing);
        if (t->isMarked())
            JS_TraceChildren(trc, t, traceKind);
    }
}
|
2007-03-22 10:30:00 -07:00
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
/*
 * Drain the delayed-marking stack, tracing the children of marked things in
 * each queued arena until no arenas remain.
 */
void
GCMarker::markDelayedChildren()
{
    while (unmarkedArenaStackTop != MarkingDelay::stackBottom()) {
        /*
         * If marking gets delayed at the same arena again, we must repeat
         * marking of its things. For that we pop arena from the stack and
         * clear its nextDelayedMarking before we begin the marking.
         */
        ArenaHeader *aheader = unmarkedArenaStackTop;
        unmarkedArenaStackTop = aheader->getMarkingDelay()->link;
        JS_ASSERT(unmarkedArenaStackTop);
        aheader->getMarkingDelay()->link = NULL;
#ifdef DEBUG
        JS_ASSERT(markLaterArenas);
        markLaterArenas--;
#endif
        MarkDelayedChildren(this, aheader);
    }
    JS_ASSERT(!markLaterArenas);
}
|
|
|
|
|
2010-07-14 23:19:36 -07:00
|
|
|
} /* namespace js */
|
|
|
|
|
2011-04-19 22:30:10 -07:00
|
|
|
#ifdef DEBUG
|
|
|
|
static void
|
|
|
|
EmptyMarkCallback(JSTracer *trc, void *thing, uint32 kind)
|
|
|
|
{
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
|
2010-05-20 13:50:08 -07:00
|
|
|
/*
 * Mark one entry of rt->gcRootsHash. In DEBUG builds, first validate that
 * the rooted address still holds a live GC thing and complain loudly about
 * likely missing JS_RemoveRoot calls.
 */
static void
gc_root_traversal(JSTracer *trc, const RootEntry &entry)
{
#ifdef DEBUG
    void *ptr;
    if (entry.value.type == JS_GC_ROOT_GCTHING_PTR) {
        ptr = *reinterpret_cast<void **>(entry.key);
    } else {
        Value *vp = reinterpret_cast<Value *>(entry.key);
        ptr = vp->isGCThing() ? vp->toGCThing() : NULL;
    }

    if (ptr) {
        if (!JSAtom::isStatic(ptr)) {
            /* Use conservative machinery to find if ptr is a valid GC thing. */
            JSTracer checker;
            JS_TRACER_INIT(&checker, trc->context, EmptyMarkCallback);
            ConservativeGCTest test = MarkIfGCThingWord(&checker, reinterpret_cast<jsuword>(ptr));
            if (test != CGCT_VALID && entry.value.name) {
                fprintf(stderr,
"JS API usage error: the address passed to JS_AddNamedRoot currently holds an\n"
"invalid gcthing. This is usually caused by a missing call to JS_RemoveRoot.\n"
"The root's name is \"%s\".\n",
                        entry.value.name);
            }
            JS_ASSERT(test == CGCT_VALID);
        }
    }
#endif
    JS_SET_TRACING_NAME(trc, entry.value.name ? entry.value.name : "root");
    if (entry.value.type == JS_GC_ROOT_GCTHING_PTR)
        MarkGCThing(trc, *reinterpret_cast<void **>(entry.key));
    else
        MarkValueRaw(trc, *reinterpret_cast<Value *>(entry.key));
}
|
|
|
|
|
2010-05-20 13:50:08 -07:00
|
|
|
/* Mark one locked GC thing from rt->gcLocksHash; the count is always >= 1. */
static void
gc_lock_traversal(const GCLocks::Entry &entry, JSTracer *trc)
{
    JS_ASSERT(entry.value >= 1);
    MarkGCThing(trc, entry.key, "locked object");
}
|
|
|
|
|
|
|
|
/*
 * Mark the GC things reachable from one stack frame: its scope chain,
 * arguments object (if any), script, and return value. Dummy frames carry
 * only a scope chain. Also flags the script's compartment as active.
 */
void
js_TraceStackFrame(JSTracer *trc, StackFrame *fp)
{
    MarkObject(trc, fp->scopeChain(), "scope chain");
    if (fp->isDummyFrame())
        return;
    if (fp->hasArgsObj())
        MarkObject(trc, fp->argsObj(), "arguments");
    js_TraceScript(trc, fp->script(), NULL);
    fp->script()->compartment->active = true;
    MarkValue(trc, fp->returnValue(), "rval");
}
|
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
/* Mark every id held by this rooter's JSIdArray. */
void
AutoIdArray::trace(JSTracer *trc)
{
    JS_ASSERT(tag == IDARRAY);
    gc::MarkIdRange(trc, idArray->length, idArray->vector, "JSAutoIdArray.idArray");
}
|
|
|
|
|
|
|
|
/* Mark the object whose enumeration state this rooter protects. */
void
AutoEnumStateRooter::trace(JSTracer *trc)
{
    gc::MarkObject(trc, *obj, "js::AutoEnumStateRooter.obj");
}
|
|
|
|
|
2010-06-16 14:13:01 -07:00
|
|
|
/*
 * Dispatch on the rooter's |tag| to mark whatever this AutoGCRooter
 * subclass protects. A non-negative tag is not an enum value but the length
 * of an AutoArrayRooter's value array (handled after the switch).
 */
inline void
AutoGCRooter::trace(JSTracer *trc)
{
    switch (tag) {
      case JSVAL:
        MarkValue(trc, static_cast<AutoValueRooter *>(this)->val, "js::AutoValueRooter.val");
        return;

      case SHAPE:
        MarkShape(trc, static_cast<AutoShapeRooter *>(this)->shape, "js::AutoShapeRooter.val");
        return;

      case PARSER:
        static_cast<Parser *>(this)->trace(trc);
        return;

      case SCRIPT:
        if (JSScript *script = static_cast<AutoScriptRooter *>(this)->script)
            js_TraceScript(trc, script, NULL);
        return;

      case ENUMERATOR:
        static_cast<AutoEnumStateRooter *>(this)->trace(trc);
        return;

      case IDARRAY: {
        JSIdArray *ida = static_cast<AutoIdArray *>(this)->idArray;
        MarkIdRange(trc, ida->length, ida->vector, "js::AutoIdArray.idArray");
        return;
      }

      case DESCRIPTORS: {
        PropDescArray &descriptors =
            static_cast<AutoPropDescArrayRooter *>(this)->descriptors;
        for (size_t i = 0, len = descriptors.length(); i < len; i++) {
            PropDesc &desc = descriptors[i];
            MarkValue(trc, desc.pd, "PropDesc::pd");
            MarkValue(trc, desc.value, "PropDesc::value");
            MarkValue(trc, desc.get, "PropDesc::get");
            MarkValue(trc, desc.set, "PropDesc::set");
        }
        return;
      }

      case DESCRIPTOR : {
        PropertyDescriptor &desc = *static_cast<AutoPropertyDescriptorRooter *>(this);
        if (desc.obj)
            MarkObject(trc, *desc.obj, "Descriptor::obj");
        MarkValue(trc, desc.value, "Descriptor::value");
        /* Getter/setter slots hold objects only when the JSPROP_* flag says so. */
        if ((desc.attrs & JSPROP_GETTER) && desc.getter)
            MarkObject(trc, *CastAsObject(desc.getter), "Descriptor::get");
        if (desc.attrs & JSPROP_SETTER && desc.setter)
            MarkObject(trc, *CastAsObject(desc.setter), "Descriptor::set");
        return;
      }

      case NAMESPACES: {
        JSXMLArray &array = static_cast<AutoNamespaceArray *>(this)->array;
        MarkObjectRange(trc, array.length, reinterpret_cast<JSObject **>(array.vector),
                        "JSXMLArray.vector");
        array.cursors->trace(trc);
        return;
      }

      case XML:
        js_TraceXML(trc, static_cast<AutoXMLRooter *>(this)->xml);
        return;

      case OBJECT:
        if (JSObject *obj = static_cast<AutoObjectRooter *>(this)->obj)
            MarkObject(trc, *obj, "js::AutoObjectRooter.obj");
        return;

      case ID:
        MarkId(trc, static_cast<AutoIdRooter *>(this)->id_, "js::AutoIdRooter.val");
        return;

      case VALVECTOR: {
        AutoValueVector::VectorImpl &vector = static_cast<AutoValueVector *>(this)->vector;
        MarkValueRange(trc, vector.length(), vector.begin(), "js::AutoValueVector.vector");
        return;
      }

      case STRING:
        if (JSString *str = static_cast<AutoStringRooter *>(this)->str)
            MarkString(trc, str, "js::AutoStringRooter.str");
        return;

      case IDVECTOR: {
        AutoIdVector::VectorImpl &vector = static_cast<AutoIdVector *>(this)->vector;
        MarkIdRange(trc, vector.length(), vector.begin(), "js::AutoIdVector.vector");
        return;
      }

      case SHAPEVECTOR: {
        AutoShapeVector::VectorImpl &vector = static_cast<js::AutoShapeVector *>(this)->vector;
        MarkShapeRange(trc, vector.length(), vector.begin(), "js::AutoShapeVector.vector");
        return;
      }

      case BINDINGS: {
        static_cast<js::AutoBindingsRooter *>(this)->bindings.trace(trc);
        return;
      }
    }

    /* Non-negative tag: an AutoArrayRooter of |tag| values. */
    JS_ASSERT(tag >= 0);
    MarkValueRange(trc, tag, static_cast<AutoArrayRooter *>(this)->array, "js::AutoArrayRooter.array");
}
|
|
|
|
|
2010-08-30 11:46:18 -07:00
|
|
|
namespace js {
|
|
|
|
|
2011-01-08 18:55:54 -08:00
|
|
|
/*
 * Mark the roots-by-definition reachable from a single JSContext: global
 * object, pending exception, AutoGCRooter chain, sharp-object map, and the
 * iterator value.
 */
JS_FRIEND_API(void)
MarkContext(JSTracer *trc, JSContext *acx)
{
    /* Stack frames and slots are traced by StackSpace::mark. */

    /* Mark other roots-by-definition in acx. */
    if (acx->globalObject && !acx->hasRunOption(JSOPTION_UNROOTED_GLOBAL))
        MarkObject(trc, *acx->globalObject, "global object");
    if (acx->isExceptionPending())
        MarkValue(trc, acx->getPendingException(), "exception");

    for (js::AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down)
        gcr->trace(trc);

    if (acx->sharpObjectMap.depth > 0)
        js_TraceSharpMap(trc, &acx->sharpObjectMap);

    MarkValue(trc, acx->iterValue, "iterValue");
}
|
|
|
|
|
2011-07-14 16:02:12 -07:00
|
|
|
/*
 * Run |op| with |c| bound to the single compartment being collected when
 * rt->gcCurrentCompartment is set, otherwise once per compartment in the
 * runtime.
 */
#define PER_COMPARTMENT_OP(rt, op) \
    if ((rt)->gcCurrentCompartment) { \
        JSCompartment *c = (rt)->gcCurrentCompartment; \
        op; \
    } else { \
        for (JSCompartment **i = rt->compartments.begin(); i != rt->compartments.end(); ++i) { \
            JSCompartment *c = *i; \
            op; \
        } \
    }
|
|
|
|
|
2009-01-30 15:40:05 -08:00
|
|
|
/*
 * Mark every root in the runtime: the conservative stack scan (skipped at
 * shutdown landing), explicit root and lock hashes, atoms, per-context
 * roots, trace-monitor state, per-thread data, and finally any embedder
 * extra-roots hook.
 */
JS_REQUIRES_STACK void
MarkRuntime(JSTracer *trc)
{
    JSRuntime *rt = trc->context->runtime;

    if (rt->state != JSRTS_LANDING)
        MarkConservativeStackRoots(trc);

    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront())
        gc_root_traversal(trc, r.front());

    for (GCLocks::Range r = rt->gcLocksHash.all(); !r.empty(); r.popFront())
        gc_lock_traversal(r.front(), trc);

    js_TraceAtomState(trc);

    JSContext *iter = NULL;
    while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter))
        MarkContext(trc, acx);

#ifdef JS_TRACER
    PER_COMPARTMENT_OP(rt, if (c->hasTraceMonitor()) c->traceMonitor()->mark(trc));
#endif

    for (ThreadDataIter i(rt); !i.empty(); i.popFront())
        i.threadData()->mark(trc);

    /*
     * We mark extra roots at the end so that the hook can use additional
     * colors to implement cycle collection.
     */
    if (rt->gcExtraRootsTraceOp)
        rt->gcExtraRootsTraceOp(trc, rt->gcExtraRootsData);
}
|
|
|
|
|
2010-07-28 11:20:19 -07:00
|
|
|
/*
 * Request a full (all-compartments) GC: record the need and ping every
 * thread's operation callback so the collection starts at the next safe
 * point. No-op when a GC is already pending.
 */
void
TriggerGC(JSRuntime *rt)
{
    JS_ASSERT(!rt->gcRunning);
    if (rt->gcIsNeeded)
        return;

    /*
     * Trigger the GC when it is safe to call an operation callback on any
     * thread.
     */
    rt->gcIsNeeded = true;
    rt->gcTriggerCompartment = NULL;
    TriggerAllOperationCallbacks(rt);
}
|
|
|
|
|
2011-01-07 23:44:57 -08:00
|
|
|
/*
 * Request a GC of a single compartment, escalating to a full GC when
 * per-compartment collection is unavailable (zeal mode, non-compartment GC
 * mode, the atoms compartment, another compartment already pending, or
 * memory use well over quota).
 */
void
TriggerCompartmentGC(JSCompartment *comp)
{
    JSRuntime *rt = comp->rt;
    JS_ASSERT(!rt->gcRunning);
    GCREASON(COMPARTMENT);

    if (rt->gcZeal()) {
        TriggerGC(rt);
        return;
    }

    if (rt->gcMode != JSGC_MODE_COMPARTMENT || comp == rt->atomsCompartment) {
        /* We can't do a compartmental GC of the default compartment. */
        TriggerGC(rt);
        return;
    }

    if (rt->gcIsNeeded) {
        /* If we need to GC more than one compartment, run a full GC. */
        if (rt->gcTriggerCompartment != comp)
            rt->gcTriggerCompartment = NULL;
        return;
    }

    if (rt->gcBytes > 8192 && rt->gcBytes >= 3 * (rt->gcTriggerBytes / 2)) {
        /* If we're using significantly more than our quota, do a full GC. */
        TriggerGC(rt);
        return;
    }

    /*
     * Trigger the GC when it is safe to call an operation callback on any
     * thread.
     */
    rt->gcIsNeeded = true;
    rt->gcTriggerCompartment = comp;
    TriggerAllOperationCallbacks(comp->rt);
}
|
|
|
|
|
|
|
|
/*
 * Run a GC now if one is warranted: always under zeal, when a GC was
 * requested, when the current compartment is well over its trigger quota,
 * or (as a shrinking GC) when the idle-time deadline has passed and chunks
 * could be returned to the system.
 */
void
MaybeGC(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    JS_ASSERT(rt->onOwnerThread());

    if (rt->gcZeal()) {
        GCREASON(MAYBEGC);
        js_GC(cx, NULL, GC_NORMAL);
        return;
    }

    JSCompartment *comp = cx->compartment;
    if (rt->gcIsNeeded) {
        GCREASON(MAYBEGC);
        /* Honor a single-compartment request only if it names our compartment. */
        js_GC(cx, (comp == rt->gcTriggerCompartment) ? comp : NULL, GC_NORMAL);
        return;
    }

    if (comp->gcBytes > 8192 && comp->gcBytes >= 3 * (comp->gcTriggerBytes / 4)) {
        GCREASON(MAYBEGC);
        js_GC(cx, (rt->gcMode == JSGC_MODE_COMPARTMENT) ? comp : NULL, GC_NORMAL);
        return;
    }

    /*
     * On 32 bit setting gcNextFullGCTime below is not atomic and a race condition
     * could trigger an GC. We tolerate this.
     */
    int64 now = PRMJ_Now();
    if (rt->gcNextFullGCTime && rt->gcNextFullGCTime <= now) {
        if (rt->gcChunkAllocationSinceLastGC || rt->gcEmptyChunkListHead) {
            GCREASON(MAYBEGC);
            js_GC(cx, NULL, GC_SHRINK);
        } else {
            /* Nothing to shrink; push the deadline out another idle span. */
            rt->gcNextFullGCTime = now + GC_IDLE_FULL_SPAN;
        }
    }
}
|
|
|
|
|
2010-09-07 14:08:20 -07:00
|
|
|
} /* namespace js */
|
|
|
|
|
2009-03-24 05:07:35 -07:00
|
|
|
void
|
2010-12-22 12:02:25 -08:00
|
|
|
js_DestroyScriptsToGC(JSContext *cx, JSCompartment *comp)
|
2008-08-20 22:18:42 -07:00
|
|
|
{
|
2009-03-24 05:07:35 -07:00
|
|
|
JSScript **listp, *script;
|
2008-08-20 22:18:42 -07:00
|
|
|
|
2010-12-22 12:02:25 -08:00
|
|
|
for (size_t i = 0; i != JS_ARRAY_LENGTH(comp->scriptsToGC); ++i) {
|
|
|
|
listp = &comp->scriptsToGC[i];
|
2009-03-24 05:07:35 -07:00
|
|
|
while ((script = *listp) != NULL) {
|
|
|
|
*listp = script->u.nextToGC;
|
|
|
|
script->u.nextToGC = NULL;
|
2011-02-11 13:23:18 -08:00
|
|
|
js_DestroyCachedScript(cx, script);
|
2009-03-24 05:07:35 -07:00
|
|
|
}
|
2008-08-20 22:18:42 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2011-04-13 13:43:33 -07:00
|
|
|
/*
 * Finalize the object, function, and XML arena lists. The *_BACKGROUND
 * kinds are queued for the helper thread in threadsafe builds; the rest are
 * finalized immediately.
 */
void
JSCompartment::finalizeObjectArenaLists(JSContext *cx)
{
    arenas[FINALIZE_OBJECT0]. finalizeNow<JSObject>(cx);
    arenas[FINALIZE_OBJECT2]. finalizeNow<JSObject_Slots2>(cx);
    arenas[FINALIZE_OBJECT4]. finalizeNow<JSObject_Slots4>(cx);
    arenas[FINALIZE_OBJECT8]. finalizeNow<JSObject_Slots8>(cx);
    arenas[FINALIZE_OBJECT12].finalizeNow<JSObject_Slots12>(cx);
    arenas[FINALIZE_OBJECT16].finalizeNow<JSObject_Slots16>(cx);
    arenas[FINALIZE_FUNCTION].finalizeNow<JSFunction>(cx);

#ifdef JS_THREADSAFE
    arenas[FINALIZE_OBJECT0_BACKGROUND]. finalizeLater<JSObject>(cx);
    arenas[FINALIZE_OBJECT2_BACKGROUND]. finalizeLater<JSObject_Slots2>(cx);
    arenas[FINALIZE_OBJECT4_BACKGROUND]. finalizeLater<JSObject_Slots4>(cx);
    arenas[FINALIZE_OBJECT8_BACKGROUND]. finalizeLater<JSObject_Slots8>(cx);
    arenas[FINALIZE_OBJECT12_BACKGROUND].finalizeLater<JSObject_Slots12>(cx);
    arenas[FINALIZE_OBJECT16_BACKGROUND].finalizeLater<JSObject_Slots16>(cx);
#endif

#if JS_HAS_XML_SUPPORT
    arenas[FINALIZE_XML].finalizeNow<JSXML>(cx);
#endif
}
|
|
|
|
|
|
|
|
/*
 * Finalize the string arena lists: deferred to the helper thread in
 * threadsafe builds, immediate otherwise. External strings are always
 * finalized synchronously.
 */
void
JSCompartment::finalizeStringArenaLists(JSContext *cx)
{
#ifdef JS_THREADSAFE
    arenas[FINALIZE_SHORT_STRING].finalizeLater<JSShortString>(cx);
    arenas[FINALIZE_STRING].finalizeLater<JSString>(cx);
#else
    arenas[FINALIZE_SHORT_STRING].finalizeNow<JSShortString>(cx);
    arenas[FINALIZE_STRING].finalizeNow<JSString>(cx);
#endif
    arenas[FINALIZE_EXTERNAL_STRING].finalizeNow<JSExternalString>(cx);
}
|
|
|
|
|
2011-03-28 13:17:46 -07:00
|
|
|
/* Finalize the shape arena list (always synchronous). */
void
JSCompartment::finalizeShapeArenaLists(JSContext *cx)
{
    arenas[FINALIZE_SHAPE].finalizeNow<Shape>(cx);
}
|
|
|
|
|
2010-04-27 06:46:24 -07:00
|
|
|
#ifdef JS_THREADSAFE
|
|
|
|
|
|
|
|
namespace js {
|
|
|
|
|
2010-09-07 14:08:20 -07:00
|
|
|
/*
 * Create the condition variables and the background sweeping thread.
 * Returns false on failure; partially created resources are released by
 * finish().
 */
bool
GCHelperThread::init(JSRuntime *rt)
{
    if (!(wakeup = PR_NewCondVar(rt->gcLock)))
        return false;
    if (!(sweepingDone = PR_NewCondVar(rt->gcLock)))
        return false;

    thread = PR_CreateThread(PR_USER_THREAD, threadMain, rt, PR_PRIORITY_NORMAL,
                             PR_LOCAL_THREAD, PR_JOINABLE_THREAD, 0);
    return !!thread;
}
|
|
|
|
|
|
|
|
/*
 * Shut down the helper thread: signal shutdown under the GC lock, join the
 * thread outside it, then destroy the condition variables.
 */
void
GCHelperThread::finish(JSRuntime *rt)
{
    PRThread *join = NULL;
    {
        AutoLockGC lock(rt);
        if (thread && !shutdown) {
            shutdown = true;
            PR_NotifyCondVar(wakeup);
            join = thread;
        }
    }
    if (join) {
        /* PR_DestroyThread is not necessary. */
        PR_JoinThread(join);
    }
    if (wakeup)
        PR_DestroyCondVar(wakeup);
    if (sweepingDone)
        PR_DestroyCondVar(sweepingDone);
}
|
|
|
|
|
|
|
|
/* static */
/* Thread entry point: recover the runtime and run the helper's loop. */
void
GCHelperThread::threadMain(void *arg)
{
    JSRuntime *rt = static_cast<JSRuntime *>(arg);
    rt->gcHelperThread.threadLoop(rt);
}
|
|
|
|
|
|
|
|
/*
 * Helper-thread main loop: wait (under the GC lock) for sweep requests,
 * sweep with the lock dropped, then notify waiters via sweepingDone.
 */
void
GCHelperThread::threadLoop(JSRuntime *rt)
{
    AutoLockGC lock(rt);
    while (!shutdown) {
        /*
         * Sweeping can be true here on the first iteration if a GC and the
         * corresponding startBackgroundSweep call happen before this thread
         * has a chance to run.
         */
        if (!sweeping)
            PR_WaitCondVar(wakeup, PR_INTERVAL_NO_TIMEOUT);
        if (sweeping) {
            AutoUnlockGC unlock(rt);
            doSweep();
        }
        sweeping = false;
        PR_NotifyAllCondVar(sweepingDone);
    }
}
|
|
|
|
|
|
|
|
/*
 * Kick off a background sweep for the GC kind just performed. Caller must
 * hold the GC lock; wakes the helper thread.
 */
void
GCHelperThread::startBackgroundSweep(JSRuntime *rt, JSGCInvocationKind gckind)
{
    /* The caller takes the GC lock. */
    JS_ASSERT(!sweeping);
    lastGCKind = gckind;
    sweeping = true;
    PR_NotifyCondVar(wakeup);
}
|
|
|
|
|
|
|
|
/*
 * Block until any in-progress background sweep completes. Takes the GC
 * lock itself when |gcUnlocked| is true; otherwise the caller already
 * holds it.
 */
void
GCHelperThread::waitBackgroundSweepEnd(JSRuntime *rt, bool gcUnlocked)
{
    Maybe<AutoLockGC> lock;
    if (gcUnlocked)
        lock.construct(rt);
    while (sweeping)
        PR_WaitCondVar(sweepingDone, PR_INTERVAL_NO_TIMEOUT);
}
|
|
|
|
|
2010-04-28 17:17:34 -07:00
|
|
|
/*
 * The current free-array is full: archive it into freeVector, allocate a
 * fresh array, and queue |ptr| in it. On any allocation failure, fall back
 * to freeing |ptr| immediately on this (foreground) thread.
 */
JS_FRIEND_API(void)
GCHelperThread::replenishAndFreeLater(void *ptr)
{
    JS_ASSERT(freeCursor == freeCursorEnd);
    do {
        if (freeCursor && !freeVector.append(freeCursorEnd - FREE_ARRAY_LENGTH))
            break;
        freeCursor = (void **) OffTheBooks::malloc_(FREE_ARRAY_SIZE);
        if (!freeCursor) {
            freeCursorEnd = NULL;
            break;
        }
        freeCursorEnd = freeCursor + FREE_ARRAY_LENGTH;
        *freeCursor++ = ptr;
        return;
    } while (false);
    /* Fallback: free synchronously rather than lose the pointer. */
    Foreground::free_(ptr);
}
|
|
|
|
|
|
|
|
/*
 * Background sweep body (runs on the helper thread with the GC lock
 * dropped): finalize queued arenas, expire chunks, then release every
 * pointer batched via replenishAndFreeLater.
 */
void
GCHelperThread::doSweep()
{
    JS_ASSERT(cx);
    for (ArenaHeader **i = finalizeVector.begin(); i != finalizeVector.end(); ++i)
        ArenaList::backgroundFinalize(cx, *i);
    finalizeVector.resize(0);
    ExpireGCChunks(cx->runtime, lastGCKind);
    /* Drop cx after finalization; the frees below don't need it. */
    cx = NULL;

    if (freeCursor) {
        /* Free the partially filled current array. */
        void **array = freeCursorEnd - FREE_ARRAY_LENGTH;
        freeElementsAndArray(array, freeCursor);
        freeCursor = freeCursorEnd = NULL;
    } else {
        JS_ASSERT(!freeCursorEnd);
    }
    for (void ***iter = freeVector.begin(); iter != freeVector.end(); ++iter) {
        void **array = *iter;
        freeElementsAndArray(array, array + FREE_ARRAY_LENGTH);
    }
    freeVector.resize(0);
}
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
#endif /* JS_THREADSAFE */
|
|
|
|
|
2010-06-23 14:35:10 -07:00
|
|
|
/*
 * Despite the name, this sweeps every compartment (wrappers, empty shapes,
 * trace/method JIT state); see the step list below.
 */
static void
SweepCrossCompartmentWrappers(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    /*
     * Figure out how much JIT code should be released from inactive compartments.
     * If multiple eighth-lives have passed, compound the release interval linearly;
     * if enough time has passed, all inactive JIT code will be released.
     */
    uint32 releaseInterval = 0;
    int64 now = PRMJ_Now();
    if (now >= rt->gcJitReleaseTime) {
        releaseInterval = 8;
        while (now >= rt->gcJitReleaseTime) {
            /*
             * Once the interval has decayed to 1, pin the release time to
             * |now| so the loop terminates after one more increment.
             */
            if (--releaseInterval == 1)
                rt->gcJitReleaseTime = now;
            rt->gcJitReleaseTime += JIT_SCRIPT_EIGHTH_LIFETIME;
        }
    }

    /*
     * Sweep the compartment:
     * (1) Remove dead wrappers from the compartment map.
     * (2) Finalize any unused empty shapes.
     * (3) Sweep the trace JIT of unused code.
     * (4) Sweep the method JIT ICs and release infrequently used JIT code.
     */
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->sweep(cx, releaseInterval);
}
|
|
|
|
|
|
|
|
/*
 * Destroy compartments that hold no live GC things (and are not explicitly
 * held), compacting rt->compartments in place with a read/write cursor pair.
 * The atoms compartment (slot 0) is never destroyed.
 */
static void
SweepCompartments(JSContext *cx, JSGCInvocationKind gckind)
{
    JSRuntime *rt = cx->runtime;
    /* Sample the callback once; it is invoked per destroyed compartment. */
    JSCompartmentCallback callback = rt->compartmentCallback;

    /* Skip the atomsCompartment. */
    JSCompartment **read = rt->compartments.begin() + 1;
    JSCompartment **end = rt->compartments.end();
    JSCompartment **write = read;
    JS_ASSERT(rt->compartments.length() >= 1);
    JS_ASSERT(*rt->compartments.begin() == rt->atomsCompartment);

    while (read < end) {
        JSCompartment *compartment = *read++;

        /*
         * A compartment is dead when nothing pins it (hold flag) and all of
         * its arena lists are empty; a last-context GC destroys regardless
         * of arena state.
         */
        if (!compartment->hold &&
            (compartment->arenaListsAreEmpty() || gckind == GC_LAST_CONTEXT))
        {
            compartment->freeLists.checkEmpty();
            Probes::GCEndSweepPhase(compartment);
            if (callback)
                JS_ALWAYS_TRUE(callback(cx, compartment, JSCOMPARTMENT_DESTROY));
            if (compartment->principals)
                JSPRINCIPALS_DROP(cx, compartment->principals);
            cx->delete_(compartment);
            continue;
        }
        /* Survivor: compact it toward the front of the vector. */
        *write++ = compartment;
    }
    rt->compartments.resize(write - rt->compartments.begin());
}
|
|
|
|
|
2010-04-08 05:54:18 -07:00
|
|
|
/*
|
2011-04-25 13:05:30 -07:00
|
|
|
* Perform mark-and-sweep GC.
|
|
|
|
*
|
|
|
|
* In a JS_THREADSAFE build, the calling thread must be rt->gcThread and each
|
|
|
|
* other thread must be either outside all requests or blocked waiting for GC
|
|
|
|
* to finish. Note that the caller does not hold rt->gcLock.
|
|
|
|
* If comp is set, we perform a single-compartment GC.
|
2010-04-08 05:54:18 -07:00
|
|
|
*/
|
|
|
|
static void
MarkAndSweep(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind GCTIMER_PARAM)
{
    /* A non-null |comp| requests a single-compartment GC (see header comment). */
    JS_ASSERT_IF(comp, gckind != GC_LAST_CONTEXT);
    JS_ASSERT_IF(comp, comp != comp->rt->atomsCompartment);
    JS_ASSERT_IF(comp, comp->rt->gcMode == JSGC_MODE_COMPARTMENT);

    JSRuntime *rt = cx->runtime;
    rt->gcNumber++;

    /* Clear gcIsNeeded now, when we are about to start a normal GC cycle. */
    rt->gcIsNeeded = false;
    rt->gcTriggerCompartment = NULL;

    /* Reset malloc counter. */
    rt->resetGCMallocBytes();

    /*
     * Reset the property cache's type id generator so we can compress ids.
     * Same for the protoHazardShape proxy-shape standing in for all object
     * prototypes having readonly or setter properties.
     */
    if (rt->shapeGen & SHAPE_OVERFLOW_BIT || (rt->gcZeal() && !rt->gcCurrentCompartment)) {
        rt->gcRegenShapes = true;
        rt->shapeGen = 0;
        rt->protoHazardShape = 0;
    }

    /* Purge caches in every compartment, then per-thread and per-context state. */
    PER_COMPARTMENT_OP(rt, c->purge(cx));

    js_PurgeThreads(cx);
    {
        JSContext *iter = NULL;
        while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter))
            acx->purge();
    }

    /* Shape regeneration is incompatible with a single-compartment GC. */
    JS_ASSERT_IF(comp, !rt->gcRegenShapes);

    /*
     * Mark phase.
     */
    GCTIMESTAMP(startMark);
    GCMarker gcmarker(cx);
    JS_ASSERT(IS_GC_MARKING_TRACER(&gcmarker));
    JS_ASSERT(gcmarker.getMarkColor() == BLACK);
    rt->gcMarkingTracer = &gcmarker;

    /* Clear the mark bitmaps of all chunks before marking begins. */
    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
        r.front()->bitmap.clear();

    /*
     * For a per-compartment GC, things referenced from other compartments
     * (cross-compartment wrappers, debugger object referents) act as
     * additional roots.
     */
    if (comp) {
        for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
            (*c)->markCrossCompartmentWrappers(&gcmarker);
        Debugger::markCrossCompartmentDebuggerObjectReferents(&gcmarker);
    }

    MarkRuntime(&gcmarker);
    gcmarker.drainMarkStack();

    /*
     * Mark weak roots. Iterate until a fixed point: marking one table can
     * make entries of another reachable.
     */
    while (WatchpointMap::markAllIteratively(&gcmarker) ||
           WeakMapBase::markAllIteratively(&gcmarker) ||
           Debugger::markAllIteratively(&gcmarker, gckind))
    {
        gcmarker.drainMarkStack();
    }

    rt->gcMarkingTracer = NULL;

    if (rt->gcCallback)
        (void) rt->gcCallback(cx, JSGC_MARK_END);

#ifdef DEBUG
    /* Make sure that we didn't mark an object in another compartment */
    if (comp) {
        for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
            JS_ASSERT_IF(*c != comp && *c != rt->atomsCompartment, checkArenaListAllUnmarked(*c));
    }
#endif

    /*
     * Sweep phase.
     *
     * Finalize as we sweep, outside of rt->gcLock but with rt->gcRunning set
     * so that any attempt to allocate a GC-thing from a finalizer will fail,
     * rather than nest badly and leave the unmarked newborn to be swept.
     *
     * We first sweep atom state so we can use IsAboutToBeFinalized on
     * JSString held in a hashtable to check if the hashtable entry can be
     * freed. Note that even after the entry is freed, JSObject finalizers can
     * continue to access the corresponding JSString* assuming that they are
     * unique. This works since the atomization API must not be called during
     * the GC.
     */
    GCTIMESTAMP(startSweep);

    /* Finalize unreachable (key,value) pairs in all weak maps. */
    WeakMapBase::sweepAll(&gcmarker);

    js_SweepAtomState(cx);

    /* Collect watch points associated with unreachable objects. */
    WatchpointMap::sweepAll(cx);

    /*
     * We finalize objects before other GC things to ensure that object's finalizer
     * can access them even if they will be freed. Sweep the runtime's property trees
     * after finalizing objects, in case any had watchpoints referencing tree nodes.
     * Do this before sweeping compartments, so that we sweep all shapes in
     * unreachable compartments.
     */
    if (comp) {
        comp->sweep(cx, 0);
        comp->finalizeObjectArenaLists(cx);
        GCTIMESTAMP(sweepObjectEnd);
        comp->finalizeStringArenaLists(cx);
        GCTIMESTAMP(sweepStringEnd);
        comp->finalizeShapeArenaLists(cx);
        GCTIMESTAMP(sweepShapeEnd);
    } else {
        /*
         * Some sweeping is not compartment-specific. Start a NULL-compartment
         * phase to demarcate all of that. (The compartment sweeps will nest
         * within.)
         */
        Probes::GCStartSweepPhase(NULL);

        Debugger::sweepAll(cx);
        SweepCrossCompartmentWrappers(cx);
        for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); c++) {
            Probes::GCStartSweepPhase(*c);
            (*c)->finalizeObjectArenaLists(cx);
        }

        GCTIMESTAMP(sweepObjectEnd);

        for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); c++)
            (*c)->finalizeStringArenaLists(cx);

        GCTIMESTAMP(sweepStringEnd);

        for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); c++) {
            (*c)->finalizeShapeArenaLists(cx);
            Probes::GCEndSweepPhase(*c);
        }

        GCTIMESTAMP(sweepShapeEnd);
    }

#ifdef DEBUG
    PropertyTree::dumpShapes(cx);
#endif

    /*
     * Sweep script filenames after sweeping functions in the generic loop
     * above. In this way when a scripted function's finalizer destroys the
     * script and calls rt->destroyScriptHook, the hook can still access the
     * script's filename. See bug 323267.
     */
    PER_COMPARTMENT_OP(rt, js_SweepScriptFilenames(c));

    if (!comp) {
        SweepCompartments(cx, gckind);

        /* non-compartmental sweep pieces */
        Probes::GCEndSweepPhase(NULL);
    }

#ifndef JS_THREADSAFE
    /*
     * Destroy arenas after we finished the sweeping so finalizers can safely
     * use IsAboutToBeFinalized().
     * This is done on the GCHelperThread if JS_THREADSAFE is defined.
     */
    ExpireGCChunks(rt, gckind);
#endif
    GCTIMESTAMP(sweepDestroyEnd);

    if (rt->gcCallback)
        (void) rt->gcCallback(cx, JSGC_FINALIZE_END);
#ifdef DEBUG_srcnotesize
  { extern void DumpSrcNoteSizeHist();
    DumpSrcNoteSizeHist();
    printf("GC HEAP SIZE %lu\n", (unsigned long)rt->gcBytes);
  }
#endif
}
|
|
|
|
|
2010-04-22 05:31:00 -07:00
|
|
|
#ifdef JS_THREADSAFE
|
2010-05-27 07:57:55 -07:00
|
|
|
|
|
|
|
/*
|
|
|
|
* If the GC is running and we're called on another thread, wait for this GC
|
|
|
|
* activation to finish. We can safely wait here without fear of deadlock (in
|
|
|
|
* the case where we are called within a request on another thread's context)
|
|
|
|
* because the GC doesn't set rt->gcRunning until after it has waited for all
|
|
|
|
* active requests to end.
|
|
|
|
*
|
|
|
|
* We call here js_CurrentThreadId() after checking for rt->gcState to avoid
|
|
|
|
* an expensive call when the GC is not running.
|
|
|
|
*/
|
|
|
|
void
|
|
|
|
js_WaitForGC(JSRuntime *rt)
|
|
|
|
{
|
|
|
|
if (rt->gcRunning && rt->gcThread->id != js_CurrentThreadId()) {
|
|
|
|
do {
|
|
|
|
JS_AWAIT_GC_DONE(rt);
|
|
|
|
} while (rt->gcRunning);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2010-04-22 05:31:00 -07:00
|
|
|
/*
|
2010-05-14 06:55:17 -07:00
|
|
|
* GC is running on another thread. Temporarily suspend all requests running
|
|
|
|
* on the current thread and wait until the GC is done.
|
2010-04-22 05:31:00 -07:00
|
|
|
*/
|
|
|
|
static void
LetOtherGCFinish(JSContext *cx)
{
    /*
     * The GC lock must be held on entry: the JS_TRACER branch below drops it
     * temporarily with AutoUnlockGC, and JS_AWAIT_GC_DONE waits on it.
     */
    JSRuntime *rt = cx->runtime;
    JS_ASSERT(rt->gcThread);
    JS_ASSERT(cx->thread() != rt->gcThread);

    /* This thread contributes at most one unit to rt->requestCount. */
    size_t requestDebit = cx->thread()->data.requestDepth ? 1 : 0;
    JS_ASSERT(requestDebit <= rt->requestCount);
#ifdef JS_TRACER
    JS_ASSERT_IF(requestDebit == 0, !JS_ON_TRACE(cx));
#endif
    if (requestDebit != 0) {
#ifdef JS_TRACER
        if (JS_ON_TRACE(cx)) {
            /*
             * Leave trace before we decrease rt->requestCount and notify the
             * GC. Otherwise the GC may start immediately after we unlock while
             * this thread is still on trace.
             */
            AutoUnlockGC unlock(rt);
            LeaveTrace(cx);
        }
#endif
        rt->requestCount -= requestDebit;
        if (rt->requestCount == 0)
            JS_NOTIFY_REQUEST_DONE(rt);

        /*
         * Update the native stack before we wait so the GC thread see the
         * correct stack bounds.
         */
        RecordNativeStackTopForGC(cx);
    }

    /*
     * Check that we did not release the GC lock above and let the GC to
     * finish before we wait.
     */
    JS_ASSERT(rt->gcThread);

    /*
     * Wait for GC to finish on the other thread, even if requestDebit is 0
     * and even if GC has not started yet because the gcThread is waiting in
     * AutoGCSession. This ensures that js_GC never returns without a full GC
     * cycle happening.
     */
    do {
        JS_AWAIT_GC_DONE(rt);
    } while (rt->gcThread);

    /* Restore this thread's contribution to the request count. */
    rt->requestCount += requestDebit;
}
|
|
|
|
|
|
|
|
#endif
|
|
|
|
|
2010-08-30 11:46:18 -07:00
|
|
|
/*
 * RAII guard for a GC session: the constructor performs the stop-the-world
 * rendezvous and sets rt->gcRunning; the destructor clears the GC state and
 * wakes threads waiting for the GC to finish. See the comments on the
 * constructor definition below this declaration.
 */
class AutoGCSession {
  public:
    explicit AutoGCSession(JSContext *cx);
    ~AutoGCSession();

  private:
    /* Context whose runtime this session owns for the duration of the GC. */
    JSContext *context;

    /* Disable copy constructor or assignments */
    AutoGCSession(const AutoGCSession&);
    void operator=(const AutoGCSession&);
};
|
|
|
|
|
2010-04-22 05:31:00 -07:00
|
|
|
/*
|
2010-08-30 11:46:18 -07:00
|
|
|
* Start a new GC session. Together with LetOtherGCFinish this function
|
|
|
|
* contains the rendezvous algorithm by which we stop the world for GC.
|
2010-04-22 05:31:00 -07:00
|
|
|
*
|
2010-05-14 06:55:17 -07:00
|
|
|
* This thread becomes the GC thread. Wait for all other threads to quiesce.
|
2010-08-30 11:46:18 -07:00
|
|
|
* Then set rt->gcRunning and return.
|
2010-04-22 05:31:00 -07:00
|
|
|
*/
|
2010-08-30 11:46:18 -07:00
|
|
|
AutoGCSession::AutoGCSession(JSContext *cx)
  : context(cx)
{
    JSRuntime *rt = cx->runtime;

#ifdef JS_THREADSAFE
    /* If another thread is already collecting, wait for its cycle to end. */
    if (rt->gcThread && rt->gcThread != cx->thread())
        LetOtherGCFinish(cx);
#endif

    JS_ASSERT(!rt->gcRunning);

#ifdef JS_THREADSAFE
    /* No other thread is in GC, so indicate that we're now in GC. */
    JS_ASSERT(!rt->gcThread);
    rt->gcThread = cx->thread();

    /*
     * Notify operation callbacks on other threads, which will give them a
     * chance to yield their requests. Threads without requests perform their
     * callback at some later point, which then will be unnecessary, but
     * harmless.
     */
    for (JSThread::Map::Range r = rt->threads.all(); !r.empty(); r.popFront()) {
        JSThread *thread = r.front().value;
        if (thread != cx->thread())
            thread->data.triggerOperationCallback(rt);
    }

    /*
     * Discount the request on the current thread from contributing to
     * rt->requestCount before we wait for all other requests to finish.
     * JS_NOTIFY_REQUEST_DONE, which will wake us up, is only called on
     * rt->requestCount transitions to 0.
     */
    size_t requestDebit = cx->thread()->data.requestDepth ? 1 : 0;
    JS_ASSERT(requestDebit <= rt->requestCount);
    if (requestDebit != rt->requestCount) {
        rt->requestCount -= requestDebit;

        /* Wait until every other thread has left its request. */
        do {
            JS_AWAIT_REQUEST_DONE(rt);
        } while (rt->requestCount > 0);
        rt->requestCount += requestDebit;
    }

#endif /* JS_THREADSAFE */

    /*
     * Set rt->gcRunning here within the GC lock, and after waiting for any
     * active requests to end. This way js_WaitForGC called outside a request
     * would not block on the GC that is waiting for other requests to finish
     * with rt->gcThread set while JS_BeginRequest would do such wait.
     */
    rt->gcRunning = true;
}
|
|
|
|
|
|
|
|
/* End the current GC session and allow other threads to proceed. */
|
2010-08-30 11:46:18 -07:00
|
|
|
AutoGCSession::~AutoGCSession()
{
    /*
     * Clear gcRunning before clearing gcThread and notifying waiters, so a
     * woken thread observes a fully ended session.
     */
    JSRuntime *rt = context->runtime;
    rt->gcRunning = false;
#ifdef JS_THREADSAFE
    JS_ASSERT(rt->gcThread == context->thread());
    rt->gcThread = NULL;
    /* Wake threads blocked in JS_AWAIT_GC_DONE (js_WaitForGC et al.). */
    JS_NOTIFY_GC_DONE(rt);
#endif
}
|
|
|
|
|
2010-04-08 05:54:18 -07:00
|
|
|
/*
|
2010-05-14 06:55:17 -07:00
|
|
|
* GC, repeatedly if necessary, until we think we have not created any new
|
2011-04-25 13:05:30 -07:00
|
|
|
* garbage and no other threads are demanding more GC. We disable inlining
|
|
|
|
* to ensure that the bottom of the stack with possible GC roots recorded in
|
|
|
|
* js_GC excludes any pointers we use during the marking implementation.
|
2010-04-08 05:54:18 -07:00
|
|
|
*/
|
2011-04-25 13:05:30 -07:00
|
|
|
static JS_NEVER_INLINE void
GCCycle(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind GCTIMER_PARAM)
{
    /* Called with the GC lock held (js_GC takes AutoLockGC before calling). */
    JSRuntime *rt = cx->runtime;

    /*
     * Recursive GC is no-op and a call from another thread waits the started
     * GC cycle to finish.
     */
    if (rt->gcMarkAndSweep) {
#ifdef JS_THREADSAFE
        JS_ASSERT(rt->gcThread);
        if (rt->gcThread != cx->thread()) {
            /* We do not return until another GC finishes. */
            LetOtherGCFinish(cx);
        }
#endif
        return;
    }

    AutoGCSession gcsession(cx);

    /*
     * Don't GC if any thread is reporting an OOM. We check the flag after we
     * have set up the GC session and know that the thread that reported OOM
     * is either the current thread or waits for the GC to complete on this
     * thread.
     */
    if (rt->inOOMReport) {
        JS_ASSERT(gckind != GC_LAST_CONTEXT);
        return;
    }

    /*
     * We should not be depending on cx->compartment in the GC, so set it to
     * NULL to look for violations.
     */
    SwitchToCompartment sc(cx, (JSCompartment *)NULL);

    JS_ASSERT(!rt->gcCurrentCompartment);
    rt->gcCurrentCompartment = comp;

    rt->gcMarkAndSweep = true;
    {
        /* Run the collection proper without holding the GC lock. */
        AutoUnlockGC unlock(rt);

#ifdef JS_THREADSAFE
        /*
         * As we about to purge caches and clear the mark bits we must wait
         * for any background finalization to finish.
         */
        JS_ASSERT(!cx->gcBackgroundFree);
        rt->gcHelperThread.waitBackgroundSweepEnd(rt);
        if (gckind != GC_LAST_CONTEXT && rt->state != JSRTS_LANDING) {
            /* Route deferred frees to the helper thread for this cycle. */
            cx->gcBackgroundFree = &rt->gcHelperThread;
            cx->gcBackgroundFree->setContext(cx);
        }
#endif
        MarkAndSweep(cx, comp, gckind GCTIMER_ARG);
    }

#ifdef JS_THREADSAFE
    /* Hand the queued finalization/free work off to the background thread. */
    if (gckind != GC_LAST_CONTEXT && rt->state != JSRTS_LANDING) {
        JS_ASSERT(cx->gcBackgroundFree == &rt->gcHelperThread);
        cx->gcBackgroundFree = NULL;
        rt->gcHelperThread.startBackgroundSweep(rt, gckind);
    } else {
        JS_ASSERT(!cx->gcBackgroundFree);
    }
#endif

    rt->gcMarkAndSweep = false;
    rt->gcRegenShapes = false;
    rt->setGCLastBytes(rt->gcBytes, gckind);
    rt->gcCurrentCompartment = NULL;
    rt->gcWeakMapList = NULL;

    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
        (*c)->setGCLastBytes((*c)->gcBytes, gckind);
}
|
|
|
|
|
2011-07-07 17:31:24 -07:00
|
|
|
/*
 * Diagnostic record written raw into crash-report storage by js_GC via
 * crash::SaveCrashData; the layout is dumped byte-for-byte, so do not
 * reorder or retype the members.
 */
struct GCCrashData
{
    int isRegen;        /* nonzero when shape regeneration is pending (SHAPE_OVERFLOW_BIT set) */
    int isCompartment;  /* nonzero for a single-compartment GC */
};
|
|
|
|
|
2007-03-22 10:30:00 -07:00
|
|
|
void
js_GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind)
{
    /*
     * Top-level GC entry point: runs one GC cycle (or, for GC_LAST_CONTEXT,
     * repeats until finalizers stop creating garbage). |comp| non-null
     * requests a single-compartment collection.
     */
    JSRuntime *rt = cx->runtime;
    JS_AbortIfWrongThread(rt);

    /*
     * Don't collect garbage if the runtime isn't up, and cx is not the last
     * context in the runtime. The last context must force a GC, and nothing
     * should suppress that final collection or there may be shutdown leaks,
     * or runtime bloat until the next context is created.
     */
    if (rt->state != JSRTS_UP && gckind != GC_LAST_CONTEXT)
        return;

    /* GC cannot run while this thread is on trace. */
    if (JS_ON_TRACE(cx)) {
        JS_ASSERT(gckind != GC_LAST_CONTEXT);
        return;
    }

    RecordNativeStackTopForGC(cx);

    /* Record what kind of GC this is for post-mortem crash analysis. */
    GCCrashData crashData;
    crashData.isRegen = rt->shapeGen & SHAPE_OVERFLOW_BIT;
    crashData.isCompartment = !!comp;
    crash::SaveCrashData(crash::JS_CRASH_TAG_GC, &crashData, sizeof(crashData));

    GCTIMER_BEGIN(rt, comp);

    /* RAII pair of GC-start/GC-end probe notifications around the cycle(s). */
    struct AutoGCProbe {
        JSCompartment *comp;
        AutoGCProbe(JSCompartment *comp) : comp(comp) {
            Probes::GCStart(comp);
        }
        ~AutoGCProbe() {
            Probes::GCEnd(comp); /* background thread may still be sweeping */
        }
    } autoGCProbe(comp);

    do {
        /*
         * Let the API user decide to defer a GC if it wants to (unless this
         * is the last context). Invoke the callback regardless. Sample the
         * callback in case we are freely racing with a JS_SetGCCallback{,RT}
         * on another thread.
         */
        if (JSGCCallback callback = rt->gcCallback) {
            if (!callback(cx, JSGC_BEGIN) && gckind != GC_LAST_CONTEXT)
                return;
        }

        {
#ifdef JS_THREADSAFE
            rt->gcHelperThread.waitBackgroundSweepEnd(rt);
#endif
            /* Lock out other GC allocator and collector invocations. */
            AutoLockGC lock(rt);
            rt->gcPoke = false;
            GCCycle(cx, comp, gckind GCTIMER_ARG);
        }

        /* We re-sample the callback again as the finalizers can change it. */
        if (JSGCCallback callback = rt->gcCallback)
            (void) callback(cx, JSGC_END);

        /*
         * On shutdown, iterate until finalizers or the JSGC_END callback
         * stop creating garbage.
         */
    } while (gckind == GC_LAST_CONTEXT && rt->gcPoke);

    rt->gcNextFullGCTime = PRMJ_Now() + GC_IDLE_FULL_SPAN;

    rt->gcChunkAllocationSinceLastGC = false;
    GCTIMER_END(gckind == GC_LAST_CONTEXT);

    crash::SnapshotGCStack();
}
|
2010-05-14 06:55:17 -07:00
|
|
|
|
2010-06-17 15:23:17 -07:00
|
|
|
namespace js {
|
2010-08-30 11:46:18 -07:00
|
|
|
|
2011-05-20 03:38:31 -07:00
|
|
|
/*
 * RAII helper: on construction copies every compartment's free lists into
 * their arenas, and on destruction clears that in-arena state again —
 * presumably so code that walks arenas outside a real GC (e.g. TraceRuntime,
 * cell iteration below) sees consistent free-span information; confirm
 * against FreeLists::copyToArenas/clearInArenas.
 */
class AutoCopyFreeListToArenas {
    JSRuntime *rt;

  public:
    AutoCopyFreeListToArenas(JSRuntime *rt)
      : rt(rt) {
        for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
            (*c)->freeLists.copyToArenas();
    }

    ~AutoCopyFreeListToArenas() {
        for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
            (*c)->freeLists.clearInArenas();
    }
};
|
2010-09-24 10:54:39 -07:00
|
|
|
|
|
|
|
void
TraceRuntime(JSTracer *trc)
{
    /*
     * Trace (not mark) everything reachable in the runtime with a non-GC
     * tracer, setting up a GC session first when another thread could be
     * mutating the heap.
     */
    JS_ASSERT(!IS_GC_MARKING_TRACER(trc));
    LeaveTrace(trc->context);

#ifdef JS_THREADSAFE
    {
        JSContext *cx = trc->context;
        JSRuntime *rt = cx->runtime;
        /*
         * If we are not already inside a GC on this thread, stop the world
         * for the duration of the trace.
         */
        if (rt->gcThread != cx->thread()) {
            AutoLockGC lock(rt);
            AutoGCSession gcsession(cx);

            /* Lock already held here, hence gcUnlocked == false. */
            rt->gcHelperThread.waitBackgroundSweepEnd(rt, false);
            AutoUnlockGC unlock(rt);

            AutoCopyFreeListToArenas copy(rt);
            RecordNativeStackTopForGC(trc->context);
            MarkRuntime(trc);
            return;
        }
    }
#else
    AutoCopyFreeListToArenas copy(trc->context->runtime);
    RecordNativeStackTopForGC(trc->context);
#endif

    /*
     * Calls from inside a normal GC or a recursive calls are OK and do not
     * require session setup.
     */
    MarkRuntime(trc);
}
|
2010-09-24 10:54:39 -07:00
|
|
|
|
2011-05-19 17:11:16 -07:00
|
|
|
/*
 * Walk the entire GC heap, invoking the supplied callbacks once per
 * compartment, once per arena, and once per allocated (non-free) cell.
 * Sets up a GC session so the heap cannot change underneath the iteration.
 */
void
IterateCompartmentsArenasCells(JSContext *cx, void *data,
                               IterateCompartmentCallback compartmentCallback,
                               IterateArenaCallback arenaCallback,
                               IterateCellCallback cellCallback)
{
    CHECK_REQUEST(cx);

    LeaveTrace(cx);

    JSRuntime *rt = cx->runtime;
    /* Re-entering from inside an active GC is not supported here. */
    JS_ASSERT(!rt->gcRunning);

    AutoLockGC lock(rt);
    AutoGCSession gcsession(cx);
#ifdef JS_THREADSAFE
    /* Background sweeping mutates arenas; wait for it before iterating. */
    rt->gcHelperThread.waitBackgroundSweepEnd(rt, false);
#endif
    AutoUnlockGC unlock(rt);

    /* Push free lists into arenas so getFirstFreeSpan() below is accurate. */
    AutoCopyFreeListToArenas copy(rt);
    for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c) {
        JSCompartment *compartment = *c;
        (*compartmentCallback)(cx, data, compartment);

        /* Visit every finalize kind's arena list in this compartment. */
        for (unsigned thingKind = 0; thingKind < FINALIZE_LIMIT; thingKind++) {
            size_t traceKind = GetFinalizableTraceKind(thingKind);
            size_t thingSize = GCThingSizeMap[thingKind];
            ArenaHeader *aheader = compartment->arenas[thingKind].getHead();

            for (; aheader; aheader = aheader->next) {
                Arena *arena = aheader->getArena();
                (*arenaCallback)(cx, data, arena, traceKind, thingSize);
                FreeSpan firstSpan(aheader->getFirstFreeSpan());
                const FreeSpan *span = &firstSpan;

                /*
                 * Scan the arena thing by thing, skipping over free spans.
                 * The loop terminates from inside when it reaches the last
                 * free span (which extends to the arena's end).
                 */
                for (uintptr_t thing = arena->thingsStart(thingSize); ; thing += thingSize) {
                    JS_ASSERT(thing <= arena->thingsEnd());
                    if (thing == span->first) {
                        /* Entered a free span. The last span runs to the arena end. */
                        if (!span->hasNext())
                            break;
                        /*
                         * Jump to the span's last cell; the loop increment
                         * then moves past it to the next allocated thing.
                         */
                        thing = span->last;
                        span = span->nextSpan();
                    } else {
                        void *t = reinterpret_cast<void *>(thing);
                        (*cellCallback)(cx, data, t, traceKind, thingSize);
                    }
                }
            }
        }
    }
}
|
|
|
|
|
2011-05-22 03:09:28 -07:00
|
|
|
namespace gc {
|
|
|
|
|
|
|
|
/*
 * Allocate and initialize a new compartment with the given principals and
 * register it with the runtime. Returns NULL (after reporting OOM where
 * appropriate) on any failure; the partially-built compartment is deleted.
 */
JSCompartment *
NewCompartment(JSContext *cx, JSPrincipals *principals)
{
    JSRuntime *rt = cx->runtime;
    JS_AbortIfWrongThread(rt);

    JSCompartment *compartment = cx->new_<JSCompartment>(rt);
    if (compartment && compartment->init()) {
        // Any compartment with the trusted principals -- and there can be
        // multiple -- is a system compartment.
        compartment->isSystemCompartment = principals && rt->trustedPrincipals() == principals;
        if (principals) {
            compartment->principals = principals;
            /* Take a reference; released when the compartment dies. */
            JSPRINCIPALS_HOLD(cx, principals);
        }

        /* Seed the GC trigger accounting with a small initial byte count. */
        compartment->setGCLastBytes(8192, GC_NORMAL);

        /*
         * Before reporting the OOM condition, |lock| needs to be cleaned up,
         * hence the scoping.
         */
        {
            AutoLockGC lock(rt);
            if (rt->compartments.append(compartment))
                return compartment;
        }

        js_ReportOutOfMemory(cx);
    }
    /* Allocation, init(), or registration failed: discard and signal failure. */
    Foreground::delete_(compartment);
    return NULL;
}
|
|
|
|
|
2011-06-01 17:48:52 -07:00
|
|
|
void
|
|
|
|
RunDebugGC(JSContext *cx)
|
|
|
|
{
|
|
|
|
#ifdef JS_GC_ZEAL
|
|
|
|
if (IsGCAllowed(cx)) {
|
|
|
|
JSRuntime *rt = cx->runtime;
|
|
|
|
|
|
|
|
/*
|
|
|
|
* If rt->gcDebugCompartmentGC is true, only GC the current
|
|
|
|
* compartment. But don't GC the atoms compartment.
|
|
|
|
*/
|
|
|
|
rt->gcTriggerCompartment = rt->gcDebugCompartmentGC ? cx->compartment : NULL;
|
|
|
|
if (rt->gcTriggerCompartment == rt->atomsCompartment)
|
|
|
|
rt->gcTriggerCompartment = NULL;
|
|
|
|
|
|
|
|
RunLastDitchGC(cx);
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
}
|
|
|
|
|
2011-05-22 03:09:28 -07:00
|
|
|
} /* namespace gc */
|
|
|
|
|
2010-09-24 10:54:39 -07:00
|
|
|
} /* namespace js */
|
2011-07-07 15:40:33 -07:00
|
|
|
|
|
|
|
#if JS_HAS_XML_SUPPORT
|
|
|
|
extern size_t sE4XObjectsCreated;
|
|
|
|
|
|
|
|
/*
 * Allocate a new GC-managed JSXML cell, counting E4X object creation when
 * the running code does not have trusted principals.
 */
JSXML *
js_NewGCXML(JSContext *cx)
{
    bool trusted = cx->runningWithTrustedPrincipals();
    if (!trusted)
        ++sE4XObjectsCreated;

    return NewGCThing<JSXML>(cx, js::gc::FINALIZE_XML, sizeof(JSXML));
}
|
|
|
|
#endif
|