2009-06-10 18:29:44 -07:00
|
|
|
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
|
2007-03-22 10:30:00 -07:00
|
|
|
* vim: set ts=8 sw=4 et tw=78:
|
|
|
|
*
|
|
|
|
* ***** BEGIN LICENSE BLOCK *****
|
|
|
|
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
|
|
|
|
*
|
|
|
|
* The contents of this file are subject to the Mozilla Public License Version
|
|
|
|
* 1.1 (the "License"); you may not use this file except in compliance with
|
|
|
|
* the License. You may obtain a copy of the License at
|
|
|
|
* http://www.mozilla.org/MPL/
|
|
|
|
*
|
|
|
|
* Software distributed under the License is distributed on an "AS IS" basis,
|
|
|
|
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
|
|
|
|
* for the specific language governing rights and limitations under the
|
|
|
|
* License.
|
|
|
|
*
|
|
|
|
* The Original Code is Mozilla Communicator client code, released
|
|
|
|
* March 31, 1998.
|
|
|
|
*
|
|
|
|
* The Initial Developer of the Original Code is
|
|
|
|
* Netscape Communications Corporation.
|
|
|
|
* Portions created by the Initial Developer are Copyright (C) 1998
|
|
|
|
* the Initial Developer. All Rights Reserved.
|
|
|
|
*
|
|
|
|
* Contributor(s):
|
|
|
|
*
|
|
|
|
* Alternatively, the contents of this file may be used under the terms of
|
|
|
|
* either of the GNU General Public License Version 2 or later (the "GPL"),
|
|
|
|
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
|
|
|
|
* in which case the provisions of the GPL or the LGPL are applicable instead
|
|
|
|
* of those above. If you wish to allow use of your version of this file only
|
|
|
|
* under the terms of either the GPL or the LGPL, and not to allow others to
|
|
|
|
* use your version of this file under the terms of the MPL, indicate your
|
|
|
|
* decision by deleting the provisions above and replace them with the notice
|
|
|
|
* and other provisions required by the GPL or the LGPL. If you do not delete
|
|
|
|
* the provisions above, a recipient may use your version of this file under
|
|
|
|
* the terms of any one of the MPL, the GPL or the LGPL.
|
|
|
|
*
|
|
|
|
* ***** END LICENSE BLOCK ***** */
|
|
|
|
|
|
|
|
/*
|
|
|
|
* JS Mark-and-Sweep Garbage Collector.
|
|
|
|
*
|
|
|
|
* This GC allocates fixed-sized things with sizes up to GC_NBYTES_MAX (see
|
|
|
|
* jsgc.h). It allocates from a special GC arena pool with each arena allocated
|
|
|
|
* using malloc. It uses an ideally parallel array of flag bytes to hold the
|
|
|
|
* mark bit, finalizer type index, etc.
|
|
|
|
*
|
|
|
|
* XXX swizzle page to freelist for better locality of reference
|
|
|
|
*/
|
|
|
|
#include <stdlib.h> /* for free */
|
2008-05-28 19:07:32 -07:00
|
|
|
#include <math.h>
|
2007-03-22 10:30:00 -07:00
|
|
|
#include <string.h> /* for memset used when DEBUG */
|
|
|
|
#include "jstypes.h"
|
2009-03-18 11:38:16 -07:00
|
|
|
#include "jsstdint.h"
|
2007-03-22 10:30:00 -07:00
|
|
|
#include "jsutil.h" /* Added by JSIFY */
|
|
|
|
#include "jshash.h" /* Added by JSIFY */
|
|
|
|
#include "jsbit.h"
|
|
|
|
#include "jsclist.h"
|
2008-05-28 19:07:32 -07:00
|
|
|
#include "jsprf.h"
|
|
|
|
#include "jsapi.h"
|
|
|
|
#include "jsatom.h"
|
2007-03-22 10:30:00 -07:00
|
|
|
#include "jscntxt.h"
|
2008-09-05 10:19:17 -07:00
|
|
|
#include "jsversion.h"
|
2007-03-22 10:30:00 -07:00
|
|
|
#include "jsdbgapi.h"
|
|
|
|
#include "jsexn.h"
|
|
|
|
#include "jsfun.h"
|
|
|
|
#include "jsgc.h"
|
2010-04-12 13:59:19 -07:00
|
|
|
#include "jsgcchunk.h"
|
2007-03-22 10:30:00 -07:00
|
|
|
#include "jsinterp.h"
|
|
|
|
#include "jsiter.h"
|
|
|
|
#include "jslock.h"
|
|
|
|
#include "jsnum.h"
|
|
|
|
#include "jsobj.h"
|
2007-07-08 02:03:34 -07:00
|
|
|
#include "jsparse.h"
|
2010-05-18 19:21:43 -07:00
|
|
|
#include "jsproxy.h"
|
2007-03-22 10:30:00 -07:00
|
|
|
#include "jsscope.h"
|
|
|
|
#include "jsscript.h"
|
2009-01-30 15:40:05 -08:00
|
|
|
#include "jsstaticcheck.h"
|
2007-03-22 10:30:00 -07:00
|
|
|
#include "jsstr.h"
|
2010-09-01 11:24:45 -07:00
|
|
|
#include "jstask.h"
|
2008-08-06 21:56:25 -07:00
|
|
|
#include "jstracer.h"
|
2007-03-22 10:30:00 -07:00
|
|
|
|
|
|
|
#if JS_HAS_XML_SUPPORT
|
|
|
|
#include "jsxml.h"
|
|
|
|
#endif
|
|
|
|
|
2009-07-13 15:33:33 -07:00
|
|
|
#include "jsdtracef.h"
|
2010-03-28 13:34:16 -07:00
|
|
|
#include "jscntxtinlines.h"
|
2009-12-30 03:06:26 -08:00
|
|
|
#include "jsobjinlines.h"
|
2010-05-20 13:50:08 -07:00
|
|
|
#include "jshashtable.h"
|
2009-12-30 03:06:26 -08:00
|
|
|
|
2010-06-21 05:22:32 -07:00
|
|
|
#ifdef MOZ_VALGRIND
|
|
|
|
# define JS_VALGRIND
|
|
|
|
#endif
|
|
|
|
#ifdef JS_VALGRIND
|
|
|
|
# include <valgrind/memcheck.h>
|
|
|
|
#endif
|
|
|
|
|
2010-01-22 14:49:18 -08:00
|
|
|
using namespace js;
|
|
|
|
|
2009-03-05 03:12:50 -08:00
|
|
|
/*
|
2010-07-14 23:19:36 -07:00
|
|
|
* Check that JSTRACE_XML follows JSTRACE_OBJECT and JSTRACE_STRING.
|
2009-03-05 03:12:50 -08:00
|
|
|
*/
|
|
|
|
JS_STATIC_ASSERT(JSTRACE_OBJECT == 0);
|
2010-07-14 23:19:36 -07:00
|
|
|
JS_STATIC_ASSERT(JSTRACE_STRING == 1);
|
|
|
|
JS_STATIC_ASSERT(JSTRACE_XML == 2);
|
2009-03-05 03:12:50 -08:00
|
|
|
|
|
|
|
/*
|
|
|
|
* JS_IS_VALID_TRACE_KIND assumes that JSTRACE_STRING is the last non-xml
|
|
|
|
* trace kind when JS_HAS_XML_SUPPORT is false.
|
|
|
|
*/
|
|
|
|
JS_STATIC_ASSERT(JSTRACE_STRING + 1 == JSTRACE_XML);
|
|
|
|
|
2009-10-02 07:34:22 -07:00
|
|
|
/*
|
|
|
|
* Check consistency of external string constants from JSFinalizeGCThingKind.
|
|
|
|
*/
|
|
|
|
JS_STATIC_ASSERT(FINALIZE_EXTERNAL_STRING_LAST - FINALIZE_EXTERNAL_STRING0 ==
|
|
|
|
JS_EXTERNAL_STRING_LIMIT - 1);
|
2009-03-05 03:12:50 -08:00
|
|
|
|
2007-03-22 10:30:00 -07:00
|
|
|
/*
|
2010-04-12 13:59:19 -07:00
|
|
|
* GC memory is allocated in chunks. The size of each chunk is GC_CHUNK_SIZE.
|
|
|
|
* The chunk contains an array of GC arenas holding GC things, an array of
|
|
|
|
* the mark bitmaps for each arena, an array of JSGCArenaInfo arena
|
2010-08-05 05:16:56 -07:00
|
|
|
* descriptors, an array of JSGCMarkingDelay descriptors, the GCChunkInfo
|
2010-04-12 13:59:19 -07:00
|
|
|
* chunk descriptor and a bitmap indicating free arenas in the chunk. The
|
|
|
|
* following picture demonstrates the layout:
|
2010-01-14 00:27:32 -08:00
|
|
|
*
|
2010-04-12 13:59:19 -07:00
|
|
|
* +--------+--------------+-------+--------+------------+-----------------+
|
|
|
|
* | arenas | mark bitmaps | infos | delays | chunk info | free arena bits |
|
|
|
|
* +--------+--------------+-------+--------+------------+-----------------+
|
2010-01-14 00:27:32 -08:00
|
|
|
*
|
2010-04-12 13:59:19 -07:00
|
|
|
* To ensure fast O(1) lookup of mark bits and arena descriptors each chunk is
|
|
|
|
* allocated on GC_CHUNK_SIZE boundary. This way a simple mask and shift
|
|
|
|
* operation gives an arena index into the mark and JSGCArenaInfo arrays.
|
2010-01-14 00:27:32 -08:00
|
|
|
*
|
2010-04-12 13:59:19 -07:00
|
|
|
* All chunks that have at least one free arena are put on the doubly-linked
|
2010-08-05 05:16:56 -07:00
|
|
|
* list with the head stored in JSRuntime.gcChunkList. GCChunkInfo contains
|
2010-04-12 13:59:19 -07:00
|
|
|
* the head of the chunk's free arena list together with the link fields for
|
|
|
|
* gcChunkList.
|
2010-01-14 00:27:32 -08:00
|
|
|
*
|
2010-04-12 13:59:19 -07:00
|
|
|
* A GC arena contains GC_ARENA_SIZE bytes aligned on GC_ARENA_SIZE boundary
|
|
|
|
* and holds things of the same size and kind. The size of each thing in the
|
|
|
|
* arena must be divisible by GC_CELL_SIZE, the minimal allocation unit, and
|
|
|
|
* the size of the mark bitmap is fixed and is independent of the thing's
|
|
|
|
* size with one bit per each GC_CELL_SIZE bytes. For thing sizes that exceed
|
|
|
|
* GC_CELL_SIZE this implies that we waste space in the mark bitmap. The
|
|
|
|
* advantage is that we can find the mark bit for the thing using just
|
|
|
|
* integer shifts avoiding an expensive integer division. We trade some space
|
|
|
|
* for speed here.
|
2010-01-14 00:27:32 -08:00
|
|
|
*
|
2010-04-12 13:59:19 -07:00
|
|
|
* The number of arenas in the chunk is given by GC_ARENAS_PER_CHUNK. We find
|
|
|
|
* that number as follows. Suppose chunk contains n arenas. Together with the
|
2010-08-05 05:16:56 -07:00
|
|
|
* word-aligned free arena bitmap and GCChunkInfo they should fit into the
|
2010-04-12 13:59:19 -07:00
|
|
|
* chunk. Hence GC_ARENAS_PER_CHUNK or n_max is the maximum value of n for
|
|
|
|
* which the following holds:
|
2010-01-14 00:27:32 -08:00
|
|
|
*
|
|
|
|
* n*s + ceil(n/B) <= M (1)
|
|
|
|
*
|
|
|
|
* where "/" denotes normal real division,
|
|
|
|
* ceil(r) gives the least integer not smaller than the number r,
|
2010-04-12 13:59:19 -07:00
|
|
|
* s is the number of words in the GC arena, arena's mark bitmap,
|
|
|
|
* JSGCArenaInfo and JSGCMarkingDelay or GC_ARENA_ALL_WORDS.
|
2010-01-14 00:27:32 -08:00
|
|
|
* B is number of bits per word or B == JS_BITS_PER_WORD
|
2010-08-05 05:16:56 -07:00
|
|
|
* M is the number of words in the chunk without GCChunkInfo or
|
2010-04-12 13:59:19 -07:00
|
|
|
* M == (GC_CHUNK_SIZE - sizeof(JSGCArenaInfo)) / sizeof(jsuword).
|
2010-01-14 00:27:32 -08:00
|
|
|
*
|
|
|
|
* We rewrite the inequality as
|
2007-03-22 10:30:00 -07:00
|
|
|
*
|
2010-01-14 00:27:32 -08:00
|
|
|
* n*B*s/B + ceil(n/B) <= M,
|
|
|
|
* ceil(n*B*s/B + n/B) <= M,
|
|
|
|
* ceil(n*(B*s + 1)/B) <= M (2)
|
|
|
|
*
|
|
|
|
* We define a helper function e(n, s, B),
|
|
|
|
*
|
|
|
|
* e(n, s, B) := ceil(n*(B*s + 1)/B) - n*(B*s + 1)/B, 0 <= e(n, s, B) < 1.
|
2007-03-22 10:30:00 -07:00
|
|
|
*
|
2010-01-14 00:27:32 -08:00
|
|
|
* It gives:
|
|
|
|
*
|
|
|
|
* n*(B*s + 1)/B + e(n, s, B) <= M,
|
|
|
|
* n + e*B/(B*s + 1) <= M*B/(B*s + 1)
|
|
|
|
*
|
|
|
|
* We apply the floor function to both sides of the last equation, where
|
|
|
|
* floor(r) gives the biggest integer not greater than r. As a consequence we
|
|
|
|
* have:
|
2007-03-22 10:30:00 -07:00
|
|
|
*
|
2010-01-14 00:27:32 -08:00
|
|
|
* floor(n + e*B/(B*s + 1)) <= floor(M*B/(B*s + 1)),
|
|
|
|
* n + floor(e*B/(B*s + 1)) <= floor(M*B/(B*s + 1)),
|
|
|
|
* n <= floor(M*B/(B*s + 1)), (3)
|
2008-02-26 13:01:42 -08:00
|
|
|
*
|
2010-01-14 00:27:32 -08:00
|
|
|
* where floor(e*B/(B*s + 1)) is zero as e*B/(B*s + 1) < B/(B*s + 1) < 1.
|
|
|
|
* Thus any n that satisfies the original constraint (1) or its equivalent (2),
|
|
|
|
* must also satisfy (3). That is, we got an upper estimate for the maximum
|
|
|
|
* value of n. Lets show that this upper estimate,
|
2008-02-26 13:01:42 -08:00
|
|
|
*
|
2010-01-14 00:27:32 -08:00
|
|
|
* floor(M*B/(B*s + 1)), (4)
|
2007-03-22 10:30:00 -07:00
|
|
|
*
|
2010-01-14 00:27:32 -08:00
|
|
|
* also satisfies (1) and, as such, gives the required maximum value.
|
|
|
|
* Substituting it into (2) gives:
|
2008-02-26 13:01:42 -08:00
|
|
|
*
|
2010-01-14 00:27:32 -08:00
|
|
|
* ceil(floor(M*B/(B*s + 1))*(B*s + 1)/B) == ceil(floor(M/X)*X)
|
2008-02-26 13:01:42 -08:00
|
|
|
*
|
2010-01-14 00:27:32 -08:00
|
|
|
* where X == (B*s + 1)/B > 1. But then floor(M/X)*X <= M/X*X == M and
|
2007-03-22 10:30:00 -07:00
|
|
|
*
|
2010-01-14 00:27:32 -08:00
|
|
|
* ceil(floor(M/X)*X) <= ceil(M) == M.
|
2008-02-26 13:01:42 -08:00
|
|
|
*
|
2010-01-14 00:27:32 -08:00
|
|
|
* Thus the value of (4) gives the maximum n satisfying (1).
|
2008-02-26 13:01:42 -08:00
|
|
|
*
|
2010-01-14 00:27:32 -08:00
|
|
|
* For the final result we observe that in (4)
|
2008-02-26 13:01:42 -08:00
|
|
|
*
|
2010-08-05 05:16:56 -07:00
|
|
|
* M*B == (GC_CHUNK_SIZE - sizeof(GCChunkInfo)) / sizeof(jsuword) *
|
2010-04-12 13:59:19 -07:00
|
|
|
* JS_BITS_PER_WORD
|
2010-08-05 05:16:56 -07:00
|
|
|
* == (GC_CHUNK_SIZE - sizeof(GCChunkInfo)) * JS_BITS_PER_BYTE
|
2008-02-26 13:01:42 -08:00
|
|
|
*
|
2010-08-05 05:16:56 -07:00
|
|
|
* since GC_CHUNK_SIZE and sizeof(GCChunkInfo) are at least word-aligned.
|
2007-09-16 06:03:17 -07:00
|
|
|
*/
|
|
|
|
|
2010-04-22 23:58:44 -07:00
|
|
|
const jsuword GC_ARENA_SHIFT = 12;
|
|
|
|
const jsuword GC_ARENA_MASK = JS_BITMASK(GC_ARENA_SHIFT);
|
|
|
|
const jsuword GC_ARENA_SIZE = JS_BIT(GC_ARENA_SHIFT);
|
2010-01-14 00:27:32 -08:00
|
|
|
|
2010-04-22 23:58:44 -07:00
|
|
|
const jsuword GC_MAX_CHUNK_AGE = 3;
|
2010-04-13 20:14:10 -07:00
|
|
|
|
2010-01-14 00:27:32 -08:00
|
|
|
const size_t GC_CELL_SHIFT = 3;
|
|
|
|
const size_t GC_CELL_SIZE = size_t(1) << GC_CELL_SHIFT;
|
|
|
|
const size_t GC_CELL_MASK = GC_CELL_SIZE - 1;
|
|
|
|
|
|
|
|
const size_t BITS_PER_GC_CELL = GC_CELL_SIZE * JS_BITS_PER_BYTE;
|
2009-09-17 15:40:37 -07:00
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
const size_t GC_CELLS_PER_ARENA = size_t(1) << (GC_ARENA_SHIFT - GC_CELL_SHIFT);
|
|
|
|
const size_t GC_MARK_BITMAP_SIZE = GC_CELLS_PER_ARENA / JS_BITS_PER_BYTE;
|
|
|
|
const size_t GC_MARK_BITMAP_WORDS = GC_CELLS_PER_ARENA / JS_BITS_PER_WORD;
|
|
|
|
|
|
|
|
JS_STATIC_ASSERT(sizeof(jsbitmap) == sizeof(jsuword));
|
|
|
|
|
|
|
|
JS_STATIC_ASSERT(sizeof(JSString) % GC_CELL_SIZE == 0);
|
|
|
|
JS_STATIC_ASSERT(sizeof(JSObject) % GC_CELL_SIZE == 0);
|
|
|
|
JS_STATIC_ASSERT(sizeof(JSFunction) % GC_CELL_SIZE == 0);
|
|
|
|
#ifdef JSXML
|
|
|
|
JS_STATIC_ASSERT(sizeof(JSXML) % GC_CELL_SIZE == 0);
|
|
|
|
#endif
|
|
|
|
|
2010-08-24 18:57:14 -07:00
|
|
|
#ifdef JS_GCMETER
|
|
|
|
# define METER(x) ((void) (x))
|
|
|
|
# define METER_IF(condition, x) ((void) ((condition) && (x)))
|
|
|
|
#else
|
|
|
|
# define METER(x) ((void) 0)
|
|
|
|
# define METER_IF(condition, x) ((void) 0)
|
|
|
|
#endif
|
|
|
|
|
2007-09-16 06:03:17 -07:00
|
|
|
/*
 * Per-arena bookkeeping. Instances live in an array inside the chunk at
 * GC_ARENA_INFO_ARRAY_OFFSET (see the chunk layout comment above), not in
 * the arena itself, so the arena proper holds only GC things.
 */
struct JSGCArenaInfo {
    /*
     * Allocation list for the arena.
     */
    JSGCArenaList   *list;

    /*
     * Pointer to the previous arena in a linked list. The arena can either
     * belong to one of JSContext.gcArenaList lists or, when it does not have
     * any allocated GC things, to the list of free arenas in the chunk with
     * head stored in GCChunkInfo.lastFreeArena.
     */
    JSGCArena       *prev;

    /* Head of the list of free things in this arena. */
    JSGCThing       *freeList;

    /* Map a GC thing's address to the descriptor of its arena. */
    static inline JSGCArenaInfo *fromGCThing(void* thing);
};
|
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
/* See comments before ThingsPerUnmarkedBit below. */
/*
 * Per-arena state for delayed marking. The details are documented at
 * ThingsPerUnmarkedBit (not visible here); presumably `link` chains arenas
 * whose children still need marking — confirm against that comment.
 */
struct JSGCMarkingDelay {
    JSGCArena       *link;
    jsuword         unmarkedChildren;
};
|
2010-01-14 00:27:32 -08:00
|
|
|
|
|
|
|
/*
 * A GC arena: GC_ARENA_SIZE bytes, allocated on a GC_ARENA_SIZE boundary
 * inside a chunk, holding GC things of a single size. All metadata (info,
 * marking delay, mark bitmap) lives in sibling arrays inside the chunk and
 * is reached via address arithmetic on the arena's own address.
 */
struct JSGCArena {
    uint8 data[GC_ARENA_SIZE];

    /* Assert the arena's alignment invariant. */
    void checkAddress() const {
        JS_ASSERT(!(reinterpret_cast<jsuword>(this) & GC_ARENA_MASK));
    }

    /* The arena's start address as an integer. */
    jsuword toPageStart() const {
        checkAddress();
        return reinterpret_cast<jsuword>(this);
    }

    /* Recover the arena that contains the given GC thing. */
    static inline JSGCArena *fromGCThing(void* thing);

    /* Arena at the given index of a GC_CHUNK_SIZE-aligned chunk. */
    static inline JSGCArena *fromChunkAndIndex(jsuword chunk, size_t index);

    /* Start address of the chunk containing this arena. */
    jsuword getChunk() {
        return toPageStart() & ~GC_CHUNK_MASK;
    }

    /* Index of this arena within its chunk. */
    jsuword getIndex() {
        return (toPageStart() & GC_CHUNK_MASK) >> GC_ARENA_SHIFT;
    }

    /* This arena's entry in the chunk's JSGCArenaInfo array. */
    inline JSGCArenaInfo *getInfo();

    /* This arena's entry in the chunk's JSGCMarkingDelay array. */
    inline JSGCMarkingDelay *getMarkingDelay();

    /* This arena's slice of the chunk's mark bitmap. */
    inline jsbitmap *getMarkBitmap();
};
|
2010-04-12 10:15:30 -07:00
|
|
|
|
2010-08-05 05:16:56 -07:00
|
|
|
namespace js {
|
|
|
|
|
|
|
|
/*
 * Per-chunk descriptor, stored inside the chunk itself at
 * GC_CHUNK_INFO_OFFSET, just before the free-arena bitmap (see the chunk
 * layout comment above).
 */
struct GCChunkInfo {
    JSRuntime       *runtime;
    /* Free arenas remaining; checked before scanning the free bitmap. */
    size_t          numFreeArenas;
    /* Age counter, presumably compared against GC_MAX_CHUNK_AGE — confirm at use site. */
    size_t          gcChunkAge;

    /* Initialize a freshly allocated chunk: owned by rt, all arenas free. */
    inline void init(JSRuntime *rt);

    /* Bitmap of free arenas (bit set == free), stored right after this struct. */
    inline jsbitmap *getFreeArenaBitmap();

    /* Start address of the chunk containing this descriptor. */
    inline jsuword getChunk();

    /* Zero the mark bitmaps of every arena in this chunk. */
    inline void clearMarkBitmap();

    /* Map an aligned chunk address to its descriptor. */
    static inline GCChunkInfo *fromChunk(jsuword chunk);
};
|
2007-09-16 06:03:17 -07:00
|
|
|
|
2010-08-05 05:16:56 -07:00
|
|
|
} /* namespace js */
|
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
/* Check that all chunk arrays at least word-aligned. */
|
2010-01-14 00:27:32 -08:00
|
|
|
JS_STATIC_ASSERT(sizeof(JSGCArena) == GC_ARENA_SIZE);
|
2010-04-12 13:59:19 -07:00
|
|
|
JS_STATIC_ASSERT(GC_MARK_BITMAP_WORDS % sizeof(jsuword) == 0);
|
|
|
|
JS_STATIC_ASSERT(sizeof(JSGCArenaInfo) % sizeof(jsuword) == 0);
|
|
|
|
JS_STATIC_ASSERT(sizeof(JSGCMarkingDelay) % sizeof(jsuword) == 0);
|
2010-01-14 00:27:32 -08:00
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
const size_t GC_ARENA_ALL_WORDS = (GC_ARENA_SIZE + GC_MARK_BITMAP_SIZE +
|
|
|
|
sizeof(JSGCArenaInfo) +
|
|
|
|
sizeof(JSGCMarkingDelay)) / sizeof(jsuword);
|
2009-09-17 15:40:37 -07:00
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
/* The value according (4) above. */
|
|
|
|
const size_t GC_ARENAS_PER_CHUNK =
|
2010-08-05 05:16:56 -07:00
|
|
|
(GC_CHUNK_SIZE - sizeof(GCChunkInfo)) * JS_BITS_PER_BYTE /
|
2010-04-12 13:59:19 -07:00
|
|
|
(JS_BITS_PER_WORD * GC_ARENA_ALL_WORDS + 1);
|
2009-09-17 15:40:37 -07:00
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
const size_t GC_FREE_ARENA_BITMAP_WORDS = (GC_ARENAS_PER_CHUNK +
|
|
|
|
JS_BITS_PER_WORD - 1) /
|
|
|
|
JS_BITS_PER_WORD;
|
2010-04-12 10:15:30 -07:00
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
const size_t GC_FREE_ARENA_BITMAP_SIZE = GC_FREE_ARENA_BITMAP_WORDS *
|
|
|
|
sizeof(jsuword);
|
2010-04-12 10:15:30 -07:00
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
/* Check that GC_ARENAS_PER_CHUNK indeed maximises (1). */
|
|
|
|
JS_STATIC_ASSERT(GC_ARENAS_PER_CHUNK * GC_ARENA_ALL_WORDS +
|
|
|
|
GC_FREE_ARENA_BITMAP_WORDS <=
|
2010-08-05 05:16:56 -07:00
|
|
|
(GC_CHUNK_SIZE - sizeof(GCChunkInfo)) / sizeof(jsuword));
|
2010-04-12 10:15:30 -07:00
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
JS_STATIC_ASSERT((GC_ARENAS_PER_CHUNK + 1) * GC_ARENA_ALL_WORDS +
|
|
|
|
(GC_ARENAS_PER_CHUNK + 1 + JS_BITS_PER_WORD - 1) /
|
|
|
|
JS_BITS_PER_WORD >
|
2010-08-05 05:16:56 -07:00
|
|
|
(GC_CHUNK_SIZE - sizeof(GCChunkInfo)) / sizeof(jsuword));
|
2009-09-17 15:40:37 -07:00
|
|
|
|
2010-04-12 10:15:30 -07:00
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
const size_t GC_MARK_BITMAP_ARRAY_OFFSET = GC_ARENAS_PER_CHUNK
|
|
|
|
<< GC_ARENA_SHIFT;
|
|
|
|
|
|
|
|
const size_t GC_ARENA_INFO_ARRAY_OFFSET =
|
|
|
|
GC_MARK_BITMAP_ARRAY_OFFSET + GC_MARK_BITMAP_SIZE * GC_ARENAS_PER_CHUNK;
|
|
|
|
|
|
|
|
const size_t GC_MARKING_DELAY_ARRAY_OFFSET =
|
|
|
|
GC_ARENA_INFO_ARRAY_OFFSET + sizeof(JSGCArenaInfo) * GC_ARENAS_PER_CHUNK;
|
|
|
|
|
|
|
|
const size_t GC_CHUNK_INFO_OFFSET = GC_CHUNK_SIZE - GC_FREE_ARENA_BITMAP_SIZE -
|
2010-08-05 05:16:56 -07:00
|
|
|
sizeof(GCChunkInfo);
|
2010-04-12 10:15:30 -07:00
|
|
|
|
2010-04-12 12:42:45 -07:00
|
|
|
inline jsuword
GCChunkInfo::getChunk() {
    /* The descriptor sits at a fixed offset inside its own chunk. */
    jsuword self = reinterpret_cast<jsuword>(this);
    JS_ASSERT((self & GC_CHUNK_MASK) == GC_CHUNK_INFO_OFFSET);
    return self & ~GC_CHUNK_MASK;
}
|
|
|
|
|
2010-05-18 03:01:33 -07:00
|
|
|
inline void
|
2010-08-05 05:16:56 -07:00
|
|
|
GCChunkInfo::clearMarkBitmap()
|
2010-05-18 03:01:33 -07:00
|
|
|
{
|
|
|
|
PodZero(reinterpret_cast<jsbitmap *>(getChunk() + GC_MARK_BITMAP_ARRAY_OFFSET),
|
|
|
|
GC_MARK_BITMAP_WORDS * GC_ARENAS_PER_CHUNK);
|
|
|
|
}
|
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
/* static */
|
2010-08-05 05:16:56 -07:00
|
|
|
inline GCChunkInfo *
|
|
|
|
GCChunkInfo::fromChunk(jsuword chunk) {
|
2010-04-12 13:59:19 -07:00
|
|
|
JS_ASSERT(!(chunk & GC_CHUNK_MASK));
|
|
|
|
jsuword addr = chunk | GC_CHUNK_INFO_OFFSET;
|
2010-08-05 05:16:56 -07:00
|
|
|
return reinterpret_cast<GCChunkInfo *>(addr);
|
2010-04-12 13:59:19 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
inline jsbitmap *
GCChunkInfo::getFreeArenaBitmap()
{
    /* The free-arena bitmap is laid out immediately after this descriptor. */
    jsuword bitmapStart = reinterpret_cast<jsuword>(this) + sizeof(GCChunkInfo);
    return reinterpret_cast<jsbitmap *>(bitmapStart);
}
|
2009-09-17 15:40:37 -07:00
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
inline void
GCChunkInfo::init(JSRuntime *rt)
{
    /* Reset the descriptor for a freshly allocated chunk: all arenas free. */
    runtime = rt;
    numFreeArenas = GC_ARENAS_PER_CHUNK;
    gcChunkAge = 0;

    /*
     * For simplicity we set all bits to 1 including the high bits in the
     * last word that corresponds to nonexistent arenas. This is fine since
     * the arena scans the bitmap words from lowest to highest bits and the
     * allocation checks numFreeArenas before doing the search.
     */
    memset(getFreeArenaBitmap(), 0xFF, GC_FREE_ARENA_BITMAP_SIZE);
}
|
2009-09-17 15:40:37 -07:00
|
|
|
|
2010-01-14 00:27:32 -08:00
|
|
|
/*
 * Debug-only sanity checks for a heap-allocated GC thing pointer: it is not
 * a static string, is GC_CELL_SIZE-aligned, and points into the arena area
 * of a chunk (below the metadata arrays). No-op in release builds.
 */
inline void
CheckValidGCThingPtr(void *thing)
{
#ifdef DEBUG
    JS_ASSERT(!JSString::isStatic(thing));
    jsuword addr = reinterpret_cast<jsuword>(thing);
    JS_ASSERT(!(addr & GC_CELL_MASK));
    JS_ASSERT((addr & GC_CHUNK_MASK) < GC_MARK_BITMAP_ARRAY_OFFSET);
#endif
}
|
2009-09-17 15:40:37 -07:00
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
/* static */
|
|
|
|
inline JSGCArenaInfo *
|
|
|
|
JSGCArenaInfo::fromGCThing(void* thing)
|
2010-01-14 00:27:32 -08:00
|
|
|
{
|
2010-04-12 13:59:19 -07:00
|
|
|
CheckValidGCThingPtr(thing);
|
|
|
|
jsuword addr = reinterpret_cast<jsuword>(thing);
|
|
|
|
jsuword chunk = addr & ~GC_CHUNK_MASK;
|
|
|
|
JSGCArenaInfo *array =
|
|
|
|
reinterpret_cast<JSGCArenaInfo *>(chunk | GC_ARENA_INFO_ARRAY_OFFSET);
|
|
|
|
size_t arenaIndex = (addr & GC_CHUNK_MASK) >> GC_ARENA_SHIFT;
|
|
|
|
return array + arenaIndex;
|
2010-01-14 00:27:32 -08:00
|
|
|
}
|
2009-09-17 15:40:37 -07:00
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
/* static */
|
2010-01-14 00:27:32 -08:00
|
|
|
inline JSGCArena *
|
2010-04-12 13:59:19 -07:00
|
|
|
JSGCArena::fromGCThing(void* thing)
|
2010-01-14 00:27:32 -08:00
|
|
|
{
|
2010-04-12 13:59:19 -07:00
|
|
|
CheckValidGCThingPtr(thing);
|
|
|
|
jsuword addr = reinterpret_cast<jsuword>(thing);
|
|
|
|
return reinterpret_cast<JSGCArena *>(addr & ~GC_ARENA_MASK);
|
2009-10-15 23:10:54 -07:00
|
|
|
}
|
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
/* static */
|
|
|
|
inline JSGCArena *
|
|
|
|
JSGCArena::fromChunkAndIndex(jsuword chunk, size_t index) {
|
|
|
|
JS_ASSERT(chunk);
|
|
|
|
JS_ASSERT(!(chunk & GC_CHUNK_MASK));
|
|
|
|
JS_ASSERT(index < GC_ARENAS_PER_CHUNK);
|
|
|
|
return reinterpret_cast<JSGCArena *>(chunk | (index << GC_ARENA_SHIFT));
|
2009-10-15 23:10:54 -07:00
|
|
|
}
|
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
inline JSGCArenaInfo *
JSGCArena::getInfo()
{
    /* Index into the JSGCArenaInfo array stored inside this arena's chunk. */
    jsuword slot = GC_ARENA_INFO_ARRAY_OFFSET + getIndex() * sizeof(JSGCArenaInfo);
    return reinterpret_cast<JSGCArenaInfo *>(getChunk() | slot);
}
|
2009-10-08 09:08:00 -07:00
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
inline JSGCMarkingDelay *
JSGCArena::getMarkingDelay()
{
    /* Index into the JSGCMarkingDelay array stored inside this arena's chunk. */
    jsuword slot = GC_MARKING_DELAY_ARRAY_OFFSET +
                   getIndex() * sizeof(JSGCMarkingDelay);
    return reinterpret_cast<JSGCMarkingDelay *>(getChunk() | slot);
}
|
2008-02-26 13:01:42 -08:00
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
inline jsbitmap *
JSGCArena::getMarkBitmap()
{
    /* Each arena owns GC_MARK_BITMAP_SIZE bytes of the chunk's bitmap array. */
    jsuword slot = GC_MARK_BITMAP_ARRAY_OFFSET + getIndex() * GC_MARK_BITMAP_SIZE;
    return reinterpret_cast<jsbitmap *>(getChunk() | slot);
}
|
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
/*
|
|
|
|
* Helpers for GC-thing operations.
|
|
|
|
*/
|
|
|
|
|
|
|
|
inline jsbitmap *
GetGCThingMarkBit(void *thing, size_t &bitIndex)
{
    CheckValidGCThingPtr(thing);

    /*
     * The chunk-wide mark bitmap has one bit per GC_CELL_SIZE bytes, so the
     * bit index is the thing's cell offset within its chunk. Returns the
     * bitmap base; the caller combines it with bitIndex.
     */
    jsuword address = reinterpret_cast<jsuword>(thing);
    bitIndex = (address & GC_CHUNK_MASK) >> GC_CELL_SHIFT;
    jsuword chunkBase = address & ~GC_CHUNK_MASK;
    return reinterpret_cast<jsbitmap *>(chunkBase | GC_MARK_BITMAP_ARRAY_OFFSET);
}
|
|
|
|
|
2010-07-15 17:58:36 -07:00
|
|
|
/*
|
|
|
|
* Live objects are marked black. How many other additional colors are available
|
|
|
|
* depends on the size of the GCThing.
|
|
|
|
*/
|
|
|
|
static const uint32 BLACK = 0;
|
|
|
|
|
|
|
|
/*
 * A non-BLACK color is stored in the mark bits of the extra cells a
 * multi-cell thing spans (see MarkIfUnmarkedGCThing), so it is valid only
 * when it is below thingSize / GC_CELL_SIZE.
 */
static void
AssertValidColor(void *thing, uint32 color)
{
    JS_ASSERT_IF(color, color < JSGCArenaInfo::fromGCThing(thing)->list->thingSize / GC_CELL_SIZE);
}
|
|
|
|
|
2010-01-14 00:27:32 -08:00
|
|
|
inline bool
|
2010-07-15 17:58:36 -07:00
|
|
|
IsMarkedGCThing(void *thing, uint32 color = BLACK)
|
2010-04-12 10:15:30 -07:00
|
|
|
{
|
2010-07-15 17:58:36 -07:00
|
|
|
AssertValidColor(thing, color);
|
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
size_t index;
|
|
|
|
jsbitmap *markBitmap = GetGCThingMarkBit(thing, index);
|
2010-07-15 17:58:36 -07:00
|
|
|
return !!JS_TEST_BIT(markBitmap, index + color);
|
2010-04-12 10:15:30 -07:00
|
|
|
}
|
|
|
|
|
2010-07-15 17:58:36 -07:00
|
|
|
/*
|
|
|
|
* The GC always marks live objects BLACK. If color is not BLACK, we also mark
|
|
|
|
* the object with that additional color.
|
|
|
|
*/
|
2010-04-12 12:42:45 -07:00
|
|
|
inline bool
|
2010-07-15 17:58:36 -07:00
|
|
|
MarkIfUnmarkedGCThing(void *thing, uint32 color = BLACK)
|
2010-04-12 10:15:30 -07:00
|
|
|
{
|
2010-07-15 17:58:36 -07:00
|
|
|
AssertValidColor(thing, color);
|
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
size_t index;
|
|
|
|
jsbitmap *markBitmap = GetGCThingMarkBit(thing, index);
|
|
|
|
if (JS_TEST_BIT(markBitmap, index))
|
2010-04-12 12:42:45 -07:00
|
|
|
return false;
|
2010-04-12 13:59:19 -07:00
|
|
|
JS_SET_BIT(markBitmap, index);
|
2010-07-15 17:58:36 -07:00
|
|
|
if (color != BLACK)
|
|
|
|
JS_SET_BIT(markBitmap, index + color);
|
2010-04-12 12:42:45 -07:00
|
|
|
return true;
|
2010-04-12 10:15:30 -07:00
|
|
|
}
|
|
|
|
|
2010-08-24 18:57:14 -07:00
|
|
|
/* Number of things of the given size that fit in one arena. */
size_t
ThingsPerArena(size_t thingSize)
{
    /* thingSize must be a cell-size multiple no larger than an arena. */
    JS_ASSERT(!(thingSize & GC_CELL_MASK));
    JS_ASSERT(thingSize <= GC_ARENA_SIZE);
    return GC_ARENA_SIZE / thingSize;
}
|
|
|
|
|
|
|
|
/* Can only be called if thing belongs to an arena where a->list is not null. */
|
|
|
|
inline size_t
|
|
|
|
GCThingToArenaIndex(void *thing)
|
|
|
|
{
|
|
|
|
CheckValidGCThingPtr(thing);
|
|
|
|
jsuword addr = reinterpret_cast<jsuword>(thing);
|
|
|
|
jsuword offsetInArena = addr & GC_ARENA_MASK;
|
|
|
|
JSGCArenaInfo *a = JSGCArenaInfo::fromGCThing(thing);
|
|
|
|
JS_ASSERT(a->list);
|
|
|
|
JS_ASSERT(offsetInArena % a->list->thingSize == 0);
|
|
|
|
return offsetInArena / a->list->thingSize;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Can only be applicable to arena where a->list is not null. */
/* Inverse of GCThingToArenaIndex: address of the index-th thing in arena a. */
inline uint8 *
GCArenaIndexToThing(JSGCArena *a, JSGCArenaInfo *ainfo, size_t index)
{
    JS_ASSERT(a->getInfo() == ainfo);

    /*
     * We use "<=" and not "<" in the assert so index can mean the limit.
     * For the same reason we use "+", not "|" when finding the thing address
     * as the limit address can start at the next arena.
     */
    JS_ASSERT(index <= ThingsPerArena(ainfo->list->thingSize));
    jsuword offsetInArena = index * ainfo->list->thingSize;
    return reinterpret_cast<uint8 *>(a->toPageStart() + offsetInArena);
}
|
|
|
|
|
|
|
|
/*
 * The private JSGCThing struct, which describes a JSRuntime.gcFreeList element.
 */
struct JSGCThing {
    /* Next free cell in the free list (NULL terminates the list). */
    JSGCThing   *link;
};
|
|
|
|
|
2010-01-14 00:27:32 -08:00
|
|
|
/*
 * Thread a singly-linked free list through every thing-sized slot of a fresh
 * arena and return its head (the arena's first slot).
 */
static inline JSGCThing *
MakeNewArenaFreeList(JSGCArena *a, size_t thingSize)
{
    jsuword thingsStart = a->toPageStart();
    /*
     * Smallest address the last whole thing can start at: once thingPtr
     * reaches this, fewer than two full things remain, so the loop stops
     * and the final thing terminates the list below.
     */
    jsuword lastThingMinAddr = thingsStart + GC_ARENA_SIZE - thingSize * 2 + 1;
    jsuword thingPtr = thingsStart;
    do {
        jsuword nextPtr = thingPtr + thingSize;
        /* The next thing must still fit entirely inside the arena. */
        JS_ASSERT((nextPtr & GC_ARENA_MASK) + thingSize <= GC_ARENA_SIZE);
        JSGCThing *thing = reinterpret_cast<JSGCThing *>(thingPtr);
        thing->link = reinterpret_cast<JSGCThing *>(nextPtr);
        thingPtr = nextPtr;
    } while (thingPtr < lastThingMinAddr);

    /* NULL-terminate the list at the last thing. */
    JSGCThing *lastThing = reinterpret_cast<JSGCThing *>(thingPtr);
    lastThing->link = NULL;
    return reinterpret_cast<JSGCThing *>(thingsStart);
}
|
2007-03-22 10:30:00 -07:00
|
|
|
|
2010-08-05 05:16:56 -07:00
|
|
|
/*
 * Allocate a GC chunk (GC_CHUNK_SIZE-aligned, per the layout comment above)
 * and update the GC meters. Returns 0 on allocation failure.
 */
inline jsuword
GetGCChunk(JSRuntime *rt)
{
    void *p = rt->gcChunkAllocator->alloc();
#ifdef MOZ_GCTIMER
    if (p)
        JS_ATOMIC_INCREMENT(&newChunkCount);
#endif
    METER_IF(p, rt->gcStats.nchunks++);
    METER_UPDATE_MAX(rt->gcStats.maxnchunks, rt->gcStats.nchunks);
    return reinterpret_cast<jsuword>(p);
}
|
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
/*
 * Return a chunk obtained from GetGCChunk to the chunk allocator and
 * update the GC meters. chunk must be non-zero.
 */
inline void
ReleaseGCChunk(JSRuntime *rt, jsuword chunk)
{
    void *p = reinterpret_cast<void *>(chunk);
    JS_ASSERT(p);
#ifdef MOZ_GCTIMER
    JS_ATOMIC_INCREMENT(&destroyChunkCount);
#endif
    JS_ASSERT(rt->gcStats.nchunks != 0);
    METER(rt->gcStats.nchunks--);
    rt->gcChunkAllocator->free(p);
}
|
|
|
|
|
2010-01-14 00:27:32 -08:00
|
|
|
/*
 * Allocate one GC arena, taking it from a chunk with free arenas or
 * allocating a fresh chunk when none is available. Returns NULL on OOM or
 * when the GC quota check fails (after triggering a GC where possible).
 */
static JSGCArena *
NewGCArena(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;

    /* Quota check: refuse allocation (and request a GC) when over budget. */
    if (!JS_THREAD_DATA(cx)->waiveGCQuota &&
        (rt->gcBytes >= rt->gcMaxBytes ||
         rt->gcBytes > GC_HEAP_GROWTH_FACTOR * rt->gcNewArenaTriggerBytes)) {
        /*
         * FIXME bug 524051 We cannot run a last-ditch GC on trace for now, so
         * just pretend we are out of memory which will throw us off trace and
         * we will re-try this code path from the interpreter.
         */
        if (!JS_ON_TRACE(cx))
            return NULL;
        js_TriggerGC(cx, true);
    }

    if (rt->gcFreeArenaChunks.empty()) {
#ifdef DEBUG
        /* Sanity: no chunk in the set should have free arenas at this point. */
        for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
            JS_ASSERT(GCChunkInfo::fromChunk(r.front())->numFreeArenas == 0);
#endif
        /*
         * Make sure that after the GC we can append all allocated chunks to
         * gcFreeArenaChunks.
         *
         * FIXME bug 583729 - use the same for the rt->gcChunkSet.
         */
        if (!rt->gcFreeArenaChunks.reserve(rt->gcChunkSet.count() + 1))
            return NULL;
        jsuword chunk = GetGCChunk(rt);
        if (!chunk)
            return NULL;
        GCChunkInfo *ci = GCChunkInfo::fromChunk(chunk);
        ci->init(rt);

        /*
         * FIXME bug 583732 - chunk is newly allocated and cannot present in
         * the table so using ordinary lookupForAdd is suboptimal here.
         */
        GCChunkSet::AddPtr p = rt->gcChunkSet.lookupForAdd(chunk);
        JS_ASSERT(!p);
        if (!rt->gcChunkSet.add(p, chunk)) {
            ReleaseGCChunk(rt, chunk);
            return NULL;
        }
        /* Cannot fail: capacity reserved above. */
        JS_ALWAYS_TRUE(rt->gcFreeArenaChunks.append(ci));
    }

    GCChunkInfo *ci = rt->gcFreeArenaChunks.back();
    JS_ASSERT(ci->numFreeArenas);

    /* Scan the bitmap for the first non-zero bit. */
    jsbitmap *freeArenas = ci->getFreeArenaBitmap();
    size_t arenaIndex = 0;
    while (!*freeArenas) {
        arenaIndex += JS_BITS_PER_WORD;
        freeArenas++;
    }
    size_t bit = CountTrailingZeros(*freeArenas);
    arenaIndex += bit;
    JS_ASSERT(arenaIndex < GC_ARENAS_PER_CHUNK);
    JS_ASSERT(*freeArenas & (jsuword(1) << bit));

    /* Claim the arena: clear its free bit and drop the free count. */
    *freeArenas &= ~(jsuword(1) << bit);
    --ci->numFreeArenas;
    if (ci->numFreeArenas == 0) {
        JS_ASSERT(ci == rt->gcFreeArenaChunks.back());
        rt->gcFreeArenaChunks.popBack();
    }

    rt->gcBytes += GC_ARENA_SIZE;
    METER(rt->gcStats.nallarenas++);
    METER_UPDATE_MAX(rt->gcStats.maxnallarenas, rt->gcStats.nallarenas);

    return JSGCArena::fromChunkAndIndex(ci->getChunk(), arenaIndex);
}
|
|
|
|
|
2010-04-22 23:58:44 -07:00
|
|
|
/*
 * This function does not touch the arena or release its memory so code can
 * still refer into it.
 */
static void
ReleaseGCArena(JSRuntime *rt, JSGCArena *a)
{
    METER(rt->gcStats.afree++);
    JS_ASSERT(rt->gcBytes >= GC_ARENA_SIZE);
    rt->gcBytes -= GC_ARENA_SIZE;
    JS_ASSERT(rt->gcStats.nallarenas != 0);
    METER(rt->gcStats.nallarenas--);

    /* Mark the arena free in its chunk's free-arena bitmap. */
    jsuword chunk = a->getChunk();
    GCChunkInfo *ci = GCChunkInfo::fromChunk(chunk);
    JS_ASSERT(ci->numFreeArenas <= GC_ARENAS_PER_CHUNK - 1);
    jsbitmap *freeArenas = ci->getFreeArenaBitmap();
    JS_ASSERT(!JS_TEST_BIT(freeArenas, a->getIndex()));
    JS_SET_BIT(freeArenas, a->getIndex());
    ci->numFreeArenas++;

    /* A fully free chunk restarts its age count (see FreeGCChunks). */
    if (ci->numFreeArenas == GC_ARENAS_PER_CHUNK)
        ci->gcChunkAge = 0;

#ifdef DEBUG
    /* Track released arenas so FreeGCChunks can poison them in debug builds. */
    a->getInfo()->prev = rt->gcEmptyArenaList;
    rt->gcEmptyArenaList = a;
#endif
}
|
2010-04-12 13:59:19 -07:00
|
|
|
|
2010-04-22 23:58:44 -07:00
|
|
|
/*
 * Post-GC chunk maintenance: poison released arenas (debug builds), release
 * chunks that have been fully free for more than GC_MAX_CHUNK_AGE cycles,
 * and rebuild the gcFreeArenaChunks vector from the surviving chunks.
 */
static void
FreeGCChunks(JSRuntime *rt)
{
#ifdef DEBUG
    /* Overwrite every arena released during this GC with the free pattern. */
    while (rt->gcEmptyArenaList) {
        JSGCArena *next = rt->gcEmptyArenaList->getInfo()->prev;
        memset(rt->gcEmptyArenaList, JS_FREE_PATTERN, GC_ARENA_SIZE);
        rt->gcEmptyArenaList = next;
    }
#endif

    /* Remove unused chunks and rebuild gcFreeArenaChunks. */
    rt->gcFreeArenaChunks.clear();
    /* Capacity was reserved in NewGCArena, so the appends below cannot fail. */
    JS_ASSERT(rt->gcFreeArenaChunks.capacity() >= rt->gcChunkSet.count());
    for (GCChunkSet::Enum e(rt->gcChunkSet); !e.empty(); e.popFront()) {
        GCChunkInfo *ci = GCChunkInfo::fromChunk(e.front());
        JS_ASSERT(ci->runtime == rt);
        if (ci->numFreeArenas == GC_ARENAS_PER_CHUNK) {
            /* Entirely free chunk: age it, and release once it is too old. */
            if (ci->gcChunkAge > GC_MAX_CHUNK_AGE) {
                e.removeFront();
                ReleaseGCChunk(rt, ci->getChunk());
                continue;
            }
            ci->gcChunkAge++;
        }

        if (ci->numFreeArenas)
            JS_ALWAYS_TRUE(rt->gcFreeArenaChunks.append(ci));
    }
}
|
2009-09-17 15:40:37 -07:00
|
|
|
|
2009-10-02 07:34:22 -07:00
|
|
|
/*
 * Return the size in bytes of a GC thing of the given finalizable kind.
 */
static inline size_t
GetFinalizableThingSize(unsigned thingKind)
{
    JS_STATIC_ASSERT(JS_EXTERNAL_STRING_LIMIT == 8);

    JS_ASSERT(thingKind < FINALIZE_LIMIT);
    switch (thingKind) {
      case FINALIZE_OBJECT:
        return sizeof(JSObject);
      case FINALIZE_FUNCTION:
        return sizeof(JSFunction);
#if JS_HAS_XML_SUPPORT
      case FINALIZE_XML:
        return sizeof(JSXML);
#endif
      case FINALIZE_SHORT_STRING:
        return sizeof(JSShortString);
      default:
        /* FINALIZE_STRING and the eight FINALIZE_EXTERNAL_STRING* kinds. */
        return sizeof(JSString);
    }
}
|
|
|
|
|
|
|
|
/*
 * Map a finalizable thing kind to the JSTRACE_* trace kind used by the
 * tracing API.
 */
static inline size_t
GetFinalizableTraceKind(size_t thingKind)
{
    JS_STATIC_ASSERT(JS_EXTERNAL_STRING_LIMIT == 8);

    JS_ASSERT(thingKind < FINALIZE_LIMIT);
    switch (thingKind) {
      case FINALIZE_OBJECT:
      case FINALIZE_FUNCTION:
        return JSTRACE_OBJECT;
#if JS_HAS_XML_SUPPORT
      case FINALIZE_XML:
        return JSTRACE_XML;
#endif
      default:
        /* All string kinds, including the eight external-string kinds. */
        return JSTRACE_STRING;
    }
}
|
|
|
|
|
|
|
|
/*
 * Trace kind for everything allocated in the given arena; valid only for
 * arenas that belong to a finalizable-thing list.
 */
static inline size_t
GetFinalizableArenaTraceKind(JSGCArenaInfo *ainfo)
{
    JSGCArenaList *list = ainfo->list;
    JS_ASSERT(list);
    return GetFinalizableTraceKind(list->thingKind);
}
|
|
|
|
|
2010-06-04 07:22:28 -07:00
|
|
|
/*
 * Trace kind for an arbitrary arena. Currently every arena holds
 * finalizable things, so this simply forwards.
 */
static inline size_t
GetArenaTraceKind(JSGCArenaInfo *ainfo)
{
    return GetFinalizableArenaTraceKind(ainfo);
}
|
|
|
|
|
2010-04-12 13:59:19 -07:00
|
|
|
/*
 * Trace kind of a GC thing, derived from the arena it lives in.
 * The thing must not be a static string (those live outside arenas).
 */
static inline size_t
GetFinalizableThingTraceKind(void *thing)
{
    JSGCArenaInfo *ainfo = JSGCArenaInfo::fromGCThing(thing);
    return GetFinalizableArenaTraceKind(ainfo);
}
|
|
|
|
|
2007-03-22 10:30:00 -07:00
|
|
|
/*
 * Initialize the per-kind arena lists: empty lists with the thing kind and
 * per-thing size recorded for each finalizable kind.
 */
static void
InitGCArenaLists(JSRuntime *rt)
{
    for (unsigned i = 0; i != FINALIZE_LIMIT; ++i) {
        JSGCArenaList *arenaList = &rt->gcArenaList[i];
        arenaList->head = NULL;
        arenaList->cursor = NULL;
        arenaList->thingKind = i;
        arenaList->thingSize = GetFinalizableThingSize(i);
    }
}
|
|
|
|
|
|
|
|
/*
 * Tear down all arena lists and release every GC chunk back to the chunk
 * allocator. Called during runtime shutdown.
 */
static void
FinishGCArenaLists(JSRuntime *rt)
{
    for (unsigned i = 0; i < FINALIZE_LIMIT; i++) {
        rt->gcArenaList[i].head = NULL;
        rt->gcArenaList[i].cursor = NULL;
    }

    rt->gcBytes = 0;

    /* Free the memory of every chunk, then reset the bookkeeping sets. */
    for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
        ReleaseGCChunk(rt, r.front());
    rt->gcChunkSet.clear();
    rt->gcFreeArenaChunks.clear();
}
|
|
|
|
|
2007-11-14 20:16:35 -08:00
|
|
|
/*
 * Return the external-string type index (0..7) for an external string,
 * computed from its finalize kind. The string must not be static.
 */
intN
js_GetExternalStringGCType(JSString *str)
{
    /* Kind layout assumption: external-string kinds follow FINALIZE_STRING. */
    JS_STATIC_ASSERT(FINALIZE_STRING + 1 == FINALIZE_EXTERNAL_STRING0);
    JS_ASSERT(!JSString::isStatic(str));

    unsigned thingKind = JSGCArenaInfo::fromGCThing(str)->list->thingKind;
    JS_ASSERT(IsFinalizableStringKind(thingKind));
    return intN(thingKind) - intN(FINALIZE_EXTERNAL_STRING0);
}
|
|
|
|
|
|
|
|
/*
 * Public helper: trace kind of any GC thing, including static strings
 * (which have no arena and are always JSTRACE_STRING).
 */
JS_FRIEND_API(uint32)
js_GetGCThingTraceKind(void *thing)
{
    if (JSString::isStatic(thing))
        return JSTRACE_STRING;

    JSGCArenaInfo *ainfo = JSGCArenaInfo::fromGCThing(thing);
    return GetArenaTraceKind(ainfo);
}
|
|
|
|
|
2010-04-22 23:58:44 -07:00
|
|
|
/*
 * Recover the owning runtime of a GC thing via its chunk's GCChunkInfo.
 * Not valid for static strings, which live outside GC chunks.
 */
JSRuntime *
js_GetGCThingRuntime(void *thing)
{
    jsuword chunk = JSGCArena::fromGCThing(thing)->getChunk();
    return GCChunkInfo::fromChunk(chunk)->runtime;
}
|
|
|
|
|
2010-07-15 17:58:36 -07:00
|
|
|
/*
 * True if the thing is unmarked and will be finalized by the current GC.
 * Static strings are never finalized.
 */
JS_FRIEND_API(bool)
js_IsAboutToBeFinalized(void *thing)
{
    if (JSString::isStatic(thing))
        return false;

    return !IsMarkedGCThing(thing);
}
|
|
|
|
|
2010-07-15 17:58:36 -07:00
|
|
|
/*
 * True if the thing is marked with the given mark color.
 */
JS_FRIEND_API(bool)
js_GCThingIsMarked(void *thing, uint32 color)
{
    return IsMarkedGCThing(thing, color);
}
|
|
|
|
|
2007-03-22 10:30:00 -07:00
|
|
|
/*
 * Initialize the GC subsystem of a runtime: arena lists, chunk/root/lock
 * tables, the background helper thread (threadsafe builds), and the initial
 * allocation/trigger thresholds. Returns false on OOM.
 */
JSBool
js_InitGC(JSRuntime *rt, uint32 maxbytes)
{
    InitGCArenaLists(rt);

    /*
     * Make room for at least 16 chunks so the table would not grow before
     * the browser starts up.
     */
    if (!rt->gcChunkSet.init(16))
        return false;

    if (!rt->gcRootsHash.init(256))
        return false;

    if (!rt->gcLocksHash.init(256))
        return false;

#ifdef JS_THREADSAFE
    if (!rt->gcHelperThread.init())
        return false;
#endif

    /*
     * Separate gcMaxMallocBytes from gcMaxBytes but initialize to maxbytes
     * for default backward API compatibility.
     */
    rt->gcMaxBytes = maxbytes;
    rt->setGCMaxMallocBytes(maxbytes);

    rt->gcEmptyArenaPoolLifespan = 30000;

    /*
     * By default the trigger factor gets maximum possible value. This
     * means that GC will not be triggered by growth of GC memory (gcBytes).
     */
    rt->setGCTriggerFactor((uint32) -1);

    /*
     * The assigned value prevents GC from running when GC memory is too low
     * (during JS engine start).
     */
    rt->setGCLastBytes(8192);
    rt->gcNewArenaTriggerBytes = GC_ARENA_ALLOCATION_TRIGGER;

    METER(PodZero(&rt->gcStats));
    return true;
}
|
|
|
|
|
2010-06-04 07:22:28 -07:00
|
|
|
namespace js {
|
|
|
|
|
2010-08-05 05:16:56 -07:00
|
|
|
/*
 * Returns CGCT_VALID if the w can be a live GC thing and sets thing and traceKind
 * accordingly. Otherwise returns the reason for rejection.
 */
inline ConservativeGCTest
IsGCThingWord(JSRuntime *rt, jsuword w, void *&thing, uint32 &traceKind)
{
    /*
     * The conservative scanner may access words that valgrind considers as
     * undefined. To avoid false positives and not to alter valgrind view of
     * the memory we make as memcheck-defined the argument, a copy of the
     * original word. See bug 572678.
     */
#ifdef JS_VALGRIND
    VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w));
#endif

    /*
     * We assume that the compiler never uses sub-word alignment to store
     * pointers and does not tag pointers on its own. Additionally, the value
     * representation for all values and the jsid representation for GC-things
     * do not touch the low two bits. Thus any word with the low two bits set
     * is not a valid GC-thing.
     */
    JS_STATIC_ASSERT(JSID_TYPE_STRING == 0 && JSID_TYPE_OBJECT == 4);
    if (w & 0x3)
        return CGCT_LOWBITSET;

    /*
     * An object jsid has its low bits tagged. In the value representation on
     * 64-bit, the high bits are tagged.
     */
    const jsuword JSID_PAYLOAD_MASK = ~jsuword(JSID_TYPE_MASK);
#if JS_BITS_PER_WORD == 32
    jsuword payload = w & JSID_PAYLOAD_MASK;
#elif JS_BITS_PER_WORD == 64
    jsuword payload = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK;
#endif

    /* Filter 1: the word must point into a known GC chunk. */
    jsuword chunk = payload & ~GC_CHUNK_MASK;
    if (!rt->gcChunkSet.has(chunk))
        return CGCT_NOTCHUNK;

    GCChunkInfo *ci = GCChunkInfo::fromChunk(chunk);

    /* Filter 2: the offset must fall inside the arena area of the chunk. */
    if ((payload & GC_CHUNK_MASK) >= GC_MARK_BITMAP_ARRAY_OFFSET)
        return CGCT_NOTARENA;

    /* Filter 3: the arena holding the word must be in use. */
    size_t arenaIndex = (payload & GC_CHUNK_MASK) >> GC_ARENA_SHIFT;
    if (JS_TEST_BIT(ci->getFreeArenaBitmap(), arenaIndex))
        return CGCT_FREEARENA;

    JSGCArena *a = JSGCArena::fromChunkAndIndex(chunk, arenaIndex);
    JSGCArenaInfo *ainfo = a->getInfo();

    traceKind = GetFinalizableArenaTraceKind(ainfo);

    /*
     * On 64-bit we might consider using the tag bits in w to disqualify
     * additional false roots, however, the condition would have to look
     * something like:
     *
     * if ((traceKind == JSTRACE_STRING && tag > 0 && tag != JSVAL_TAG_SHIFT) ||
     *     (traceKind == JSTRACE_OBJECT && tag > 0 && tag != JSVAL_TAG_OBJECT))
     *     return CGCT_WRONGTAG;
     *
     * However, it seems like we should measure how often this actually avoids
     * false roots.
     */

    /* Round the word down to the start of a thing slot within the arena. */
    jsuword start = a->toPageStart();
    jsuword offset = payload - start;
    size_t thingSize = ainfo->list->thingSize;
    offset -= offset % thingSize;

    /*
     * If GC_ARENA_SIZE % thingSize != 0 or when thingSize is not a power
     * of two, thingSize-aligned pointer may point at the end of the last
     * thing yet be inside the arena.
     */
    if (offset + thingSize > GC_ARENA_SIZE) {
        JS_ASSERT(thingSize & (thingSize - 1));
        return CGCT_NOTARENA;
    }
    thing = (JSGCThing *) (start + offset);

    /* Make sure the thing is not on the freelist of the arena. */
    JSGCThing *cursor = ainfo->freeList;
    while (cursor) {
        JS_ASSERT((((jsuword) cursor) & GC_ARENA_MASK) % thingSize == 0);
        JS_ASSERT(!IsMarkedGCThing(cursor));

        /* If the cursor moves past the thing, it's not in the freelist. */
        if (thing < cursor)
            break;

        /* If we find it on the freelist, it's dead. */
        if (thing == cursor)
            return CGCT_NOTLIVE;
        /* The freelist is sorted by address, so the scan can stop early. */
        JS_ASSERT_IF(cursor->link, cursor < cursor->link);
        cursor = cursor->link;
    }

    return CGCT_VALID;
}
|
|
|
|
|
|
|
|
/*
 * Convenience overload: classify the word, discarding the recovered thing
 * pointer and trace kind.
 */
inline ConservativeGCTest
IsGCThingWord(JSRuntime *rt, jsuword w)
{
    void *unusedThing;
    uint32 unusedTraceKind;
    return IsGCThingWord(rt, w, unusedThing, unusedTraceKind);
}
|
2010-06-04 07:22:28 -07:00
|
|
|
|
2010-08-05 05:16:56 -07:00
|
|
|
/*
 * Conservatively treat one machine word as a potential GC-thing pointer and
 * mark the thing it resolves to, if any. Also records instrumentation for
 * conservative-root dumps and GC metering where those are compiled in.
 */
static void
MarkWordConservatively(JSTracer *trc, jsuword w)
{
    /*
     * The conservative scanner may access words that valgrind considers as
     * undefined. To avoid false positives and not to alter valgrind view of
     * the memory we make as memcheck-defined the argument, a copy of the
     * original word. See bug 572678.
     */
#ifdef JS_VALGRIND
    VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w));
#endif

    void *thing;
    uint32 traceKind;
    ConservativeGCTest test = IsGCThingWord(trc->context->runtime, w, thing, traceKind);
    if (test == CGCT_VALID) {
        Mark(trc, thing, traceKind, "machine stack");
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
        /* Remember the root so it can be dumped after the mark phase. */
        if (IS_GC_MARKING_TRACER(trc) && static_cast<GCMarker *>(trc)->conservativeDumpFileName) {
            GCMarker::ConservativeRoot root = {thing, traceKind};
            static_cast<GCMarker *>(trc)->conservativeRoots.append(root);
        }
#endif
    }

#if defined JS_DUMP_CONSERVATIVE_GC_ROOTS || defined JS_GCMETER
    /* Count each classification outcome for GC statistics. */
    if (IS_GC_MARKING_TRACER(trc))
        static_cast<GCMarker *>(trc)->conservativeStats.counter[test]++;
#endif
}
|
|
|
|
|
2010-08-05 05:16:56 -07:00
|
|
|
static void
|
|
|
|
MarkRangeConservatively(JSTracer *trc, jsuword *begin, jsuword *end)
|
2010-06-04 07:22:28 -07:00
|
|
|
{
|
|
|
|
JS_ASSERT(begin <= end);
|
2010-08-05 05:16:56 -07:00
|
|
|
for (jsuword *i = begin; i != end; ++i)
|
|
|
|
MarkWordConservatively(trc, *i);
|
2010-06-04 07:22:28 -07:00
|
|
|
}
|
|
|
|
|
2010-08-30 11:46:18 -07:00
|
|
|
/*
 * Conservatively scan one thread's native stack (between its recorded base
 * and top, in whichever direction the stack grows) plus its register
 * snapshot taken by recordStackTop().
 */
static void
MarkThreadDataConservatively(JSTracer *trc, JSThreadData *td)
{
    ConservativeGCThreadData *ctd = &td->conservativeGC;
    JS_ASSERT(ctd->hasStackToScan());
    jsuword *stackMin, *stackEnd;
#if JS_STACK_GROWTH_DIRECTION > 0
    stackMin = td->nativeStackBase;
    stackEnd = ctd->nativeStackTop;
#else
    /* Downward-growing stack: the top pointer is the lowest live word. */
    stackMin = ctd->nativeStackTop + 1;
    stackEnd = td->nativeStackBase;
#endif
    JS_ASSERT(stackMin <= stackEnd);
    MarkRangeConservatively(trc, stackMin, stackEnd);
    MarkRangeConservatively(trc, ctd->registerSnapshot.words,
                            JS_ARRAY_END(ctd->registerSnapshot.words));
}
|
|
|
|
|
2010-07-29 17:13:08 -07:00
|
|
|
/*
 * Conservatively scan a range of engine Values on the JS stack.
 */
void
MarkStackRangeConservatively(JSTracer *trc, Value *beginv, Value *endv)
{
    jsuword *begin = (jsuword *) beginv;
    jsuword *end = (jsuword *) endv;
#ifdef JS_NUNBOX32
    /*
     * With 64-bit jsvals on 32-bit systems, we can optimize a bit by
     * scanning only the payloads.
     */
    JS_ASSERT(begin <= end);
    for (jsuword *i = begin; i != end; i += 2)
        MarkWordConservatively(trc, *i);
#else
    MarkRangeConservatively(trc, begin, end);
#endif
}
|
|
|
|
|
2010-06-04 07:22:28 -07:00
|
|
|
/*
 * Conservatively mark the native stacks and register snapshots of all
 * threads known to the runtime (or the single thread data in
 * non-threadsafe builds).
 */
void
MarkConservativeStackRoots(JSTracer *trc)
{
#ifdef JS_THREADSAFE
    for (JSThread::Map::Range r = trc->context->runtime->threads.all(); !r.empty(); r.popFront()) {
        JSThread *thread = r.front().value;
        ConservativeGCThreadData *ctd = &thread->data.conservativeGC;
        if (ctd->hasStackToScan()) {
            /* A scannable stack implies an active request or a suspension. */
            JS_ASSERT_IF(!thread->requestDepth, thread->suspendCount);
            MarkThreadDataConservatively(trc, &thread->data);
        } else {
            JS_ASSERT(!thread->suspendCount);
            JS_ASSERT(thread->requestDepth <= ctd->requestThreshold);
        }
    }
#else
    MarkThreadDataConservatively(trc, &trc->context->runtime->threadData);
#endif
}
|
|
|
|
|
2010-08-30 11:46:18 -07:00
|
|
|
/*
 * Record the current native stack top and a snapshot of the CPU registers
 * (via setjmp) so the conservative scanner can treat both as potential
 * roots. Must never be inlined: a local's address is used as the stack top.
 */
JS_NEVER_INLINE void
ConservativeGCThreadData::recordStackTop()
{
    /* Update the native stack pointer if it points to a bigger stack. */
    jsuword dummy;
    nativeStackTop = &dummy;

    /* Update the register snapshot with the latest values. */
#if defined(_MSC_VER)
    /* MSVC warns about setjmp mixed with C++ object unwinding (C4611). */
# pragma warning(push)
# pragma warning(disable: 4611)
#endif
    setjmp(registerSnapshot.jmpbuf);
#if defined(_MSC_VER)
# pragma warning(pop)
#endif
}
|
|
|
|
|
2010-08-30 11:46:18 -07:00
|
|
|
/*
 * Record this thread's stack top for the conservative scanner, unless the
 * call is made outside a request (threadsafe builds), in which case the
 * previously recorded state stands.
 */
static inline void
RecordNativeStackTopForGC(JSContext *cx)
{
    ConservativeGCThreadData *ctd = &JS_THREAD_DATA(cx)->conservativeGC;

#ifdef JS_THREADSAFE
    /* Record the stack top here only if we are called from a request. */
    JS_ASSERT(cx->thread->requestDepth >= ctd->requestThreshold);
    if (cx->thread->requestDepth == ctd->requestThreshold)
        return;
#endif
    ctd->recordStackTop();
}
|
|
|
|
|
|
|
|
} /* namespace js */
|
|
|
|
|
2007-03-22 10:30:00 -07:00
|
|
|
#ifdef DEBUG
|
|
|
|
static void
|
|
|
|
CheckLeakedRoots(JSRuntime *rt);
|
|
|
|
#endif
|
|
|
|
|
|
|
|
/*
 * Shut down the GC subsystem: dump metering stats when compiled in, stop
 * the helper thread, release all arenas/chunks, and report any GC roots
 * that were never removed (debug builds).
 */
void
js_FinishGC(JSRuntime *rt)
{
#ifdef JS_ARENAMETER
    JS_DumpArenaStats(stdout);
#endif
#ifdef JS_GCMETER
    if (JS_WANT_GC_METER_PRINT)
        js_DumpGCStats(rt, stdout);
#endif

#ifdef JS_THREADSAFE
    rt->gcHelperThread.cancel();
#endif
    FinishGCArenaLists(rt);

#ifdef DEBUG
    if (!rt->gcRootsHash.empty())
        CheckLeakedRoots(rt);
#endif
    rt->gcRootsHash.clear();
    rt->gcLocksHash.clear();
}
|
|
|
|
|
|
|
|
/*
 * Context-taking wrapper over js_AddRootRT that reports OOM on failure.
 */
JSBool
js_AddRoot(JSContext *cx, Value *vp, const char *name)
{
    JSBool ok = js_AddRootRT(cx->runtime, Jsvalify(vp), name);
    if (!ok)
        JS_ReportOutOfMemory(cx);
    return ok;
}
|
|
|
|
|
|
|
|
/*
 * Context-taking wrapper over js_AddGCThingRootRT that reports OOM on
 * failure.
 */
JSBool
js_AddGCThingRoot(JSContext *cx, void **rp, const char *name)
{
    JSBool ok = js_AddGCThingRootRT(cx->runtime, rp, name);
    if (!ok)
        JS_ReportOutOfMemory(cx);
    return ok;
}
|
|
|
|
|
2010-07-14 23:19:36 -07:00
|
|
|
/*
 * Register the jsval at *vp as a GC root. Returns false on OOM.
 */
JS_FRIEND_API(JSBool)
js_AddRootRT(JSRuntime *rt, jsval *vp, const char *name)
{
    /*
     * Due to the long-standing, but now removed, use of rt->gcLock across the
     * bulk of js_GC, API users have come to depend on JS_AddRoot etc. locking
     * properly with a racing GC, without calling JS_AddRoot from a request.
     * We have to preserve API compatibility here, now that we avoid holding
     * rt->gcLock across the mark phase (including the root hashtable mark).
     */
    AutoLockGC lock(rt);
    js_WaitForGC(rt);

    return !!rt->gcRootsHash.put((void *)vp,
                                 RootInfo(name, JS_GC_ROOT_VALUE_PTR));
}
|
|
|
|
|
|
|
|
/*
 * Register the GC-thing pointer at *rp as a GC root. Returns false on OOM.
 */
JS_FRIEND_API(JSBool)
js_AddGCThingRootRT(JSRuntime *rt, void **rp, const char *name)
{
    /*
     * Due to the long-standing, but now removed, use of rt->gcLock across the
     * bulk of js_GC, API users have come to depend on JS_AddRoot etc. locking
     * properly with a racing GC, without calling JS_AddRoot from a request.
     * We have to preserve API compatibility here, now that we avoid holding
     * rt->gcLock across the mark phase (including the root hashtable mark).
     */
    AutoLockGC lock(rt);
    js_WaitForGC(rt);

    return !!rt->gcRootsHash.put((void *)rp,
                                 RootInfo(name, JS_GC_ROOT_GCTHING_PTR));
}
|
|
|
|
|
|
|
|
/*
 * Unregister a previously added GC root and poke the GC so the formerly
 * rooted object can be collected. Always returns JS_TRUE.
 */
JS_FRIEND_API(JSBool)
js_RemoveRoot(JSRuntime *rt, void *rp)
{
    /*
     * Due to the JS_RemoveRootRT API, we may be called outside of a request.
     * Same synchronization drill as above in js_AddRoot.
     */
    AutoLockGC lock(rt);
    js_WaitForGC(rt);
    rt->gcRootsHash.remove(rp);
    rt->gcPoke = JS_TRUE;
    return JS_TRUE;
}
|
|
|
|
|
2010-07-14 23:19:36 -07:00
|
|
|
typedef RootedValueMap::Range RootRange;
|
|
|
|
typedef RootedValueMap::Entry RootEntry;
|
|
|
|
typedef RootedValueMap::Enum RootEnum;
|
|
|
|
|
2007-03-22 10:30:00 -07:00
|
|
|
#ifdef DEBUG
|
|
|
|
|
|
|
|
/*
 * Debug-only diagnostic: print a warning to stderr for every GC root that
 * is still registered when the runtime is destroyed, then a summary.
 */
static void
CheckLeakedRoots(JSRuntime *rt)
{
    uint32 leakedroots = 0;

    /* Warn (but don't assert) debug builds of any remaining roots. */
    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
        RootEntry &entry = r.front();
        leakedroots++;
        fprintf(stderr,
                "JS engine warning: leaking GC root \'%s\' at %p\n",
                entry.value.name ? entry.value.name : "", entry.key);
    }

    if (leakedroots > 0) {
        if (leakedroots == 1) {
            fprintf(stderr,
"JS engine warning: 1 GC root remains after destroying the JSRuntime at %p.\n"
"                   This root may point to freed memory. Objects reachable\n"
"                   through it have not been finalized.\n",
                    (void *) rt);
        } else {
            fprintf(stderr,
"JS engine warning: %lu GC roots remain after destroying the JSRuntime at %p.\n"
"                   These roots may point to freed memory. Objects reachable\n"
"                   through them have not been finalized.\n",
                    (unsigned long) leakedroots, (void *) rt);
        }
    }
}
|
|
|
|
|
|
|
|
/*
 * Debug-only: invoke the callback for every registered root that has a
 * name; unnamed roots are skipped.
 */
void
js_DumpNamedRoots(JSRuntime *rt,
                  void (*dump)(const char *name, void *rp, JSGCRootType type, void *data),
                  void *data)
{
    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
        RootEntry &entry = r.front();
        if (const char *name = entry.value.name)
            dump(name, entry.key, entry.value.type, data);
    }
}
|
|
|
|
|
|
|
|
#endif /* DEBUG */
|
|
|
|
|
|
|
|
/*
 * Apply the map callback to every registered root under the GC lock.
 * The callback may request removal of the current root and/or stop the
 * iteration via JS_MAP_GCROOT_* flags. Returns the number of roots visited.
 */
uint32
js_MapGCRoots(JSRuntime *rt, JSGCRootMapFun map, void *data)
{
    AutoLockGC lock(rt);
    int ct = 0;
    for (RootEnum e(rt->gcRootsHash); !e.empty(); e.popFront()) {
        RootEntry &entry = e.front();

        ct++;
        intN mapflags = map(entry.key, entry.value.type, entry.value.name, data);

        if (mapflags & JS_MAP_GCROOT_REMOVE)
            e.removeFront();
        if (mapflags & JS_MAP_GCROOT_STOP)
            break;
    }

    return ct;
}
|
|
|
|
|
2009-08-25 14:42:42 -07:00
|
|
|
/*
 * Set the GC trigger factor, expressed as a percentage of the heap size
 * after the last GC, and re-derive gcTriggerBytes from the current
 * gcLastBytes so the new factor takes effect immediately.
 */
void
JSRuntime::setGCTriggerFactor(uint32 factor)
{
    /* A factor below 100% would schedule a GC before any growth at all. */
    JS_ASSERT(factor >= 100);

    gcTriggerFactor = factor;
    setGCLastBytes(gcLastBytes);
}
|
|
|
|
|
|
|
|
void
|
|
|
|
JSRuntime::setGCLastBytes(size_t lastBytes)
|
|
|
|
{
|
|
|
|
gcLastBytes = lastBytes;
|
|
|
|
uint64 triggerBytes = uint64(lastBytes) * uint64(gcTriggerFactor / 100);
|
|
|
|
if (triggerBytes != size_t(triggerBytes))
|
|
|
|
triggerBytes = size_t(-1);
|
|
|
|
gcTriggerBytes = size_t(triggerBytes);
|
|
|
|
}
|
|
|
|
|
2009-10-15 23:10:54 -07:00
|
|
|
/*
 * Give every non-empty per-kind free list back to its owning arena and
 * clear the local list heads, leaving this JSGCFreeLists empty.
 */
void
JSGCFreeLists::purge()
{
    /*
     * Return the free list back to the arena so the GC finalization will not
     * run the finalizers over uninitialized bytes from free things.
     */
    for (JSGCThing **p = finalizables; p != JS_ARRAY_END(finalizables); ++p) {
        JSGCThing *freeListHead = *p;
        if (freeListHead) {
            /* All things on one free list come from a single arena. */
            JSGCArenaInfo *ainfo = JSGCArenaInfo::fromGCThing(freeListHead);
            JS_ASSERT(!ainfo->freeList);
            ainfo->freeList = freeListHead;
            *p = NULL;
        }
    }
}
|
|
|
|
|
2009-11-12 03:53:25 -08:00
|
|
|
/*
 * Transfer all free lists to |another|, leaving this set empty. The
 * destination's previous contents are overwritten, not merged.
 */
void
JSGCFreeLists::moveTo(JSGCFreeLists *another)
{
    *another = *this;
    PodArrayZero(finalizables);
    JS_ASSERT(isEmpty());
}
|
|
|
|
|
2010-07-28 11:20:19 -07:00
|
|
|
static inline bool
|
|
|
|
IsGCThresholdReached(JSRuntime *rt)
|
|
|
|
{
|
|
|
|
#ifdef JS_GC_ZEAL
|
|
|
|
if (rt->gcZeal >= 1)
|
|
|
|
return true;
|
|
|
|
#endif
|
|
|
|
|
|
|
|
/*
|
|
|
|
* Since the initial value of the gcLastBytes parameter is not equal to
|
|
|
|
* zero (see the js_InitGC function) the return value is false when
|
|
|
|
* the gcBytes value is close to zero at the JS engine start.
|
|
|
|
*/
|
|
|
|
return rt->isGCMallocLimitReached() || rt->gcBytes >= rt->gcTriggerBytes;
|
|
|
|
}
|
|
|
|
|
|
|
|
/*
 * Run a GC as a last resort when an allocation request cannot be satisfied,
 * hoping to free enough memory for a retry. Caller holds rt->gcLock and
 * must not be on trace.
 */
static void
LastDitchGC(JSContext *cx)
{
    JS_ASSERT(!JS_ON_TRACE(cx));

    /* The last ditch GC preserves weak roots and all atoms. */
    AutoKeepAtoms keep(cx->runtime);

    /*
     * Keep rt->gcLock across the call into the GC so we don't starve and
     * lose to racing threads who deplete the heap just after the GC has
     * replenished it (or has synchronized with a racing GC that collected a
     * bunch of garbage). This unfair scheduling can happen on certain
     * operating systems. For the gory details, see bug 162779.
     */
    js_GC(cx, GC_LOCK_HELD);
}
|
|
|
|
|
2009-10-15 23:10:54 -07:00
|
|
|
/*
 * Replenish the calling thread's free list for |thingKind| and return its
 * new head, or NULL on OOM (after exhausting the last-ditch GC). The
 * thread's list for this kind must be empty on entry. May run a GC; on
 * success the returned list comes from an existing arena's free list or
 * from a freshly allocated arena.
 */
static JSGCThing *
RefillFinalizableFreeList(JSContext *cx, unsigned thingKind)
{
    JS_ASSERT(!JS_THREAD_DATA(cx)->gcFreeLists.finalizables[thingKind]);
    JSRuntime *rt = cx->runtime;
    JSGCArenaList *arenaList;
    JSGCArena *a;

    {
        AutoLockGC lock(rt);
        JS_ASSERT(!rt->gcRunning);
        /* Release-build safety net for the assert above. */
        if (rt->gcRunning)
            return NULL;

        /* On trace or with a waived quota we must not trigger a GC here. */
        bool canGC = !JS_ON_TRACE(cx) && !JS_THREAD_DATA(cx)->waiveGCQuota;
        bool doGC = canGC && IsGCThresholdReached(rt);
        arenaList = &rt->gcArenaList[thingKind];
        for (;;) {
            if (doGC) {
                LastDitchGC(cx);
                METER(cx->runtime->gcArenaStats[thingKind].retry++);
                /* Only one GC attempt per refill. */
                canGC = false;

                /*
                 * The JSGC_END callback can legitimately allocate new GC
                 * things and populate the free list. If that happens, just
                 * return that list head.
                 */
                JSGCThing *freeList = JS_THREAD_DATA(cx)->gcFreeLists.finalizables[thingKind];
                if (freeList)
                    return freeList;
            }

            /* Scan existing arenas (from the cursor) for a leftover free list. */
            while ((a = arenaList->cursor) != NULL) {
                JSGCArenaInfo *ainfo = a->getInfo();
                arenaList->cursor = ainfo->prev;
                JSGCThing *freeList = ainfo->freeList;
                if (freeList) {
                    ainfo->freeList = NULL;
                    return freeList;
                }
            }

            a = NewGCArena(cx);
            if (a)
                break;
            if (!canGC) {
                METER(cx->runtime->gcArenaStats[thingKind].fail++);
                return NULL;
            }
            /* Arena allocation failed; try once more after a GC. */
            doGC = true;
        }

        /*
         * Do only minimal initialization of the arena inside the GC lock. We
         * can do the rest outside the lock because no other threads will see
         * the arena until the GC is run.
         */
        JSGCArenaInfo *ainfo = a->getInfo();
        ainfo->list = arenaList;
        ainfo->prev = arenaList->head;
        ainfo->freeList = NULL;
        arenaList->head = a;
    }

    /* Outside the lock: reset delayed-marking state for the new arena. */
    JSGCMarkingDelay *markingDelay = a->getMarkingDelay();
    markingDelay->link = NULL;
    markingDelay->unmarkedChildren = 0;

    return MakeNewArenaFreeList(a, arenaList->thingSize);
}
|
|
|
|
|
2009-11-12 03:53:25 -08:00
|
|
|
/* Debug-only sanity check of a free-list node's link invariants. */
static inline void
CheckGCFreeListLink(JSGCThing *thing)
{
    /*
     * The GC things on the free lists come from one arena and the things on
     * the free list are linked in ascending address order.
     */
    JS_ASSERT_IF(thing->link,
                 JSGCArena::fromGCThing(thing) ==
                 JSGCArena::fromGCThing(thing->link));
    JS_ASSERT_IF(thing->link, thing < thing->link);
}
|
|
|
|
|
2009-10-15 23:10:54 -07:00
|
|
|
/*
 * Allocate one GC thing of the given finalizable kind. Fast path pops the
 * thread-local free list; slow path refills that list (possibly GC'ing)
 * via RefillFinalizableFreeList. Returns NULL and reports OOM on failure.
 */
void *
js_NewFinalizableGCThing(JSContext *cx, unsigned thingKind)
{
    JS_ASSERT(thingKind < FINALIZE_LIMIT);
#ifdef JS_THREADSAFE
    JS_ASSERT(cx->thread);
#endif

    /* Updates of metering counters here may not be thread-safe. */
    METER(cx->runtime->gcArenaStats[thingKind].alloc++);

    JSGCThing **freeListp =
        JS_THREAD_DATA(cx)->gcFreeLists.finalizables + thingKind;
    JSGCThing *thing = *freeListp;
    if (thing) {
        /* Fast path: pop the head of the thread-local free list. */
        *freeListp = thing->link;
        CheckGCFreeListLink(thing);
        METER(cx->runtime->gcArenaStats[thingKind].localalloc++);
        return thing;
    }

    thing = RefillFinalizableFreeList(cx, thingKind);
    if (!thing) {
        js_ReportOutOfMemory(cx);
        return NULL;
    }

    /*
     * See comments in RefillFinalizableFreeList about a possibility
     * of *freeListp == thing.
     */
    JS_ASSERT(!*freeListp || *freeListp == thing);
    *freeListp = thing->link;

    CheckGCFreeListLink(thing);

    return thing;
}
|
|
|
|
|
2007-03-22 10:30:00 -07:00
|
|
|
/*
 * Pin |thing| against collection by adding it to (or bumping its count in)
 * the runtime's lock table. A null thing is a successful no-op. Returns
 * false only on OOM while growing the table.
 */
JSBool
js_LockGCThingRT(JSRuntime *rt, void *thing)
{
    if (!thing)
        return true;

    GCLocks *locks = &rt->gcLocksHash;
    AutoLockGC lock(rt);
    GCLocks::AddPtr p = locks->lookupForAdd(thing);

    if (p) {
        /* Already locked: just bump the reference count. */
        JS_ASSERT(p->value >= 1);
        p->value++;
    } else {
        if (!locks->add(p, thing, 1))
            return false;
    }

    METER(rt->gcStats.lock++);
    return true;
}
|
|
|
|
|
2009-12-13 23:55:17 -08:00
|
|
|
void
|
2007-03-22 10:30:00 -07:00
|
|
|
js_UnlockGCThingRT(JSRuntime *rt, void *thing)
|
|
|
|
{
|
2009-09-10 04:13:59 -07:00
|
|
|
if (!thing)
|
2009-12-13 23:55:17 -08:00
|
|
|
return;
|
2007-03-22 10:30:00 -07:00
|
|
|
|
2010-04-08 05:54:18 -07:00
|
|
|
AutoLockGC lock(rt);
|
2010-05-20 13:50:08 -07:00
|
|
|
GCLocks::Ptr p = rt->gcLocksHash.lookup(thing);
|
|
|
|
|
|
|
|
if (p) {
|
2009-12-24 01:31:07 -08:00
|
|
|
rt->gcPoke = true;
|
2010-05-20 13:50:08 -07:00
|
|
|
if (--p->value == 0)
|
|
|
|
rt->gcLocksHash.remove(p);
|
|
|
|
|
2009-12-24 01:31:07 -08:00
|
|
|
METER(rt->gcStats.unlock++);
|
2007-03-22 10:30:00 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2007-04-16 23:53:37 -07:00
|
|
|
/*
 * Trace everything directly reachable from |thing| of trace kind |kind|:
 * proto/parent/class-trace for objects, base/rope links for strings, and
 * the XML subtree when XML support is compiled in.
 */
JS_PUBLIC_API(void)
JS_TraceChildren(JSTracer *trc, void *thing, uint32 kind)
{
    switch (kind) {
      case JSTRACE_OBJECT: {
        /* If obj has no map, it must be a newborn. */
        JSObject *obj = (JSObject *) thing;
        if (!obj->map)
            break;
        if (JSObject *proto = obj->getProto())
            JS_CALL_OBJECT_TRACER(trc, proto, "proto");
        if (JSObject *parent = obj->getParent())
            JS_CALL_OBJECT_TRACER(trc, parent, "parent");
        /* Prefer the class-specific trace hook, falling back to the default. */
        JSTraceOp op = obj->getOps()->trace;
        (op ? op : js_TraceObject)(trc, obj);
        break;
      }

      case JSTRACE_STRING: {
        JSString *str = (JSString *) thing;
        if (str->isDependent())
            JS_CALL_STRING_TRACER(trc, str->dependentBase(), "base");
        else if (str->isRope()) {
            if (str->isInteriorNode())
                JS_CALL_STRING_TRACER(trc, str->interiorNodeParent(), "parent");
            JS_CALL_STRING_TRACER(trc, str->ropeLeft(), "left child");
            JS_CALL_STRING_TRACER(trc, str->ropeRight(), "right child");
        }
        break;
      }

#if JS_HAS_XML_SUPPORT
      case JSTRACE_XML:
        js_TraceXML(trc, (JSXML *)thing);
        break;
#endif
    }
}
|
|
|
|
|
2010-07-26 11:44:04 -07:00
|
|
|
namespace js {
|
|
|
|
|
2007-03-22 10:30:00 -07:00
|
|
|
/*
|
2009-12-13 23:55:17 -08:00
|
|
|
* When the native stack is low, the GC does not call JS_TraceChildren to mark
|
|
|
|
* the reachable "children" of the thing. Rather the thing is put aside and
|
|
|
|
* JS_TraceChildren is called later with more space on the C stack.
|
|
|
|
*
|
|
|
|
* To implement such delayed marking of the children with minimal overhead for
|
2010-04-12 13:59:19 -07:00
|
|
|
* the normal case of sufficient native stack, the code uses two fields per
|
|
|
|
* arena stored in JSGCMarkingDelay. The first field, JSGCMarkingDelay::link,
|
|
|
|
* links all arenas with delayed things into a stack list with the pointer to
|
2010-07-26 11:44:04 -07:00
|
|
|
* stack top in JSRuntime::gcUnmarkedArenaStackTop. delayMarkingChildren adds
|
|
|
|
* arenas to the stack as necessary while markDelayedChildren pops the arenas
|
2010-04-12 13:59:19 -07:00
|
|
|
* from the stack until it empties.
|
2009-12-13 23:55:17 -08:00
|
|
|
*
|
2010-04-12 13:59:19 -07:00
|
|
|
* The second field, JSGCMarkingDelay::unmarkedChildren, is a bitmap that
|
|
|
|
* tells for which things the GC should call JS_TraceChildren later. The
|
|
|
|
* bitmap is a single word. As such it does not pinpoint the delayed things
|
|
|
|
* in the arena but rather tells the intervals containing
|
|
|
|
* ThingsPerUnmarkedBit(thingSize) things. Later the code in
|
2010-07-26 11:44:04 -07:00
|
|
|
* markDelayedChildren discovers such intervals and calls JS_TraceChildren on
|
2010-04-12 13:59:19 -07:00
|
|
|
* any marked thing in the interval. This implies that JS_TraceChildren can be
|
|
|
|
* called many times for a single thing if the thing shares the same interval
|
|
|
|
* with some delayed things. This should be fine as any GC graph
|
|
|
|
* marking/traversing hooks must allow repeated calls during the same GC cycle.
|
|
|
|
* In particular, xpcom cycle collector relies on this.
|
2009-12-13 23:55:17 -08:00
|
|
|
*
|
|
|
|
* Note that such repeated scanning may slow down the GC. In particular, it is
|
|
|
|
* possible to construct an object graph where the GC calls JS_TraceChildren
|
|
|
|
* ThingsPerUnmarkedBit(thingSize) for almost all things in the graph. We
|
|
|
|
* tolerate this as the max value for ThingsPerUnmarkedBit(thingSize) is 4.
|
|
|
|
 * This is achieved for JSObject on a 32-bit system as it is exactly JSObject
|
|
|
|
* that has the smallest size among the GC things that can be delayed. On 32
|
|
|
|
* bit CPU we have less than 128 objects per 4K GC arena so each bit in
|
|
|
|
* unmarkedChildren covers 4 objects.
|
2007-03-22 10:30:00 -07:00
|
|
|
*/
|
2009-12-13 23:55:17 -08:00
|
|
|
/*
 * Number of GC things covered by one bit of JSGCMarkingDelay's
 * unmarkedChildren word: the arena's thing count divided, rounding up,
 * across the bits of a single word.
 */
inline unsigned
ThingsPerUnmarkedBit(unsigned thingSize)
{
    return JS_HOWMANY(ThingsPerArena(thingSize), JS_BITS_PER_WORD);
}
|
2007-03-22 10:30:00 -07:00
|
|
|
|
2010-08-05 05:16:56 -07:00
|
|
|
/*
 * Construct a marking tracer for one GC cycle: initialize the tracer
 * callbacks and reset delayed-marking and (debug) conservative-scan state.
 */
GCMarker::GCMarker(JSContext *cx)
  : color(0), unmarkedArenaStackTop(NULL)
{
    JS_TRACER_INIT(this, cx, NULL);
#ifdef DEBUG
    markLaterCount = 0;
#endif
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    /* Dump file name comes from the environment; stats start zeroed. */
    conservativeDumpFileName = getenv("JS_DUMP_CONSERVATIVE_GC_ROOTS");
    memset(&conservativeStats, 0, sizeof(conservativeStats));
#endif
}
|
|
|
|
|
|
|
|
/*
 * On destruction, flush any per-cycle diagnostics: optionally dump the
 * conservative roots and fold this cycle's stats into the runtime totals.
 */
GCMarker::~GCMarker()
{
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
    dumpConservativeRoots();
#endif
#ifdef JS_GCMETER
    /* Update total stats. */
    context->runtime->gcStats.conservative.add(conservativeStats);
#endif
}
|
|
|
|
|
2010-07-26 11:44:04 -07:00
|
|
|
/*
 * Record that |thing|'s children must be traced later (the C stack is too
 * low to recurse now). Sets the covering bit in the arena's
 * unmarkedChildren word and, if needed, pushes the arena onto the
 * unmarked-arena stack consumed by markDelayedChildren.
 */
void
GCMarker::delayMarkingChildren(void *thing)
{
    JS_ASSERT(this == context->runtime->gcMarkingTracer);
    JS_ASSERT(IsMarkedGCThing(thing));
    METER(context->runtime->gcStats.unmarked++);

    JSGCArena *a = JSGCArena::fromGCThing(thing);
    JSGCArenaInfo *ainfo = a->getInfo();
    JSGCMarkingDelay *markingDelay = a->getMarkingDelay();

    /* Map the thing's index in the arena to its unmarkedChildren bit. */
    size_t thingArenaIndex = GCThingToArenaIndex(thing);
    size_t unmarkedBitIndex = thingArenaIndex /
                              ThingsPerUnmarkedBit(ainfo->list->thingSize);
    JS_ASSERT(unmarkedBitIndex < JS_BITS_PER_WORD);

    jsuword bit = jsuword(1) << unmarkedBitIndex;
    if (markingDelay->unmarkedChildren != 0) {
        JS_ASSERT(unmarkedArenaStackTop);
        if (markingDelay->unmarkedChildren & bit) {
            /* bit already covers things with children to mark later. */
            return;
        }
        markingDelay->unmarkedChildren |= bit;
    } else {
        /*
         * The thing is the first thing with not yet marked children in the
         * whole arena, so push the arena on the stack of arenas with things
         * to be marked later unless the arena has already been pushed. We
         * detect that through checking markingDelay->link as the field is
         * null only for not yet pushed arenas. To ensure that
         *   markingDelay->link != NULL
         * even when the stack contains one element, we make the link
         * for the arena at the bottom point to itself.
         *
         * See comments in markDelayedChildren.
         */
        markingDelay->unmarkedChildren = bit;
        if (!markingDelay->link) {
            if (!unmarkedArenaStackTop) {
                /* Stack was empty, mark the arena as the bottom element. */
                markingDelay->link = a;
            } else {
                JS_ASSERT(unmarkedArenaStackTop->getMarkingDelay()->link);
                markingDelay->link = unmarkedArenaStackTop;
            }
            unmarkedArenaStackTop = a;
        }
        JS_ASSERT(unmarkedArenaStackTop);
    }
#ifdef DEBUG
    /* Conservative upper bound: one bit covers a whole interval of things. */
    markLaterCount += ThingsPerUnmarkedBit(ainfo->list->thingSize);
    METER_UPDATE_MAX(context->runtime->gcStats.maxunmarked, markLaterCount);
#endif
}
|
|
|
|
|
2010-07-26 11:44:04 -07:00
|
|
|
/*
 * Drain the unmarked-arena stack built by delayMarkingChildren: for each
 * arena, decode the unmarkedChildren bits into index intervals and call
 * JS_TraceChildren on every marked thing in those intervals. Tracing can
 * push new arenas (or re-dirty the current one), so arenas are only popped
 * once they are at the top of the stack with no bits left.
 */
JS_FRIEND_API(void)
GCMarker::markDelayedChildren()
{
    JS_ASSERT(this == context->runtime->gcMarkingTracer);

    JSGCArena *a = unmarkedArenaStackTop;
    if (!a) {
        JS_ASSERT(markLaterCount == 0);
        return;
    }

    for (;;) {
        /*
         * The following assert verifies that the current arena belongs to
         * the unmarked stack, since delayMarkingChildren ensures that even
         * for the stack's bottom, markingDelay->link != NULL but rather
         * points to itself.
         */
        JSGCArenaInfo *ainfo = a->getInfo();
        JSGCMarkingDelay *markingDelay = a->getMarkingDelay();
        JS_ASSERT(markingDelay->link);
        JS_ASSERT(unmarkedArenaStackTop->getMarkingDelay()->link);
        unsigned thingSize = ainfo->list->thingSize;
        unsigned traceKind = GetFinalizableArenaTraceKind(ainfo);
        unsigned indexLimit = ThingsPerArena(thingSize);
        unsigned thingsPerUnmarkedBit = ThingsPerUnmarkedBit(thingSize);

        /*
         * We cannot use a do-while loop here as unmarkedChildren can be
         * zero before the loop as a leftover from the previous iterations.
         * See comments after the loop.
         */
        while (markingDelay->unmarkedChildren != 0) {
            unsigned unmarkedBitIndex = JS_FLOOR_LOG2W(markingDelay->unmarkedChildren);
            markingDelay->unmarkedChildren &= ~(jsuword(1) << unmarkedBitIndex);
#ifdef DEBUG
            JS_ASSERT(markLaterCount >= thingsPerUnmarkedBit);
            markLaterCount -= thingsPerUnmarkedBit;
#endif
            unsigned thingIndex = unmarkedBitIndex * thingsPerUnmarkedBit;
            unsigned endIndex = thingIndex + thingsPerUnmarkedBit;

            /*
             * endIndex can go beyond the last allocated thing as the real
             * limit can be "inside" the bit.
             */
            if (endIndex > indexLimit)
                endIndex = indexLimit;
            uint8 *thing = GCArenaIndexToThing(a, ainfo, thingIndex);
            uint8 *end = GCArenaIndexToThing(a, ainfo, endIndex);
            do {
                JS_ASSERT(thing < end);
                /* Only things already marked get their children traced. */
                if (IsMarkedGCThing(thing))
                    JS_TraceChildren(this, thing, traceKind);
                thing += thingSize;
            } while (thing != end);
        }

        /*
         * We finished tracing of all things in the arena but we can only
         * pop it from the stack if the arena is the stack's top.
         *
         * When JS_TraceChildren from the above calls JS_CallTracer that in
         * turn on low C stack calls delayMarkingChildren and the latter
         * pushes new arenas to the unmarked stack, we have to skip popping
         * of this arena until it becomes the top of the stack again.
         */
        if (a == unmarkedArenaStackTop) {
            JSGCArena *aprev = markingDelay->link;
            markingDelay->link = NULL;
            if (a == aprev) {
                /*
                 * The link points to itself and we reached the bottom of
                 * the stack.
                 */
                break;
            }
            unmarkedArenaStackTop = a = aprev;
        } else {
            a = unmarkedArenaStackTop;
        }
    }
    JS_ASSERT(unmarkedArenaStackTop);
    JS_ASSERT(!unmarkedArenaStackTop->getMarkingDelay()->link);
    unmarkedArenaStackTop = NULL;
    JS_ASSERT(markLaterCount == 0);
}
|
|
|
|
|
2010-07-26 11:44:04 -07:00
|
|
|
void
|
|
|
|
GCMarker::slowifyArrays()
|
|
|
|
{
|
|
|
|
while (!arraysToSlowify.empty()) {
|
|
|
|
JSObject *obj = arraysToSlowify.back();
|
|
|
|
arraysToSlowify.popBack();
|
|
|
|
if (IsMarkedGCThing(obj))
|
|
|
|
obj->makeDenseArraySlow(context);
|
|
|
|
}
|
|
|
|
}
|
2010-07-14 23:19:36 -07:00
|
|
|
|
2010-02-18 13:16:02 -08:00
|
|
|
/*
 * Central tracing entry point for a single GC thing. For non-marking
 * tracers this just forwards to the tracer callback. For the marking
 * tracer it marks the thing and traces its children, with two special
 * cases: strings are walked iteratively (their marking never recurses),
 * and when the C stack is low the children are queued for delayed marking.
 */
void
Mark(JSTracer *trc, void *thing, uint32 kind)
{
    JS_ASSERT(thing);
    JS_ASSERT(JS_IS_VALID_TRACE_KIND(kind));
    JS_ASSERT(trc->debugPrinter || trc->debugPrintArg);
    JS_ASSERT_IF(!JSString::isStatic(thing), kind == GetFinalizableThingTraceKind(thing));
#ifdef DEBUG
    if (IS_GC_MARKING_TRACER(trc)) {
        JSRuntime *rt = trc->context->runtime;
        JS_ASSERT(rt->gcMarkingTracer == trc);
        JS_ASSERT(rt->gcRunning);
    }
#endif

    if (!IS_GC_MARKING_TRACER(trc)) {
        /* Non-marking tracer (e.g. the cycle collector): delegate. */
        trc->callback(trc, thing, kind);
    } else {
        GCMarker *gcmarker = static_cast<GCMarker *>(trc);

        if (kind == JSTRACE_STRING) {
            /*
             * Optimize for string as their marking is not recursive.
             *
             * Iterate through all nodes and leaves in the rope if this is
             * part of a rope; otherwise, we only iterate once: on the string
             * itself.
             */
            JSRopeNodeIterator iter((JSString *) thing);
            JSString *str = iter.init();
            do {
                /* Follow the dependent-base chain until a stop condition. */
                for (;;) {
                    if (JSString::isStatic(str))
                        break;
                    JS_ASSERT(kind == GetFinalizableThingTraceKind(str));
                    if (!MarkIfUnmarkedGCThing(str))
                        break;
                    if (!str->isDependent())
                        break;
                    str = str->dependentBase();
                }
                str = iter.next();
            } while (str);

        } else if (MarkIfUnmarkedGCThing(thing, gcmarker->getMarkColor())) {
            /*
             * With JS_GC_ASSUME_LOW_C_STACK defined the mark phase of GC
             * always uses the non-recursive code that otherwise would be
             * called only on a low C stack condition.
             */
#ifdef JS_GC_ASSUME_LOW_C_STACK
# define RECURSION_TOO_DEEP() true
#else
            int stackDummy;
# define RECURSION_TOO_DEEP() (!JS_CHECK_STACK_SIZE(trc->context, stackDummy))
#endif
            if (RECURSION_TOO_DEEP())
                gcmarker->delayMarkingChildren(thing);
            else
                JS_TraceChildren(trc, thing, kind);
        }
    }

#ifdef DEBUG
    /* Require JS_SET_TRACING_NAME/DETAILS before each tracer invocation. */
    trc->debugPrinter = NULL;
    trc->debugPrintArg = NULL;
#endif
}
|
|
|
|
|
|
|
|
void
|
2010-07-14 23:19:36 -07:00
|
|
|
MarkGCThing(JSTracer *trc, void *thing)
|
|
|
|
{
|
|
|
|
JS_ASSERT(size_t(thing) % JS_GCTHING_ALIGN == 0);
|
2010-09-01 11:24:45 -07:00
|
|
|
|
2010-07-14 23:19:36 -07:00
|
|
|
if (!thing)
|
2007-04-24 16:11:02 -07:00
|
|
|
return;
|
2010-07-14 23:19:36 -07:00
|
|
|
|
|
|
|
uint32 kind = js_GetGCThingTraceKind(thing);
|
|
|
|
Mark(trc, thing, kind);
|
2007-03-22 10:30:00 -07:00
|
|
|
}
|
|
|
|
|
2010-07-14 23:19:36 -07:00
|
|
|
} /* namespace js */
|
|
|
|
|
2010-05-20 13:50:08 -07:00
|
|
|
/*
 * Trace one registered root. In DEBUG builds, first verify that a root
 * holding a non-static gcthing actually points into some GC arena and
 * report a likely missing JS_RemoveRoot otherwise; then mark either the
 * gcthing pointer or the Value stored at the rooted address.
 */
static void
gc_root_traversal(JSTracer *trc, const RootEntry &entry)
{
#ifdef DEBUG
    void *ptr;
    if (entry.value.type == JS_GC_ROOT_GCTHING_PTR) {
        ptr = *reinterpret_cast<void **>(entry.key);
    } else {
        /* Value roots only need checking when they hold a gcthing. */
        Value *vp = reinterpret_cast<Value *>(entry.key);
        ptr = vp->isGCThing() ? vp->asGCThing() : NULL;
    }

    if (ptr) {
        if (!JSString::isStatic(ptr)) {
            /* Scan every arena of every list for an address match. */
            bool root_points_to_gcArenaList = false;
            jsuword thing = (jsuword) ptr;
            JSRuntime *rt = trc->context->runtime;
            for (unsigned i = 0; i != FINALIZE_LIMIT; i++) {
                JSGCArenaList *arenaList = &rt->gcArenaList[i];
                size_t thingSize = arenaList->thingSize;
                size_t limit = ThingsPerArena(thingSize) * thingSize;
                for (JSGCArena *a = arenaList->head;
                     a;
                     a = a->getInfo()->prev) {
                    if (thing - a->toPageStart() < limit) {
                        root_points_to_gcArenaList = true;
                        break;
                    }
                }
            }
            if (!root_points_to_gcArenaList && entry.value.name) {
                fprintf(stderr,
"JS API usage error: the address passed to JS_AddNamedRoot currently holds an\n"
"invalid gcthing. This is usually caused by a missing call to JS_RemoveRoot.\n"
"The root's name is \"%s\".\n",
                        entry.value.name);
            }
            JS_ASSERT(root_points_to_gcArenaList);
        }
    }
#endif
    JS_SET_TRACING_NAME(trc, entry.value.name ? entry.value.name : "root");
    if (entry.value.type == JS_GC_ROOT_GCTHING_PTR)
        MarkGCThing(trc, *reinterpret_cast<void **>(entry.key));
    else
        MarkValueRaw(trc, *reinterpret_cast<Value *>(entry.key));
}
|
|
|
|
|
2010-05-20 13:50:08 -07:00
|
|
|
static void
|
|
|
|
gc_lock_traversal(const GCLocks::Entry &entry, JSTracer *trc)
|
2007-03-22 10:30:00 -07:00
|
|
|
{
|
2007-04-16 23:53:37 -07:00
|
|
|
uint32 traceKind;
|
2007-03-22 10:30:00 -07:00
|
|
|
|
2010-05-20 13:50:08 -07:00
|
|
|
JS_ASSERT(entry.value >= 1);
|
|
|
|
traceKind = js_GetGCThingTraceKind(entry.key);
|
|
|
|
JS_CALL_TRACER(trc, entry.key, traceKind, "locked object");
|
2007-03-22 10:30:00 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
/*
 * Trace all GC things reachable from a single stack frame: its call and
 * arguments objects, its script, the |this| and return values, and the
 * scope chain. Each member is traced only when the frame actually has it.
 */
void
js_TraceStackFrame(JSTracer *trc, JSStackFrame *fp)
{
    if (fp->hasCallObj())
        JS_CALL_OBJECT_TRACER(trc, fp->getCallObj(), "call");
    if (fp->hasArgsObj())
        JS_CALL_OBJECT_TRACER(trc, fp->getArgsObj(), "arguments");
    if (fp->hasScript())
        js_TraceScript(trc, fp->getScript());

    /* Allow for primitive this parameter due to JSFUN_THISP_* flags. */
    MarkValue(trc, fp->getThisValue(), "this");
    MarkValue(trc, fp->getReturnValue(), "rval");
    if (fp->hasScopeChain())
        JS_CALL_OBJECT_TRACER(trc, fp->getScopeChain(), "scope chain");
}
|
|
|
|
|
2010-06-16 14:13:01 -07:00
|
|
|
inline void
|
|
|
|
AutoGCRooter::trace(JSTracer *trc)
|
|
|
|
{
|
|
|
|
switch (tag) {
|
|
|
|
case JSVAL:
|
2010-07-14 23:19:36 -07:00
|
|
|
MarkValue(trc, static_cast<AutoValueRooter *>(this)->val, "js::AutoValueRooter.val");
|
2010-06-16 14:13:01 -07:00
|
|
|
return;
|
|
|
|
|
2010-08-29 11:57:08 -07:00
|
|
|
case SHAPE:
|
|
|
|
static_cast<AutoShapeRooter *>(this)->shape->trace(trc);
|
2010-06-16 14:13:01 -07:00
|
|
|
return;
|
|
|
|
|
|
|
|
case PARSER:
|
|
|
|
static_cast<Parser *>(this)->trace(trc);
|
|
|
|
return;
|
|
|
|
|
|
|
|
case SCRIPT:
|
|
|
|
if (JSScript *script = static_cast<AutoScriptRooter *>(this)->script)
|
|
|
|
js_TraceScript(trc, script);
|
|
|
|
return;
|
|
|
|
|
|
|
|
case ENUMERATOR:
|
|
|
|
static_cast<AutoEnumStateRooter *>(this)->trace(trc);
|
|
|
|
return;
|
|
|
|
|
|
|
|
case IDARRAY: {
|
|
|
|
JSIdArray *ida = static_cast<AutoIdArray *>(this)->idArray;
|
2010-07-14 23:19:36 -07:00
|
|
|
MarkIdRange(trc, ida->length, ida->vector, "js::AutoIdArray.idArray");
|
2010-06-16 14:13:01 -07:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
case DESCRIPTORS: {
|
2010-07-14 23:19:36 -07:00
|
|
|
PropDescArray &descriptors =
|
|
|
|
static_cast<AutoPropDescArrayRooter *>(this)->descriptors;
|
2010-06-16 14:13:01 -07:00
|
|
|
for (size_t i = 0, len = descriptors.length(); i < len; i++) {
|
2010-07-14 23:19:36 -07:00
|
|
|
PropDesc &desc = descriptors[i];
|
|
|
|
MarkValue(trc, desc.pd, "PropDesc::pd");
|
|
|
|
MarkValue(trc, desc.value, "PropDesc::value");
|
|
|
|
MarkValue(trc, desc.get, "PropDesc::get");
|
|
|
|
MarkValue(trc, desc.set, "PropDesc::set");
|
|
|
|
MarkId(trc, desc.id, "PropDesc::id");
|
2010-06-16 14:13:01 -07:00
|
|
|
}
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
case DESCRIPTOR : {
|
2010-07-14 23:19:36 -07:00
|
|
|
PropertyDescriptor &desc = *static_cast<AutoPropertyDescriptorRooter *>(this);
|
2010-06-16 14:13:01 -07:00
|
|
|
if (desc.obj)
|
2010-07-14 23:19:36 -07:00
|
|
|
MarkObject(trc, desc.obj, "Descriptor::obj");
|
|
|
|
MarkValue(trc, desc.value, "Descriptor::value");
|
2010-07-17 01:51:07 -07:00
|
|
|
if ((desc.attrs & JSPROP_GETTER) && desc.getter)
|
2010-07-14 23:19:36 -07:00
|
|
|
MarkObject(trc, CastAsObject(desc.getter), "Descriptor::get");
|
2010-07-17 01:51:07 -07:00
|
|
|
if (desc.attrs & JSPROP_SETTER && desc.setter)
|
2010-07-14 23:19:36 -07:00
|
|
|
MarkObject(trc, CastAsObject(desc.setter), "Descriptor::set");
|
2010-06-16 14:13:01 -07:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
case NAMESPACES: {
|
2010-06-19 11:58:00 -07:00
|
|
|
JSXMLArray &array = static_cast<AutoNamespaceArray *>(this)->array;
|
2010-07-14 23:19:36 -07:00
|
|
|
MarkObjectRange(trc, array.length, reinterpret_cast<JSObject **>(array.vector),
|
|
|
|
"JSXMLArray.vector");
|
2010-06-16 14:13:01 -07:00
|
|
|
array.cursors->trace(trc);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
case XML:
|
|
|
|
js_TraceXML(trc, static_cast<AutoXMLRooter *>(this)->xml);
|
|
|
|
return;
|
|
|
|
|
|
|
|
case OBJECT:
|
2010-07-14 23:19:36 -07:00
|
|
|
if (JSObject *obj = static_cast<AutoObjectRooter *>(this)->obj)
|
|
|
|
MarkObject(trc, obj, "js::AutoObjectRooter.obj");
|
2010-06-16 14:13:01 -07:00
|
|
|
return;
|
|
|
|
|
|
|
|
case ID:
|
2010-07-14 23:19:36 -07:00
|
|
|
MarkId(trc, static_cast<AutoIdRooter *>(this)->id_, "js::AutoIdRooter.val");
|
|
|
|
return;
|
|
|
|
|
|
|
|
case VALVECTOR: {
|
|
|
|
Vector<Value, 8> &vector = static_cast<js::AutoValueVector *>(this)->vector;
|
|
|
|
MarkValueRange(trc, vector.length(), vector.begin(), "js::AutoValueVector.vector");
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
case STRING:
|
|
|
|
if (JSString *str = static_cast<js::AutoStringRooter *>(this)->str)
|
|
|
|
MarkString(trc, str, "js::AutoStringRooter.str");
|
2010-06-16 14:13:01 -07:00
|
|
|
return;
|
|
|
|
|
2010-07-14 23:19:36 -07:00
|
|
|
case IDVECTOR: {
|
|
|
|
Vector<jsid, 8> &vector = static_cast<js::AutoIdVector *>(this)->vector;
|
|
|
|
MarkIdRange(trc, vector.length(), vector.begin(), "js::AutoIdVector.vector");
|
2010-06-16 14:13:01 -07:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
JS_ASSERT(tag >= 0);
|
2010-07-14 23:19:36 -07:00
|
|
|
MarkValueRange(trc, tag, static_cast<AutoArrayRooter *>(this)->array, "js::AutoArrayRooter.array");
|
2010-06-16 14:13:01 -07:00
|
|
|
}
|
|
|
|
|
2010-08-30 11:46:18 -07:00
|
|
|
namespace js {
|
|
|
|
|
2010-03-03 17:52:26 -08:00
|
|
|
/*
 * Mark all GC-thing roots held directly by a single JSContext: the global
 * object, the pending exception, the autorooter chain, the sharp-object
 * map, regexp statics, the iterator value, and (with the tracing JIT) any
 * native value ranges held by active tracer states. Also flags the
 * context's compartment as live for SweepCompartments.
 */
void
MarkContext(JSTracer *trc, JSContext *acx)
{
    /* Stack frames and slots are traced by StackSpace::mark. */

    /* Mark other roots-by-definition in acx. */
    if (acx->globalObject && !JS_HAS_OPTION(acx, JSOPTION_UNROOTED_GLOBAL))
        JS_CALL_OBJECT_TRACER(trc, acx->globalObject, "global object");
    if (acx->throwing) {
        MarkValue(trc, acx->exception, "exception");
    } else {
        /* Avoid keeping GC-ed junk stored in JSContext.exception. */
        acx->exception.setNull();
    }

    /* Every active autorooter on this context marks its own payload. */
    for (js::AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down)
        gcr->trace(trc);

    if (acx->sharpObjectMap.depth > 0)
        js_TraceSharpMap(trc, &acx->sharpObjectMap);

    js_TraceRegExpStatics(trc, acx);

    MarkValue(trc, acx->iterValue, "iterValue");

    /* Keep this context's compartment alive through the coming sweep. */
    acx->compartment->marked = true;

#ifdef JS_TRACER
    /* Walk the stack of nested tracer activations, marking native vp arrays. */
    TracerState* state = acx->tracerState;
    while (state) {
        if (state->nativeVp)
            MarkValueRange(trc, state->nativeVpLen, state->nativeVp, "nativeVp");
        state = state->prev;
    }
#endif
}
|
|
|
|
|
2009-01-30 15:40:05 -08:00
|
|
|
/*
 * Mark every root known to the runtime: conservative stack roots, the
 * explicit root and lock hash tables, the atom state, debugger traps,
 * per-context roots (via MarkContext), per-thread data, the shared empty
 * shapes, and finally the embedding's extra roots. Before any of that, a
 * sanity pass verifies that every GC thing held by an autorooter was
 * already found by the conservative stack scanner; a miss aborts the
 * process (see bug 574313).
 */
JS_REQUIRES_STACK void
MarkRuntime(JSTracer *trc)
{
    JSRuntime *rt = trc->context->runtime;

    /* During the final GC at shutdown there is no stack left to scan. */
    if (rt->state != JSRTS_LANDING)
        MarkConservativeStackRoots(trc);

    /*
     * Verify that we do not have at this point unmarked GC things stored in
     * autorooters. To maximize test coverage we abort even in non-debug
     * builds for now, see bug 574313.
     */
    JSContext *iter;
#if 1
    iter = NULL;
    while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter)) {
        for (AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down) {
#ifdef JS_THREADSAFE
            JS_ASSERT_IF(!acx->thread->requestDepth, acx->thread->suspendCount);
#endif
            JS_ASSERT(JS_THREAD_DATA(acx)->conservativeGC.hasStackToScan());
            /* Extract the single GC thing, if any, held by this rooter. */
            void *thing;
            switch (gcr->tag) {
              default:
                continue;
              case AutoGCRooter::JSVAL: {
                const Value &v = static_cast<AutoValueRooter *>(gcr)->val;
                if (!v.isMarkable())
                    continue;
                thing = v.asGCThing();
                break;
              }
              case AutoGCRooter::XML:
                thing = static_cast<AutoXMLRooter *>(gcr)->xml;
                break;
              case AutoGCRooter::OBJECT:
                thing = static_cast<AutoObjectRooter *>(gcr)->obj;
                if (!thing)
                    continue;
                break;
              case AutoGCRooter::ID: {
                jsid id = static_cast<AutoIdRooter *>(gcr)->id();
                if (!JSID_IS_GCTHING(id))
                    continue;
                thing = JSID_TO_GCTHING(id);
                break;
              }
            }

            /* Static strings are not heap-allocated and need no marking. */
            if (JSString::isStatic(thing))
                continue;

            if (!IsMarkedGCThing(thing)) {
                /*
                 * The conservative scanner failed to find this root; dump
                 * diagnostics and abort even in release builds.
                 */
                ConservativeGCTest test = IsGCThingWord(rt, reinterpret_cast<jsuword>(thing));
                fprintf(stderr,
                        "Conservative GC scanner has missed the root 0x%p with tag %ld"
                        " on the stack due to %d. The root location 0x%p, distance from"
                        " the stack base %ld, conservative gc span %ld."
                        " Conservative GC status for the thread %d."
                        " Aborting.\n",
                        thing, (long) gcr->tag, int(test), (void *) gcr,
                        (long) ((jsword) JS_THREAD_DATA(acx)->nativeStackBase - (jsword) gcr),
                        (long) ((jsword) JS_THREAD_DATA(acx)->nativeStackBase -
                                (jsword) JS_THREAD_DATA(acx)->conservativeGC.nativeStackTop),
                        int(JS_THREAD_DATA(acx)->conservativeGC.hasStackToScan()));
                JS_ASSERT(false);
                abort();
            }
        }
    }
#endif

    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront())
        gc_root_traversal(trc, r.front());

    for (GCLocks::Range r = rt->gcLocksHash.all(); !r.empty(); r.popFront())
        gc_lock_traversal(r.front(), trc);

    js_TraceAtomState(trc);
    js_MarkTraps(trc);

    iter = NULL;
    while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter))
        MarkContext(trc, acx);

    for (ThreadDataIter i(rt); !i.empty(); i.popFront())
        i.threadData()->mark(trc);

    /* The runtime-shared empty shapes are roots too. */
    if (rt->emptyArgumentsShape)
        rt->emptyArgumentsShape->trace(trc);
    if (rt->emptyBlockShape)
        rt->emptyBlockShape->trace(trc);
    if (rt->emptyCallShape)
        rt->emptyCallShape->trace(trc);
    if (rt->emptyDeclEnvShape)
        rt->emptyDeclEnvShape->trace(trc);
    if (rt->emptyEnumeratorShape)
        rt->emptyEnumeratorShape->trace(trc);
    if (rt->emptyWithShape)
        rt->emptyWithShape->trace(trc);

    /*
     * We mark extra roots at the last thing so it can use additional
     * colors to implement cycle collection.
     */
    if (rt->gcExtraRootsTraceOp)
        rt->gcExtraRootsTraceOp(trc, rt->gcExtraRootsData);

#ifdef DEBUG
    if (rt->functionMeterFilename) {
        /* Keep metered functions alive so the meter maps stay valid. */
        for (int k = 0; k < 2; k++) {
            typedef JSRuntime::FunctionCountMap HM;
            HM &h = (k == 0) ? rt->methodReadBarrierCountMap : rt->unjoinedFunctionCountMap;
            for (HM::Range r = h.all(); !r.empty(); r.popFront()) {
                JSFunction *fun = r.front().key;
                JS_CALL_OBJECT_TRACER(trc, fun, "FunctionCountMap key");
            }
        }
    }
#endif
}
|
|
|
|
|
2010-09-01 11:24:45 -07:00
|
|
|
} /* namespace js */
|
|
|
|
|
2010-07-28 11:20:19 -07:00
|
|
|
void
|
2010-09-01 11:24:45 -07:00
|
|
|
js_TriggerGC(JSContext *cx, JSBool gcLocked)
|
2010-07-28 11:20:19 -07:00
|
|
|
{
|
2010-09-01 11:24:45 -07:00
|
|
|
JSRuntime *rt = cx->runtime;
|
|
|
|
|
|
|
|
#ifdef JS_THREADSAFE
|
|
|
|
JS_ASSERT(cx->thread->requestDepth > 0);
|
|
|
|
#endif
|
2010-07-28 11:20:19 -07:00
|
|
|
JS_ASSERT(!rt->gcRunning);
|
|
|
|
if (rt->gcIsNeeded)
|
|
|
|
return;
|
|
|
|
|
|
|
|
/*
|
|
|
|
* Trigger the GC when it is safe to call an operation callback on any
|
|
|
|
* thread.
|
|
|
|
*/
|
2010-09-01 11:24:45 -07:00
|
|
|
rt->gcIsNeeded = JS_TRUE;
|
|
|
|
js_TriggerAllOperationCallbacks(rt, gcLocked);
|
2010-07-28 11:20:19 -07:00
|
|
|
}
|
|
|
|
|
2009-03-24 05:07:35 -07:00
|
|
|
void
|
|
|
|
js_DestroyScriptsToGC(JSContext *cx, JSThreadData *data)
|
2008-08-20 22:18:42 -07:00
|
|
|
{
|
2009-03-24 05:07:35 -07:00
|
|
|
JSScript **listp, *script;
|
2008-08-20 22:18:42 -07:00
|
|
|
|
2009-03-24 05:07:35 -07:00
|
|
|
for (size_t i = 0; i != JS_ARRAY_LENGTH(data->scriptsToGC); ++i) {
|
|
|
|
listp = &data->scriptsToGC[i];
|
|
|
|
while ((script = *listp) != NULL) {
|
|
|
|
*listp = script->u.nextToGC;
|
|
|
|
script->u.nextToGC = NULL;
|
|
|
|
js_DestroyScript(cx, script);
|
|
|
|
}
|
2008-08-20 22:18:42 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2009-12-30 03:06:26 -08:00
|
|
|
/*
 * Finalize a single dead object (or function object): run its class
 * finalizer, fire the DTrace probe, and release its slots/map storage.
 */
inline void
FinalizeObject(JSContext *cx, JSObject *obj, unsigned thingKind)
{
    JS_ASSERT(thingKind == FINALIZE_OBJECT ||
              thingKind == FINALIZE_FUNCTION);

    /* Cope with stillborn objects that have no map. */
    if (!obj->map)
        return;

    /* Finalize obj first, in case it needs map and slots. */
    Class *clasp = obj->getClass();
    if (clasp->finalize)
        clasp->finalize(cx, obj);

    DTrace::finalizeObject(obj);

    obj->finish(cx);
}
|
|
|
|
|
2009-12-30 03:06:26 -08:00
|
|
|
/* Finalize a function by finalizing its underlying function object. */
inline void
FinalizeFunction(JSContext *cx, JSFunction *fun, unsigned thingKind)
{
    FinalizeObject(cx, FUN_OBJECT(fun), thingKind);
}
|
|
|
|
|
2009-10-02 07:34:22 -07:00
|
|
|
#if JS_HAS_XML_SUPPORT
|
2009-12-30 03:06:26 -08:00
|
|
|
inline void
|
|
|
|
FinalizeXML(JSContext *cx, JSXML *xml, unsigned thingKind)
|
2009-10-02 07:34:22 -07:00
|
|
|
{
|
|
|
|
js_FinalizeXML(cx, xml);
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
|
2009-10-02 01:21:19 -07:00
|
|
|
/*
 * Per-type finalizer hooks for external strings, one slot per external
 * string type. The initializer below must list exactly
 * JS_EXTERNAL_STRING_LIMIT entries; the static assert pins that at 8.
 */
JS_STATIC_ASSERT(JS_EXTERNAL_STRING_LIMIT == 8);
static JSStringFinalizeOp str_finalizers[JS_EXTERNAL_STRING_LIMIT] = {
    NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL
};
|
|
|
|
|
|
|
|
/*
 * Replace the first external-string finalizer slot holding oldop with
 * newop and return that slot's index, or -1 if oldop is not installed.
 * Passing oldop == NULL registers newop in the first free slot.
 */
intN
js_ChangeExternalStringFinalizer(JSStringFinalizeOp oldop,
                                 JSStringFinalizeOp newop)
{
    uintN slot = 0;
    while (slot != JS_ARRAY_LENGTH(str_finalizers)) {
        if (str_finalizers[slot] != oldop) {
            ++slot;
            continue;
        }
        str_finalizers[slot] = newop;
        return intN(slot);
    }
    return -1;
}
|
|
|
|
|
2010-07-24 20:26:34 -07:00
|
|
|
/*
 * Finalize a short (inline-storage) string. Its characters live inside
 * the GC cell itself, so no memory is freed; only sanity checks and
 * metering remain.
 */
inline void
FinalizeShortString(JSContext *cx, JSShortString *str, unsigned thingKind)
{
    JS_ASSERT(FINALIZE_SHORT_STRING == thingKind);
    JS_ASSERT(!JSString::isStatic(str->header()));
    JS_ASSERT(str->header()->isFlat());
    JS_RUNTIME_UNMETER(cx->runtime, liveStrings);
}
|
|
|
|
|
2009-12-30 03:06:26 -08:00
|
|
|
/*
 * Finalize an ordinary (non-external, non-short) string, releasing any
 * character or rope buffer the string owns. Dependent strings own no
 * storage; flat strings own their character buffer; rope top nodes own
 * the shared rope buffer.
 */
inline void
FinalizeString(JSContext *cx, JSString *str, unsigned thingKind)
{
    JS_ASSERT(FINALIZE_STRING == thingKind);
    JS_ASSERT(!JSString::isStatic(str));
    JS_RUNTIME_UNMETER(cx->runtime, liveStrings);
    if (str->isDependent()) {
        /* Dependent strings borrow their base's characters: nothing to free. */
        JS_ASSERT(str->dependentBase());
        JS_RUNTIME_UNMETER(cx->runtime, liveDependentStrings);
    } else if (str->isFlat()) {
        /*
         * flatChars for stillborn string is null, but cx->free checks
         * for a null pointer on its own.
         */
        cx->free(str->flatChars());
    } else if (str->isTopNode()) {
        cx->free(str->topNodeBuffer());
    }
    /* Nothing to be done for rope interior nodes. */
}
|
|
|
|
|
2009-12-30 03:06:26 -08:00
|
|
|
/*
 * Finalize an external string by invoking the embedding-registered
 * finalizer for its external type, if one is installed. The type index
 * is recovered from the thing kind.
 */
inline void
FinalizeExternalString(JSContext *cx, JSString *str, unsigned thingKind)
{
    unsigned type = thingKind - FINALIZE_EXTERNAL_STRING0;
    JS_ASSERT(type < JS_ARRAY_LENGTH(str_finalizers));
    JS_ASSERT(!JSString::isStatic(str));
    JS_ASSERT(str->isFlat());

    JS_RUNTIME_UNMETER(cx->runtime, liveStrings);

    /* A stillborn string has null chars. */
    jschar *chars = str->flatChars();
    if (!chars)
        return;
    JSStringFinalizeOp finalizer = str_finalizers[type];
    if (finalizer)
        finalizer(cx, str);
}
|
|
|
|
|
2009-12-30 03:06:26 -08:00
|
|
|
/*
|
|
|
|
* This function is called from js_FinishAtomState to force the finalization
|
|
|
|
* of the permanently interned strings when cx is not available.
|
|
|
|
*/
|
2009-10-02 07:34:22 -07:00
|
|
|
void
js_FinalizeStringRT(JSRuntime *rt, JSString *str)
{
    JS_RUNTIME_UNMETER(rt, liveStrings);
    JS_ASSERT(!JSString::isStatic(str));
    JS_ASSERT(!str->isRope());

    if (str->isDependent()) {
        /* A dependent string can not be external and must be valid. */
        JS_ASSERT(JSGCArenaInfo::fromGCThing(str)->list->thingKind == FINALIZE_STRING);
        JS_ASSERT(str->dependentBase());
        JS_RUNTIME_UNMETER(rt, liveDependentStrings);
    } else {
        /* Recover the string's kind from its arena to pick the right path. */
        unsigned thingKind = JSGCArenaInfo::fromGCThing(str)->list->thingKind;
        JS_ASSERT(IsFinalizableStringKind(thingKind));

        /* A stillborn string has null chars, so is not valid. */
        jschar *chars = str->flatChars();
        if (!chars)
            return;
        if (thingKind == FINALIZE_STRING) {
            /* Ordinary flat string: the runtime owns the char buffer. */
            rt->free(chars);
        } else if (thingKind != FINALIZE_SHORT_STRING) {
            /* External string: dispatch to the registered finalizer. */
            unsigned type = thingKind - FINALIZE_EXTERNAL_STRING0;
            JS_ASSERT(type < JS_ARRAY_LENGTH(str_finalizers));
            JSStringFinalizeOp finalizer = str_finalizers[type];
            if (finalizer) {
                /*
                 * Assume that the finalizer for the permanently interned
                 * string knows how to deal with null context.
                 */
                finalizer(NULL, str);
            }
        }
        /* Short strings own no external storage: nothing else to free. */
    }
}
|
|
|
|
|
2009-12-30 03:06:26 -08:00
|
|
|
/*
 * Sweep one arena list: walk every arena of the list, finalize each cell
 * that is neither on the arena's current free list nor marked, rebuild the
 * arena's free list in address order, and release arenas that end up fully
 * empty. T is the cell type and `finalizer` the per-cell finalizer.
 */
template<typename T,
         void finalizer(JSContext *cx, T *thing, unsigned thingKind)>
static void
FinalizeArenaList(JSContext *cx, unsigned thingKind)
{
    JS_STATIC_ASSERT(!(sizeof(T) & GC_CELL_MASK));
    JSGCArenaList *arenaList = &cx->runtime->gcArenaList[thingKind];
    JS_ASSERT(sizeof(T) == arenaList->thingSize);

    JSGCArena **ap = &arenaList->head;
    JSGCArena *a = *ap;
    if (!a)
        return;

#ifdef JS_GCMETER
    uint32 nlivearenas = 0, nkilledarenas = 0, nthings = 0;
#endif
    for (;;) {
        JSGCArenaInfo *ainfo = a->getInfo();
        JS_ASSERT(ainfo->list == arenaList);
        JS_ASSERT(!a->getMarkingDelay()->link);
        JS_ASSERT(a->getMarkingDelay()->unmarkedChildren == 0);

        /* Rebuild the arena's free list as we go; tailp is the append point. */
        JSGCThing *freeList = NULL;
        JSGCThing **tailp = &freeList;
        bool allClear = true;

        jsuword thing = a->toPageStart();
        jsuword thingsEnd = thing + GC_ARENA_SIZE / sizeof(T) * sizeof(T);

        /*
         * nextFree tracks the next cell already on the old free list;
         * thingsEnd acts as a sentinel once that list is exhausted.
         */
        jsuword nextFree = reinterpret_cast<jsuword>(ainfo->freeList);
        if (!nextFree) {
            nextFree = thingsEnd;
        } else {
            JS_ASSERT(thing <= nextFree);
            JS_ASSERT(nextFree < thingsEnd);
        }

        jsuword gcCellIndex = 0;
        jsbitmap *bitmap = a->getMarkBitmap();
        for (;; thing += sizeof(T), gcCellIndex += sizeof(T) >> GC_CELL_SHIFT) {
            if (thing == nextFree) {
                /* Cell was already free: advance along the old free list. */
                if (thing == thingsEnd)
                    break;
                nextFree = reinterpret_cast<jsuword>(
                    reinterpret_cast<JSGCThing *>(nextFree)->link);
                if (!nextFree) {
                    nextFree = thingsEnd;
                } else {
                    JS_ASSERT(thing < nextFree);
                    JS_ASSERT(nextFree < thingsEnd);
                }
            } else if (JS_TEST_BIT(bitmap, gcCellIndex)) {
                /* Marked: the cell survives; the arena stays live. */
                allClear = false;
                METER(nthings++);
                continue;
            } else {
                /* Unmarked and allocated: finalize and poison in DEBUG. */
                T *t = reinterpret_cast<T *>(thing);
                finalizer(cx, t, thingKind);
#ifdef DEBUG
                memset(t, JS_FREE_PATTERN, sizeof(T));
#endif
            }
            /* Free (old or newly finalized) cells join the new free list. */
            JSGCThing *t = reinterpret_cast<JSGCThing *>(thing);
            *tailp = t;
            tailp = &t->link;
        }

#ifdef DEBUG
        /* Check that the free list is consistent. */
        unsigned nfree = 0;
        if (freeList) {
            JS_ASSERT(tailp != &freeList);
            JSGCThing *t = freeList;
            for (;;) {
                ++nfree;
                if (&t->link == tailp)
                    break;
                JS_ASSERT(t < t->link);
                t = t->link;
            }
        }
#endif
        if (allClear) {
            /*
             * Forget just assembled free list head for the arena and
             * add the arena itself to the destroy list.
             */
            JS_ASSERT(nfree == ThingsPerArena(sizeof(T)));
            *ap = ainfo->prev;
            ReleaseGCArena(cx->runtime, a);
            METER(nkilledarenas++);
        } else {
            JS_ASSERT(nfree < ThingsPerArena(sizeof(T)));
            *tailp = NULL;
            ainfo->freeList = freeList;
            ap = &ainfo->prev;
            METER(nlivearenas++);
        }
        if (!(a = *ap))
            break;
    }
    arenaList->cursor = arenaList->head;

    METER(UpdateArenaStats(&cx->runtime->gcArenaStats[thingKind],
                           nlivearenas, nkilledarenas, nthings));
}
|
|
|
|
|
2010-04-27 06:46:24 -07:00
|
|
|
#ifdef JS_THREADSAFE
|
|
|
|
|
|
|
|
namespace js {
|
|
|
|
|
2010-04-28 17:17:34 -07:00
|
|
|
/*
 * Queue ptr to be freed later on the background sweep thread. The current
 * cursor array is pushed onto freeVector when full and a fresh array is
 * allocated; on any allocation failure the do/while(false) breaks out and
 * ptr is freed synchronously instead.
 */
JS_FRIEND_API(void)
BackgroundSweepTask::replenishAndFreeLater(void *ptr)
{
    /* Only called when the current array has no room left. */
    JS_ASSERT(freeCursor == freeCursorEnd);
    do {
        /* Save the filled array (its start, not the cursor) for later. */
        if (freeCursor && !freeVector.append(freeCursorEnd - FREE_ARRAY_LENGTH))
            break;
        freeCursor = (void **) js_malloc(FREE_ARRAY_SIZE);
        if (!freeCursor) {
            freeCursorEnd = NULL;
            break;
        }
        freeCursorEnd = freeCursor + FREE_ARRAY_LENGTH;
        *freeCursor++ = ptr;
        return;
    } while (false);
    /* Fallback: could not queue, so free immediately on this thread. */
    js_free(ptr);
}
|
|
|
|
|
|
|
|
/*
 * Background-thread entry point: free everything queued by
 * replenishAndFreeLater — the partially filled cursor array first, then
 * each full array saved in freeVector (elements and the arrays themselves).
 */
void
BackgroundSweepTask::run()
{
    if (freeCursor) {
        /* The in-progress array holds entries from its start up to the cursor. */
        void **array = freeCursorEnd - FREE_ARRAY_LENGTH;
        freeElementsAndArray(array, freeCursor);
        freeCursor = freeCursorEnd = NULL;
    } else {
        JS_ASSERT(!freeCursorEnd);
    }
    for (void ***iter = freeVector.begin(); iter != freeVector.end(); ++iter) {
        void **array = *iter;
        freeElementsAndArray(array, array + FREE_ARRAY_LENGTH);
    }
}
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
#endif /* JS_THREADSAFE */
|
|
|
|
|
2010-06-23 14:35:10 -07:00
|
|
|
/*
 * Compact the runtime's compartment vector in place: keep (and sweep)
 * every compartment whose `marked` flag was set during marking, and
 * destroy the rest, notifying the embedding's compartment callback and
 * dropping principals before deletion.
 */
static void
SweepCompartments(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    JSCompartmentCallback callback = rt->compartmentCallback;
    JSCompartment **read = rt->compartments.begin();
    JSCompartment **end = rt->compartments.end();
    JSCompartment **write = read;

    /* Delete defaultCompartment only during runtime shutdown */
    rt->defaultCompartment->marked = true;

    while (read < end) {
        JSCompartment *compartment = (*read++);
        if (compartment->marked) {
            /* Survivor: reset the flag for the next GC and keep it. */
            compartment->marked = false;
            *write++ = compartment;
            /* Remove dead wrappers from the compartment map. */
            compartment->sweep(cx);
        } else {
            if (callback)
                (void) callback(cx, compartment, JSCOMPARTMENT_DESTROY);
            if (compartment->principals)
                JSPRINCIPALS_DROP(cx, compartment->principals);
            delete compartment;
        }
    }
    /* Shrink the vector to just the surviving compartments. */
    rt->compartments.resize(write - rt->compartments.begin());
}
|
|
|
|
|
2010-04-08 05:54:18 -07:00
|
|
|
/*
|
|
|
|
* Common cache invalidation and so forth that must be done before GC. Even if
|
2010-08-29 11:57:08 -07:00
|
|
|
* GCUntilDone calls GC several times, this work needs to be done only once.
|
2010-04-08 05:54:18 -07:00
|
|
|
*/
|
|
|
|
static void
PreGCCleanup(JSContext *cx, JSGCInvocationKind gckind)
{
    JSRuntime *rt = cx->runtime;

    /* Clear gcIsNeeded now, when we are about to start a normal GC cycle. */
    rt->gcIsNeeded = JS_FALSE;

    /* Reset malloc counter. */
    rt->resetGCMallocBytes();

#ifdef JS_DUMP_SCOPE_METERS
    {
        extern void js_DumpScopeMeters(JSRuntime *rt);
        js_DumpScopeMeters(rt);
    }
#endif

    /*
     * Reset the property cache's type id generator so we can compress ids.
     * Same for the protoHazardShape proxy-shape standing in for all object
     * prototypes having readonly or setter properties.
     */
    if (rt->shapeGen & SHAPE_OVERFLOW_BIT
#ifdef JS_GC_ZEAL
        || rt->gcZeal >= 1
#endif
        ) {
        rt->gcRegenShapes = true;
        rt->shapeGen = Shape::LAST_RESERVED_SHAPE;
        rt->protoHazardShape = 0;
    }

    /* Flush caches held by threads and by each live context. */
    js_PurgeThreads(cx);
    {
        JSContext *iter = NULL;
        while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter))
            acx->purge();
    }
}
|
|
|
|
|
2010-04-08 05:54:17 -07:00
|
|
|
/*
|
|
|
|
* Perform mark-and-sweep GC.
|
|
|
|
*
|
|
|
|
* In a JS_THREADSAFE build, the calling thread must be rt->gcThread and each
|
|
|
|
* other thread must be either outside all requests or blocked waiting for GC
|
|
|
|
* to finish. Note that the caller does not hold rt->gcLock.
|
|
|
|
*/
|
|
|
|
static void
|
2010-08-30 11:46:18 -07:00
|
|
|
MarkAndSweep(JSContext *cx GCTIMER_PARAM)
|
2010-04-08 05:54:17 -07:00
|
|
|
{
|
|
|
|
JSRuntime *rt = cx->runtime;
|
|
|
|
rt->gcNumber++;
|
|
|
|
|
|
|
|
/*
|
|
|
|
* Mark phase.
|
|
|
|
*/
|
2010-07-26 11:44:04 -07:00
|
|
|
GCMarker gcmarker(cx);
|
|
|
|
JS_ASSERT(IS_GC_MARKING_TRACER(&gcmarker));
|
|
|
|
JS_ASSERT(gcmarker.getMarkColor() == BLACK);
|
|
|
|
rt->gcMarkingTracer = &gcmarker;
|
2010-09-01 11:24:45 -07:00
|
|
|
|
2010-08-05 05:16:56 -07:00
|
|
|
for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
|
|
|
|
GCChunkInfo::fromChunk(r.front())->clearMarkBitmap();
|
2010-09-01 11:24:45 -07:00
|
|
|
|
2010-08-30 11:46:18 -07:00
|
|
|
MarkRuntime(&gcmarker);
|
2010-04-23 15:15:42 -07:00
|
|
|
js_MarkScriptFilenames(rt);
|
2010-04-08 05:54:17 -07:00
|
|
|
|
|
|
|
/*
|
|
|
|
* Mark children of things that caused too deep recursion during the above
|
|
|
|
* tracing.
|
|
|
|
*/
|
2010-07-26 11:44:04 -07:00
|
|
|
gcmarker.markDelayedChildren();
|
2010-04-08 05:54:17 -07:00
|
|
|
|
|
|
|
rt->gcMarkingTracer = NULL;
|
|
|
|
|
2010-07-15 17:58:36 -07:00
|
|
|
if (rt->gcCallback)
|
|
|
|
(void) rt->gcCallback(cx, JSGC_MARK_END);
|
|
|
|
|
2010-04-08 05:54:17 -07:00
|
|
|
#ifdef JS_THREADSAFE
|
2010-09-01 11:24:45 -07:00
|
|
|
JS_ASSERT(!cx->gcSweepTask);
|
|
|
|
if (!rt->gcHelperThread.busy())
|
|
|
|
cx->gcSweepTask = new js::BackgroundSweepTask();
|
2010-04-08 05:54:17 -07:00
|
|
|
#endif
|
|
|
|
|
|
|
|
/*
|
|
|
|
* Sweep phase.
|
|
|
|
*
|
|
|
|
* Finalize as we sweep, outside of rt->gcLock but with rt->gcRunning set
|
|
|
|
* so that any attempt to allocate a GC-thing from a finalizer will fail,
|
|
|
|
* rather than nest badly and leave the unmarked newborn to be swept.
|
|
|
|
*
|
|
|
|
* We first sweep atom state so we can use js_IsAboutToBeFinalized on
|
2010-07-26 11:44:04 -07:00
|
|
|
* JSString held in a hashtable to check if the hashtable entry can be
|
|
|
|
* freed. Note that even after the entry is freed, JSObject finalizers can
|
|
|
|
* continue to access the corresponding JSString* assuming that they are
|
|
|
|
* unique. This works since the atomization API must not be called during
|
|
|
|
* the GC.
|
2010-04-08 05:54:17 -07:00
|
|
|
*/
|
2010-04-22 05:35:21 -07:00
|
|
|
TIMESTAMP(startSweep);
|
2010-04-08 05:54:17 -07:00
|
|
|
js_SweepAtomState(cx);
|
|
|
|
|
|
|
|
/* Finalize watch points associated with unreachable objects. */
|
|
|
|
js_SweepWatchPoints(cx);
|
|
|
|
|
|
|
|
#ifdef DEBUG
|
|
|
|
/* Save the pre-sweep count of scope-mapped properties. */
|
2010-08-29 11:57:08 -07:00
|
|
|
rt->liveObjectPropsPreSweep = rt->liveObjectProps;
|
2010-04-08 05:54:17 -07:00
|
|
|
#endif
|
|
|
|
|
|
|
|
/*
|
2010-04-22 23:58:44 -07:00
|
|
|
* We finalize iterators before other objects so the iterator can use the
|
|
|
|
* object which properties it enumerates over to finalize the enumeration
|
2010-07-26 11:44:04 -07:00
|
|
|
* state. We finalize objects before other GC things to ensure that
|
|
|
|
* object's finalizer can access them even if they will be freed.
|
2010-04-08 05:54:17 -07:00
|
|
|
*/
|
2010-04-22 23:58:44 -07:00
|
|
|
JS_ASSERT(!rt->gcEmptyArenaList);
|
2010-07-23 19:33:49 -07:00
|
|
|
FinalizeArenaList<JSObject, FinalizeObject>(cx, FINALIZE_OBJECT);
|
|
|
|
FinalizeArenaList<JSFunction, FinalizeFunction>(cx, FINALIZE_FUNCTION);
|
2010-04-08 05:54:17 -07:00
|
|
|
#if JS_HAS_XML_SUPPORT
|
2010-04-22 23:58:44 -07:00
|
|
|
FinalizeArenaList<JSXML, FinalizeXML>(cx, FINALIZE_XML);
|
2010-04-08 05:54:17 -07:00
|
|
|
#endif
|
2010-04-22 05:35:21 -07:00
|
|
|
TIMESTAMP(sweepObjectEnd);
|
2010-04-08 05:54:17 -07:00
|
|
|
|
|
|
|
/*
|
|
|
|
* We sweep the deflated cache before we finalize the strings so the
|
|
|
|
* cache can safely use js_IsAboutToBeFinalized..
|
|
|
|
*/
|
|
|
|
rt->deflatedStringCache->sweep(cx);
|
|
|
|
|
2010-07-24 20:26:34 -07:00
|
|
|
FinalizeArenaList<JSShortString, FinalizeShortString>(cx, FINALIZE_SHORT_STRING);
|
2010-04-22 23:58:44 -07:00
|
|
|
FinalizeArenaList<JSString, FinalizeString>(cx, FINALIZE_STRING);
|
2010-04-08 05:54:17 -07:00
|
|
|
for (unsigned i = FINALIZE_EXTERNAL_STRING0;
|
|
|
|
i <= FINALIZE_EXTERNAL_STRING_LAST;
|
|
|
|
++i) {
|
2010-04-22 23:58:44 -07:00
|
|
|
FinalizeArenaList<JSString, FinalizeExternalString>(cx, i);
|
2010-04-08 05:54:17 -07:00
|
|
|
}
|
2010-08-31 08:37:16 -07:00
|
|
|
|
|
|
|
rt->gcNewArenaTriggerBytes = rt->gcBytes < GC_ARENA_ALLOCATION_TRIGGER ?
|
|
|
|
GC_ARENA_ALLOCATION_TRIGGER :
|
|
|
|
rt->gcBytes;
|
|
|
|
|
2010-04-22 05:35:21 -07:00
|
|
|
TIMESTAMP(sweepStringEnd);
|
2010-04-08 05:54:17 -07:00
|
|
|
|
2010-06-23 14:35:10 -07:00
|
|
|
SweepCompartments(cx);
|
2010-06-04 16:32:10 -07:00
|
|
|
|
2010-04-08 05:54:17 -07:00
|
|
|
/*
|
2010-08-29 11:57:08 -07:00
|
|
|
* Sweep the runtime's property trees after finalizing objects, in case any
|
2010-04-08 05:54:17 -07:00
|
|
|
* had watchpoints referencing tree nodes.
|
|
|
|
*/
|
2010-08-29 11:57:08 -07:00
|
|
|
js::PropertyTree::sweepShapes(cx);
|
2010-04-08 05:54:17 -07:00
|
|
|
|
|
|
|
/*
|
|
|
|
* Sweep script filenames after sweeping functions in the generic loop
|
|
|
|
* above. In this way when a scripted function's finalizer destroys the
|
|
|
|
* script and calls rt->destroyScriptHook, the hook can still access the
|
|
|
|
* script's filename. See bug 323267.
|
|
|
|
*/
|
|
|
|
js_SweepScriptFilenames(rt);
|
|
|
|
|
2010-07-26 11:44:04 -07:00
|
|
|
/* Slowify arrays we have accumulated. */
|
|
|
|
gcmarker.slowifyArrays();
|
2010-07-22 18:45:21 -07:00
|
|
|
|
2010-04-08 05:54:17 -07:00
|
|
|
/*
|
|
|
|
* Destroy arenas after we finished the sweeping so finalizers can safely
|
|
|
|
* use js_IsAboutToBeFinalized().
|
|
|
|
*/
|
2010-04-22 23:58:44 -07:00
|
|
|
FreeGCChunks(rt);
|
2010-04-22 05:35:21 -07:00
|
|
|
TIMESTAMP(sweepDestroyEnd);
|
2010-04-08 05:54:17 -07:00
|
|
|
|
2010-09-01 11:24:45 -07:00
|
|
|
#ifdef JS_THREADSAFE
|
|
|
|
if (cx->gcSweepTask) {
|
|
|
|
rt->gcHelperThread.schedule(cx->gcSweepTask);
|
|
|
|
cx->gcSweepTask = NULL;
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
|
2010-04-08 05:54:17 -07:00
|
|
|
if (rt->gcCallback)
|
|
|
|
(void) rt->gcCallback(cx, JSGC_FINALIZE_END);
|
|
|
|
#ifdef DEBUG_srcnotesize
|
|
|
|
{ extern void DumpSrcNoteSizeHist();
|
|
|
|
DumpSrcNoteSizeHist();
|
|
|
|
printf("GC HEAP SIZE %lu\n", (unsigned long)rt->gcBytes);
|
|
|
|
}
|
|
|
|
#endif
|
|
|
|
|
|
|
|
#ifdef JS_SCOPE_DEPTH_METER
|
2010-08-24 18:57:14 -07:00
|
|
|
DumpScopeDepthMeter(rt);
|
|
|
|
#endif
|
2010-04-08 05:54:17 -07:00
|
|
|
|
|
|
|
#ifdef JS_DUMP_LOOP_STATS
|
2010-08-24 18:57:14 -07:00
|
|
|
DumpLoopStats(rt);
|
|
|
|
#endif
|
2010-04-08 05:54:17 -07:00
|
|
|
}
|
|
|
|
|
2010-04-22 05:31:00 -07:00
|
|
|
#ifdef JS_THREADSAFE
|
2010-05-27 07:57:55 -07:00
|
|
|
|
|
|
|
/*
|
|
|
|
* If the GC is running and we're called on another thread, wait for this GC
|
|
|
|
* activation to finish. We can safely wait here without fear of deadlock (in
|
|
|
|
* the case where we are called within a request on another thread's context)
|
|
|
|
* because the GC doesn't set rt->gcRunning until after it has waited for all
|
|
|
|
* active requests to end.
|
|
|
|
*
|
|
|
|
* We call here js_CurrentThreadId() after checking for rt->gcState to avoid
|
|
|
|
* an expensive call when the GC is not running.
|
|
|
|
*/
|
|
|
|
void
|
|
|
|
js_WaitForGC(JSRuntime *rt)
|
|
|
|
{
|
|
|
|
if (rt->gcRunning && rt->gcThread->id != js_CurrentThreadId()) {
|
|
|
|
do {
|
|
|
|
JS_AWAIT_GC_DONE(rt);
|
|
|
|
} while (rt->gcRunning);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2010-04-22 05:31:00 -07:00
|
|
|
/*
 * GC is running on another thread. Temporarily suspend all requests running
 * on the current thread and wait until the GC is done.
 */
static void
LetOtherGCFinish(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    JS_ASSERT(rt->gcThread);
    JS_ASSERT(cx->thread != rt->gcThread);

    /* A thread contributes at most one unit to rt->requestCount. */
    size_t requestDebit = cx->thread->requestDepth ? 1 : 0;
    JS_ASSERT(requestDebit <= rt->requestCount);
#ifdef JS_TRACER
    JS_ASSERT_IF(requestDebit == 0, !JS_ON_TRACE(cx));
#endif
    if (requestDebit != 0) {
#ifdef JS_TRACER
        if (JS_ON_TRACE(cx)) {
            /*
             * Leave trace before we decrease rt->requestCount and notify the
             * GC. Otherwise the GC may start immediately after we unlock while
             * this thread is still on trace.
             */
            AutoUnlockGC unlock(rt);
            LeaveTrace(cx);
        }
#endif
        /*
         * Give up our request so the GC thread, which waits for
         * rt->requestCount to reach zero, can proceed.
         */
        rt->requestCount -= requestDebit;
        if (rt->requestCount == 0)
            JS_NOTIFY_REQUEST_DONE(rt);
    }

    /* See comments before another call to js_ShareWaitingTitles below. */
    cx->thread->gcWaiting = true;
    js_ShareWaitingTitles(cx);

    /*
     * Check that we did not release the GC lock above and let the GC to
     * finish before we wait.
     */
    JS_ASSERT(rt->gcThread);

    /*
     * Wait for GC to finish on the other thread, even if requestDebit is 0
     * and even if GC has not started yet because the gcThread is waiting in
     * AutoGCSession. This ensures that js_GC never returns without a full GC
     * cycle happening.
     */
    do {
        JS_AWAIT_GC_DONE(rt);
    } while (rt->gcThread);

    cx->thread->gcWaiting = false;
    /* Restore the request we gave up above. */
    rt->requestCount += requestDebit;
}
|
|
|
|
|
|
|
|
#endif
|
|
|
|
|
2010-08-30 11:46:18 -07:00
|
|
|
/*
 * RAII guard for a GC session: the constructor performs the stop-the-world
 * rendezvous and sets rt->gcRunning; the destructor ends the session and
 * notifies threads waiting for the GC to finish.
 */
class AutoGCSession {
  public:
    explicit AutoGCSession(JSContext *cx);
    ~AutoGCSession();

  private:
    JSContext *context;  /* context whose thread owns this GC session */

    /* Disable copy constructor or assignments */
    AutoGCSession(const AutoGCSession&);
    void operator=(const AutoGCSession&);
};
|
|
|
|
|
2010-04-22 05:31:00 -07:00
|
|
|
/*
 * Start a new GC session. Together with LetOtherGCFinish this function
 * contains the rendezvous algorithm by which we stop the world for GC.
 *
 * This thread becomes the GC thread. Wait for all other threads to quiesce.
 * Then set rt->gcRunning and return.
 */
AutoGCSession::AutoGCSession(JSContext *cx)
  : context(cx)
{
    JSRuntime *rt = cx->runtime;

#ifdef JS_THREADSAFE
    /* If another thread is already running the GC, wait for its cycle to end. */
    if (rt->gcThread && rt->gcThread != cx->thread)
        LetOtherGCFinish(cx);
#endif

    JS_ASSERT(!rt->gcRunning);

#ifdef JS_THREADSAFE
    /* No other thread is in GC, so indicate that we're now in GC. */
    JS_ASSERT(!rt->gcThread);
    rt->gcThread = cx->thread;

    /*
     * Notify operation callbacks on other threads, which will give them a
     * chance to yield their requests. Threads without requests perform their
     * callback at some later point, which then will be unnecessary, but
     * harmless.
     */
    for (JSThread::Map::Range r = rt->threads.all(); !r.empty(); r.popFront()) {
        JSThread *thread = r.front().value;
        if (thread != cx->thread)
            thread->data.triggerOperationCallback();
    }

    /*
     * Discount the request on the current thread from contributing to
     * rt->requestCount before we wait for all other requests to finish.
     * JS_NOTIFY_REQUEST_DONE, which will wake us up, is only called on
     * rt->requestCount transitions to 0.
     */
    size_t requestDebit = cx->thread->requestDepth ? 1 : 0;
    JS_ASSERT(requestDebit <= rt->requestCount);
    if (requestDebit != rt->requestCount) {
        rt->requestCount -= requestDebit;

        /*
         * Share any title that is owned by the GC thread before we wait, to
         * avoid a deadlock with ClaimTitle. We also set the gcWaiting flag so
         * that ClaimTitle can claim the title ownership from the GC thread if
         * that function is called while the GC is waiting.
         */
        cx->thread->gcWaiting = true;
        js_ShareWaitingTitles(cx);
        do {
            JS_AWAIT_REQUEST_DONE(rt);
        } while (rt->requestCount > 0);
        cx->thread->gcWaiting = false;
        /* Restore our own request before the GC proper begins. */
        rt->requestCount += requestDebit;
    }

#endif /* JS_THREADSAFE */

    /*
     * Set rt->gcRunning here within the GC lock, and after waiting for any
     * active requests to end. This way js_WaitForGC called outside a request
     * would not block on the GC that is waiting for other requests to finish
     * with rt->gcThread set while JS_BeginRequest would do such wait.
     */
    rt->gcRunning = true;
}
|
|
|
|
|
|
|
|
/* End the current GC session and allow other threads to proceed. */
AutoGCSession::~AutoGCSession()
{
    JSRuntime *rt = context->runtime;
    /* Clear gcRunning before notifying so woken waiters see the GC as done. */
    rt->gcRunning = false;
#ifdef JS_THREADSAFE
    JS_ASSERT(rt->gcThread == context->thread);
    rt->gcThread = NULL;
    /* Wake threads blocked in js_WaitForGC or LetOtherGCFinish. */
    JS_NOTIFY_GC_DONE(rt);
#endif
}
|
|
|
|
|
2010-04-08 05:54:18 -07:00
|
|
|
/*
 * GC, repeatedly if necessary, until we think we have not created any new
 * garbage and no other threads are demanding more GC.
 */
static void
GCUntilDone(JSContext *cx, JSGCInvocationKind gckind GCTIMER_PARAM)
{
    if (JS_ON_TRACE(cx))
        return;

    JSRuntime *rt = cx->runtime;

    /* Recursive GC or a call from another thread restarts the GC cycle. */
    if (rt->gcMarkAndSweep) {
        rt->gcPoke = true;
#ifdef JS_THREADSAFE
        JS_ASSERT(rt->gcThread);
        if (rt->gcThread != cx->thread) {
            /* We do not return until another GC finishes. */
            LetOtherGCFinish(cx);
        }
#endif
        return;
    }

    /* Stop the world; released (with notification) when gcsession dies. */
    AutoGCSession gcsession(cx);

    METER(rt->gcStats.poke++);

    bool firstRun = true;
    rt->gcMarkAndSweep = true;
    do {
        rt->gcPoke = false;

        /* The mark-and-sweep phases run outside the GC lock. */
        AutoUnlockGC unlock(rt);
        if (firstRun) {
            PreGCCleanup(cx, gckind);
            TIMESTAMP(startMark);
            firstRun = false;
        }
        MarkAndSweep(cx GCTIMER_ARG);

        // GC again if:
        //   - another thread, not in a request, called js_GC
        //   - js_GC was called recursively
        //   - a finalizer called js_RemoveRoot or js_UnlockGCThingRT.
    } while (rt->gcPoke);

    rt->gcMarkAndSweep = false;
    rt->gcRegenShapes = false;
    rt->setGCLastBytes(rt->gcBytes);
}
|
|
|
|
|
2007-03-22 10:30:00 -07:00
|
|
|
/*
|
2008-01-29 22:29:49 -08:00
|
|
|
* The gckind flag bit GC_LOCK_HELD indicates a call from js_NewGCThing with
|
|
|
|
* rt->gcLock already held, so the lock should be kept on return.
|
2007-03-22 10:30:00 -07:00
|
|
|
*/
|
|
|
|
void
|
|
|
|
js_GC(JSContext *cx, JSGCInvocationKind gckind)
|
|
|
|
{
|
2010-04-22 05:31:05 -07:00
|
|
|
JSRuntime *rt = cx->runtime;
|
2007-03-22 10:30:00 -07:00
|
|
|
|
|
|
|
/*
|
|
|
|
* Don't collect garbage if the runtime isn't up, and cx is not the last
|
|
|
|
* context in the runtime. The last context must force a GC, and nothing
|
|
|
|
* should suppress that final collection or there may be shutdown leaks,
|
|
|
|
* or runtime bloat until the next context is created.
|
|
|
|
*/
|
|
|
|
if (rt->state != JSRTS_UP && gckind != GC_LAST_CONTEXT)
|
|
|
|
return;
|
2010-04-22 05:35:21 -07:00
|
|
|
|
2010-08-30 11:46:18 -07:00
|
|
|
RecordNativeStackTopForGC(cx);
|
|
|
|
|
2010-04-22 05:35:21 -07:00
|
|
|
GCTIMER_BEGIN();
|
2007-03-22 10:30:00 -07:00
|
|
|
|
2010-05-14 06:55:17 -07:00
|
|
|
do {
|
|
|
|
/*
|
|
|
|
* Let the API user decide to defer a GC if it wants to (unless this
|
|
|
|
* is the last context). Invoke the callback regardless. Sample the
|
|
|
|
* callback in case we are freely racing with a JS_SetGCCallback{,RT}
|
|
|
|
* on another thread.
|
|
|
|
*/
|
|
|
|
if (JSGCCallback callback = rt->gcCallback) {
|
|
|
|
Conditionally<AutoUnlockGC> unlockIf(!!(gckind & GC_LOCK_HELD), rt);
|
|
|
|
if (!callback(cx, JSGC_BEGIN) && gckind != GC_LAST_CONTEXT)
|
|
|
|
return;
|
|
|
|
}
|
2007-03-22 10:30:00 -07:00
|
|
|
|
2010-04-22 05:34:28 -07:00
|
|
|
{
|
|
|
|
/* Lock out other GC allocator and collector invocations. */
|
|
|
|
Conditionally<AutoLockGC> lockIf(!(gckind & GC_LOCK_HELD), rt);
|
2007-03-22 10:30:00 -07:00
|
|
|
|
2010-05-14 06:55:17 -07:00
|
|
|
GCUntilDone(cx, gckind GCTIMER_ARG);
|
|
|
|
}
|
2007-03-22 10:30:00 -07:00
|
|
|
|
2010-05-14 06:55:17 -07:00
|
|
|
/* We re-sample the callback again as the finalizers can change it. */
|
|
|
|
if (JSGCCallback callback = rt->gcCallback) {
|
|
|
|
Conditionally<AutoUnlockGC> unlockIf(gckind & GC_LOCK_HELD, rt);
|
2008-01-29 22:29:49 -08:00
|
|
|
|
2010-05-14 06:55:17 -07:00
|
|
|
(void) callback(cx, JSGC_END);
|
2010-04-22 05:34:28 -07:00
|
|
|
}
|
|
|
|
|
2010-05-14 06:55:17 -07:00
|
|
|
/*
|
|
|
|
* On shutdown, iterate until the JSGC_END callback stops creating
|
|
|
|
* garbage.
|
|
|
|
*/
|
|
|
|
} while (gckind == GC_LAST_CONTEXT && rt->gcPoke);
|
2010-04-08 06:01:21 -07:00
|
|
|
|
2010-04-22 05:35:21 -07:00
|
|
|
GCTIMER_END(gckind == GC_LAST_CONTEXT);
|
2007-03-22 10:30:00 -07:00
|
|
|
}
|
2010-05-14 06:55:17 -07:00
|
|
|
|
2010-06-17 15:23:17 -07:00
|
|
|
namespace js {
|
|
|
|
|
2010-05-14 06:55:17 -07:00
|
|
|
bool
|
2010-06-17 15:23:17 -07:00
|
|
|
SetProtoCheckingForCycles(JSContext *cx, JSObject *obj, JSObject *proto)
|
2010-05-14 06:55:17 -07:00
|
|
|
{
|
|
|
|
/*
|
|
|
|
* This function cannot be called during the GC and always requires a
|
|
|
|
* request.
|
|
|
|
*/
|
|
|
|
#ifdef JS_THREADSAFE
|
2010-08-30 11:46:18 -07:00
|
|
|
JS_ASSERT(cx->thread->requestDepth);
|
2010-05-14 06:55:17 -07:00
|
|
|
|
2010-08-29 12:41:24 -07:00
|
|
|
/*
|
2010-08-30 11:46:18 -07:00
|
|
|
* This is only necessary if AutoGCSession below would wait for GC to
|
|
|
|
* finish on another thread, but to capture the minimal stack space and
|
|
|
|
* for code simplicity we do it here unconditionally.
|
2010-08-29 12:41:24 -07:00
|
|
|
*/
|
2010-08-30 11:46:18 -07:00
|
|
|
RecordNativeStackTopForGC(cx);
|
2010-08-29 12:41:24 -07:00
|
|
|
#endif
|
2010-05-14 06:55:17 -07:00
|
|
|
|
2010-08-30 11:46:18 -07:00
|
|
|
JSRuntime *rt = cx->runtime;
|
|
|
|
AutoLockGC lock(rt);
|
|
|
|
AutoGCSession gcsession(cx);
|
|
|
|
AutoUnlockGC unlock(rt);
|
|
|
|
|
|
|
|
bool cycle = false;
|
|
|
|
for (JSObject *obj2 = proto; obj2;) {
|
|
|
|
obj2 = obj2->wrappedObject(cx);
|
|
|
|
if (obj2 == obj) {
|
|
|
|
cycle = true;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
obj2 = obj2->getProto();
|
|
|
|
}
|
|
|
|
if (!cycle)
|
|
|
|
obj->setProto(proto);
|
|
|
|
|
|
|
|
return !cycle;
|
|
|
|
}
|
2010-05-14 06:55:17 -07:00
|
|
|
|
2010-08-30 11:46:18 -07:00
|
|
|
/*
 * Trace (mark) everything reachable in the runtime on behalf of trc,
 * establishing a GC session first when no GC is already running on this
 * thread.
 */
void
TraceRuntime(JSTracer *trc)
{
    LeaveTrace(trc->context);

#ifdef JS_THREADSAFE
    {
        JSContext *cx = trc->context;
        JSRuntime *rt = cx->runtime;
        AutoLockGC lock(rt);

        if (rt->gcThread != cx->thread) {
            /*
             * We are not inside a GC on this thread: take a full session so
             * the heap cannot mutate while we trace it.
             */
            AutoGCSession gcsession(cx);
            AutoUnlockGC unlock(rt);
            RecordNativeStackTopForGC(trc->context);
            MarkRuntime(trc);
            return;
        }
    }
#else
    RecordNativeStackTopForGC(trc->context);
#endif

    /*
     * Calls from inside a normal GC or a recursive calls are OK and do not
     * require session setup.
     */
    MarkRuntime(trc);
}
|
2010-06-04 16:32:10 -07:00
|
|
|
|
|
|
|
/*
 * Create and register a new compartment for cx's runtime, optionally holding
 * a reference on principals for the compartment's lifetime. Returns NULL
 * (with an error reported where appropriate) on failure; the compartment is
 * destroyed on every failure path so it is never leaked.
 */
JSCompartment *
NewCompartment(JSContext *cx, JSPrincipals *principals)
{
    JSRuntime *rt = cx->runtime;
    JSCompartment *compartment = new JSCompartment(rt);
    if (!compartment || !compartment->init()) {
        /*
         * Previously the compartment leaked when only init() failed;
         * delete of a null pointer is a no-op, so one cleanup covers both.
         */
        delete compartment;
        JS_ReportOutOfMemory(cx);
        return NULL;
    }

    if (principals) {
        compartment->principals = principals;
        JSPRINCIPALS_HOLD(cx, principals);
    }

    {
        AutoLockGC lock(rt);

        if (!rt->compartments.append(compartment)) {
            AutoUnlockGC unlock(rt);
            JS_ReportOutOfMemory(cx);
            /*
             * Previously leaked here. NOTE(review): assumes ~JSCompartment
             * releases any held principals reference - confirm.
             */
            delete compartment;
            return NULL;
        }
    }

    JSCompartmentCallback callback = rt->compartmentCallback;
    if (callback && !callback(cx, compartment, JSCOMPARTMENT_NEW)) {
        {
            AutoLockGC lock(rt);
            rt->compartments.popBack();
        }
        /*
         * Previously leaked here as well. NOTE(review): assumes
         * ~JSCompartment releases any held principals reference - confirm.
         */
        delete compartment;
        return NULL;
    }

    return compartment;
}
|
|
|
|
|
|
|
|
}
|