Bug 649576: Extricate JSHashTable from JSAtomList death grip. (r=luke)

This commit is contained in:
Chris Leary 2011-06-24 14:22:30 -07:00
parent 0e43edd30e
commit 0780a149be
25 changed files with 1866 additions and 918 deletions

View File

@ -186,6 +186,7 @@ CPPSRCS = \
GlobalObject.cpp \
Stack.cpp \
String.cpp \
ParseMaps.cpp \
$(NULL)
# Changes to internal header files, used externally, massively slow down
@ -257,11 +258,12 @@ INSTALLED_HEADERS = \
prmjtime.h \
$(NULL)
###############################################
# BEGIN include sources for the vm subdirectory
######################################################
# BEGIN include sources for the engine subdirectories
#
VPATH += \
$(srcdir)/vm \
$(srcdir)/vm \
$(srcdir)/frontend \
$(NULL)
EXPORTS_NAMESPACES = vm
@ -645,7 +647,7 @@ check-malloc-function-usage: $(filter-out %jsalloc.h %jscntxt.h %jsutil.h, $(ALL
# We desire these numbers to go down, not up. See "User guide to memory
# management within SpiderMonkey" in jsutil.h.
$(srcdir)/config/check_source_count.py OffTheBooks:: 53 \
$(srcdir)/config/check_source_count.py OffTheBooks:: 54 \
"in Makefile.in" "{cx,rt}->{new_,new_array,malloc_,calloc_,realloc_}" $^
# This should go to zero, if possible.
$(srcdir)/config/check_source_count.py UnwantedForeground:: 33 \

View File

@ -0,0 +1,161 @@
/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=4 sw=4 et tw=99 ft=cpp:
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is SpiderMonkey JavaScript engine.
*
* The Initial Developer of the Original Code is
* Mozilla Corporation.
* Portions created by the Initial Developer are Copyright (C) 2011
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Chris Leary <cdleary@mozilla.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#ifndef ParseMapPool_inl_h__
#define ParseMapPool_inl_h__
#include "jscntxt.h"
#include "jsparse.h" /* Need sizeof(JSDefinition). */
#include "ParseMaps.h"
namespace js {
/*
 * Type-specialized acquire methods. All pooled map types are
 * layout-compatible (asserted in ParseMapPool::checkInvariants), so each
 * specialization just reinterprets the shared backing allocation as the
 * requested map type. Returns NULL on OOM (propagated from allocate()).
 */
template <>
inline AtomDefnMap *
ParseMapPool::acquire<AtomDefnMap>()
{
return reinterpret_cast<AtomDefnMap *>(allocate());
}
template <>
inline AtomIndexMap *
ParseMapPool::acquire<AtomIndexMap>()
{
return reinterpret_cast<AtomIndexMap *>(allocate());
}
template <>
inline AtomDOHMap *
ParseMapPool::acquire<AtomDOHMap>()
{
return reinterpret_cast<AtomDOHMap *>(allocate());
}
/*
 * Fallibly obtain map storage: prefer a recycled map, otherwise allocate
 * a fresh one. Returns NULL on OOM.
 */
inline void *
ParseMapPool::allocate()
{
if (recyclable.empty())
return allocateFresh();
void *map = recyclable.popCopy();
/* A recycled map still holds its previous entries; reset before reuse. */
asAtomMap(map)->clear();
return map;
}
/*
 * Return the definition at the head of |atom|'s chain, or NULL when the
 * atom has no definition recorded in this scope.
 */
inline JSDefinition *
AtomDecls::lookupFirst(JSAtom *atom)
{
JS_ASSERT(map);
AtomDOHPtr ptr = map->lookup(atom);
if (!ptr)
return NULL;
const DefnOrHeader &doh = ptr.value();
/* For a chain, the head node's defn is the first definition. */
return doh.isHeader() ? doh.header()->defn : doh.defn();
}
/*
 * Produce a range over every definition associated with |atom|. An absent
 * atom yields an empty range (built from a NULL defn).
 */
inline MultiDeclRange
AtomDecls::lookupMulti(JSAtom *atom)
{
JS_ASSERT(map);
AtomDOHPtr ptr = map->lookup(atom);
if (!ptr)
return MultiDeclRange((JSDefinition *) NULL);
DefnOrHeader &doh = ptr.value();
return doh.isHeader() ? MultiDeclRange(doh.header()) : MultiDeclRange(doh.defn());
}
/*
 * Add-or-update the definition for an atom known never to have a
 * multi-decl chain. Fallible: returns false on OOM from the map add.
 */
inline bool
AtomDecls::addUnique(JSAtom *atom, JSDefinition *defn)
{
JS_ASSERT(map);
AtomDOHAddPtr p = map->lookupForAdd(atom);
if (p) {
/* Already present: must be a lone defn (not a chain); overwrite it. */
JS_ASSERT(!p.value().isHeader());
p.value() = DefnOrHeader(defn);
return true;
}
return map->add(p, atom, DefnOrHeader(defn));
}
/*
 * Lazily acquire the backing map from the context's parse-map pool.
 * Idempotent; returns false only when acquisition fails (OOM).
 */
template <class Map>
inline bool
AtomThingMapPtr<Map>::ensureMap(JSContext *cx)
{
if (!map_)
map_ = cx->parseMapPool().acquire<Map>();
return !!map_;
}
/*
 * Return the backing map (if any) to the context's pool and clear the
 * pointer so a later ensureMap can re-acquire. Safe to call when empty.
 */
template <class Map>
inline void
AtomThingMapPtr<Map>::releaseMap(JSContext *cx)
{
if (map_) {
cx->parseMapPool().release(map_);
map_ = NULL;
}
}
/*
 * Fallibly acquire the backing multi-map from the context's pool.
 * Returns false on OOM (|map| stays NULL). The explicit !! conversion
 * matches the style of AtomThingMapPtr::ensureMap.
 */
inline bool
AtomDecls::init()
{
map = cx->parseMapPool().acquire<AtomDOHMap>();
return !!map;
}
/* Return the backing map (if one was acquired) to the pool for reuse. */
inline
AtomDecls::~AtomDecls()
{
if (map)
cx->parseMapPool().release(map);
}
} /* namespace js */
#endif

View File

@ -0,0 +1,196 @@
/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=4 sw=4 et tw=99 ft=cpp:
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is SpiderMonkey JavaScript engine.
*
* The Initial Developer of the Original Code is
* Mozilla Corporation.
* Portions created by the Initial Developer are Copyright (C) 2011
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Chris Leary <cdleary@mozilla.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#include "ParseMaps-inl.h"
using namespace js;
/*
 * Compile-time invariants that make the pool's type-punning of map
 * storage (see the acquire<T> specializations) legal.
 */
void
ParseMapPool::checkInvariants()
{
/*
 * Having all values be of the same size permits us to easily reuse the
 * allocated space for each of the map types.
 */
JS_STATIC_ASSERT(sizeof(JSDefinition *) == sizeof(jsatomid));
JS_STATIC_ASSERT(sizeof(JSDefinition *) == sizeof(DefnOrHeader));
JS_STATIC_ASSERT(sizeof(AtomDefnMap::Entry) == sizeof(AtomIndexMap::Entry));
JS_STATIC_ASSERT(sizeof(AtomDefnMap::Entry) == sizeof(AtomDOHMap::Entry));
JS_STATIC_ASSERT(sizeof(AtomMapT::Entry) == sizeof(AtomDOHMap::Entry));
/* Ensure that the HashTable::clear goes quickly via memset. */
JS_STATIC_ASSERT(tl::IsPodType<AtomIndexMap::WordMap::Entry>::result);
JS_STATIC_ASSERT(tl::IsPodType<AtomDOHMap::WordMap::Entry>::result);
JS_STATIC_ASSERT(tl::IsPodType<AtomDefnMap::WordMap::Entry>::result);
}
/*
 * Destroy every map this pool ever handed out (all are recorded in |all|,
 * recycled or not) and free the bookkeeping vectors.
 */
void
ParseMapPool::purgeAll()
{
/* All pooled maps are layout-compatible, so deleting as AtomMapT is valid. */
for (void **it = all.begin(), **end = all.end(); it != end; ++it)
cx->delete_<AtomMapT>(asAtomMap(*it));
all.clearAndFree();
recyclable.clearAndFree();
}
/*
 * Allocate a brand-new map and record it in |all|. Note that |recyclable|
 * is pre-reserved to the same capacity so that a later recycle() may use
 * infallibleAppend. Returns NULL on OOM.
 */
void *
ParseMapPool::allocateFresh()
{
size_t newAllLength = all.length() + 1;
if (!all.reserve(newAllLength) || !recyclable.reserve(newAllLength))
return NULL;
AtomMapT *map = cx->new_<AtomMapT>(cx);
if (!map)
return NULL;
all.infallibleAppend(map);
return (void *) map;
}
#ifdef DEBUG
/* Debug-only: print every atom and its definition (or chain) to stderr. */
void
AtomDecls::dump()
{
for (AtomDOHRange r = map->all(); !r.empty(); r.popFront()) {
fprintf(stderr, "atom: ");
js_DumpAtom(r.front().key());
const DefnOrHeader &doh = r.front().value();
if (doh.isHeader()) {
/* Multi-decl case: walk the whole node chain. */
AtomDeclNode *node = doh.header();
do {
fprintf(stderr, " node: %p\n", (void *) node);
fprintf(stderr, " defn: %p\n", (void *) node->defn);
node = node->next;
} while (node);
} else {
fprintf(stderr, " defn: %p\n", (void *) doh.defn());
}
}
}
/* Debug-only: print each atom/defn pair held by |map| to stderr. */
void
DumpAtomDefnMap(const AtomDefnMapPtr &map)
{
if (map->empty()) {
fprintf(stderr, "empty\n");
return;
}
for (AtomDefnRange r = map->all(); !r.empty(); r.popFront()) {
fprintf(stderr, "atom: ");
js_DumpAtom(r.front().key());
fprintf(stderr, "defn: %p\n", (void *) r.front().value());
}
}
#endif
/*
 * Fallibly allocate a chain node from the context's temp arena pool,
 * reporting OOM on failure. No matching free: nodes are arena-allocated
 * and reclaimed with the arena, not individually.
 */
AtomDeclNode *
AtomDecls::allocNode(JSDefinition *defn)
{
AtomDeclNode *p;
JS_ARENA_ALLOCATE_TYPE(p, AtomDeclNode, &cx->tempPool);
if (!p) {
js_ReportOutOfMemory(cx);
return NULL;
}
/* Placement-new: construct the node in the raw arena storage. */
return new (p) AtomDeclNode(defn);
}
/*
 * Add a definition that lexically shadows any existing ones for |atom|:
 * the new node is prepended to the chain. Fallible (OOM).
 */
bool
AtomDecls::addShadow(JSAtom *atom, JSDefinition *defn)
{
AtomDeclNode *node = allocNode(defn);
if (!node)
return false;
AtomDOHAddPtr p = map->lookupForAdd(atom);
if (!p)
return map->add(p, atom, DefnOrHeader(node));
AtomDeclNode *toShadow;
if (p.value().isHeader()) {
toShadow = p.value().header();
} else {
/* Lone defn: box it into a node so it can be chained behind the new one. */
toShadow = allocNode(p.value().defn());
if (!toShadow)
return false;
}
node->next = toShadow;
p.value() = DefnOrHeader(node);
return true;
}
/*
 * Fallibly return the last link of |doh|'s chain as a node, converting a
 * lone defn into a (new) single-node chain in place when necessary.
 * Returns NULL on OOM.
 */
AtomDeclNode *
AtomDecls::lastAsNode(DefnOrHeader *doh)
{
if (doh->isHeader()) {
AtomDeclNode *last = doh->header();
while (last->next)
last = last->next;
return last;
}
/* Otherwise, we need to turn the existing defn into a node. */
AtomDeclNode *node = allocNode(doh->defn());
if (!node)
return NULL;
*doh = DefnOrHeader(node);
return node;
}
/*
 * Add a hoisted definition for |atom|: appended at the tail of the chain,
 * so existing (shadowing) definitions keep precedence. Fallible (OOM).
 */
bool
AtomDecls::addHoist(JSAtom *atom, JSDefinition *defn)
{
AtomDeclNode *node = allocNode(defn);
if (!node)
return false;
AtomDOHAddPtr p = map->lookupForAdd(atom);
if (p) {
AtomDeclNode *last = lastAsNode(&p.value());
if (!last)
return false;
last->next = node;
return true;
}
return map->add(p, atom, DefnOrHeader(node));
}

425
js/src/frontend/ParseMaps.h Normal file
View File

@ -0,0 +1,425 @@
/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=4 sw=4 et tw=99 ft=cpp:
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is SpiderMonkey JavaScript engine.
*
* The Initial Developer of the Original Code is
* Mozilla Corporation.
* Portions created by the Initial Developer are Copyright (C) 2011
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Chris Leary <cdleary@mozilla.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#ifndef ParseMaps_h__
#define ParseMaps_h__
#include "jsvector.h"
#include "mfbt/InlineMap.h"
namespace js {
/*
* A pool that permits the reuse of the backing storage for the defn, index, or
* defn-or-header (multi) maps.
*
* The pool owns all the maps that are given out, and is responsible for
* relinquishing all resources when |purgeAll| is triggered.
*/
class ParseMapPool
{
typedef Vector<void *, 32, SystemAllocPolicy> RecyclableMaps;
RecyclableMaps all;        /* Every map ever created by this pool. */
RecyclableMaps recyclable; /* Subset of |all| currently available for reuse. */
JSContext *cx;
void checkInvariants();
/* Mark |map| reusable; it must already be owned by this pool. */
void recycle(void *map) {
JS_ASSERT(map);
#ifdef DEBUG
bool ok = false;
/* Make sure the map is in |all| but not already in |recyclable|. */
for (void **it = all.begin(), **end = all.end(); it != end; ++it) {
if (*it == map) {
ok = true;
break;
}
}
JS_ASSERT(ok);
for (void **it = recyclable.begin(), **end = recyclable.end(); it != end; ++it)
JS_ASSERT(*it != map);
#endif
JS_ASSERT(recyclable.length() < all.length());
recyclable.infallibleAppend(map); /* Reserved in allocateFresh. */
}
void *allocateFresh();
void *allocate();
/* Arbitrary atom map type, that has keys and values of the same kind. */
typedef AtomIndexMap AtomMapT;
static AtomMapT *asAtomMap(void *ptr) {
return reinterpret_cast<AtomMapT *>(ptr);
}
public:
explicit ParseMapPool(JSContext *cx) : cx(cx) {}
~ParseMapPool() {
purgeAll();
}
void purgeAll();
bool empty() const {
return all.empty();
}
/* Fallibly acquire one of the supported map types from the pool. */
template <typename T>
T *acquire();
/* Release one of the supported map types back to the pool. */
void release(AtomIndexMap *map) {
recycle((void *) map);
}
void release(AtomDefnMap *map) {
recycle((void *) map);
}
void release(AtomDOHMap *map) {
recycle((void *) map);
}
}; /* ParseMapPool */
/*
* N.B. This is a POD-type so that it can be included in the JSParseNode union.
* If possible, use the corresponding |OwnedAtomThingMapPtr| variant.
*/
template <class Map>
struct AtomThingMapPtr
{
Map *map_; /* NULL until ensureMap/setMap; POD, so no constructor. */
/* POD stand-in for a constructor: callers must init() before use. */
void init() { clearMap(); }
bool ensureMap(JSContext *cx);
void releaseMap(JSContext *cx);
bool hasMap() const { return map_; }
Map *getMap() { return map_; }
void setMap(Map *newMap) { JS_ASSERT(!map_); map_ = newMap; }
void clearMap() { map_ = NULL; }
Map *operator->() { return map_; }
const Map *operator->() const { return map_; }
Map &operator*() const { return *map_; }
};
struct AtomDefnMapPtr : public AtomThingMapPtr<AtomDefnMap>
{
/* Return the defn mapped to |atom|, or NULL if absent. Requires a map. */
JS_ALWAYS_INLINE
JSDefinition *lookupDefn(JSAtom *atom) {
AtomDefnMap::Ptr p = map_->lookup(atom);
return p ? p.value() : NULL;
}
};
typedef AtomThingMapPtr<AtomIndexMap> AtomIndexMapPtr;
/*
* Wrapper around an AtomThingMapPtr (or its derivatives) that automatically
* releases a map on destruction, if one has been acquired.
*/
template <typename AtomThingMapPtrT>
class OwnedAtomThingMapPtr : public AtomThingMapPtrT
{
JSContext *cx; /* Context whose pool the map is released back into. */
public:
explicit OwnedAtomThingMapPtr(JSContext *cx) : cx(cx) {
AtomThingMapPtrT::init();
}
/* RAII: hand an acquired map back to the pool (no-op when none). */
~OwnedAtomThingMapPtr() {
AtomThingMapPtrT::releaseMap(cx);
}
};
typedef OwnedAtomThingMapPtr<AtomDefnMapPtr> OwnedAtomDefnMapPtr;
typedef OwnedAtomThingMapPtr<AtomIndexMapPtr> OwnedAtomIndexMapPtr;
/* Node structure for chaining in AtomDecls. */
struct AtomDeclNode
{
JSDefinition *defn;
AtomDeclNode *next;
explicit AtomDeclNode(JSDefinition *defn)
: defn(defn), next(NULL)
{}
};
/*
* Tagged union of a JSDefinition and an AtomDeclNode, for use in AtomDecl's
* internal map.
*/
class DefnOrHeader
{
/*
 * Low-bit-tagged pointer: bit 0 set means |head| (chain header), clear
 * means |defn|. The defn constructor asserts the bit is naturally clear,
 * i.e. JSDefinition pointers are at least 2-byte aligned.
 */
union {
JSDefinition *defn;
AtomDeclNode *head;
uintptr_t bits;
} u;
public:
DefnOrHeader() {
u.bits = 0;
}
explicit DefnOrHeader(JSDefinition *defn) {
u.defn = defn;
JS_ASSERT(!isHeader());
}
explicit DefnOrHeader(AtomDeclNode *node) {
u.head = node;
u.bits |= 0x1;
JS_ASSERT(isHeader());
}
bool isHeader() const {
return u.bits & 0x1;
}
JSDefinition *defn() const {
JS_ASSERT(!isHeader());
return u.defn;
}
AtomDeclNode *header() const {
JS_ASSERT(isHeader());
/* Strip the tag bit to recover the node pointer. */
return (AtomDeclNode *) (u.bits & ~0x1);
}
#ifdef DEBUG
void dump();
#endif
};
/* DefnOrHeader is a tagged word: declare it POD so containers may memset/memcpy. */
namespace tl {
template <> struct IsPodType<DefnOrHeader> {
static const bool result = true;
};
} /* namespace tl */
/*
* Multimap for function-scope atom declarations.
*
* Wraps an internal DeclOrHeader map with multi-map functionality.
*
* In the common case, no block scoping is used, and atoms have a single
* associated definition. In the uncommon (block scoping) case, we map the atom
* to a chain of definition nodes.
*/
class AtomDecls
{
/* AtomDeclsIter needs to get at the DOHMap directly. */
friend class AtomDeclsIter;
JSContext *cx;
AtomDOHMap *map; /* Pool-acquired in init(), released in the destructor. */
/* Non-copyable: the map is owned exclusively. */
AtomDecls(const AtomDecls &other);
void operator=(const AtomDecls &other);
AtomDeclNode *allocNode(JSDefinition *defn);
/*
 * Fallibly return the value in |doh| as a node.
 * Update the defn currently occupying |doh| to a node if necessary.
 */
AtomDeclNode *lastAsNode(DefnOrHeader *doh);
public:
explicit AtomDecls(JSContext *cx)
: cx(cx), map(NULL)
{}
~AtomDecls();
bool init();
void clear() {
map->clear();
}
/* Return the definition at the head of the chain for |atom|. */
inline JSDefinition *lookupFirst(JSAtom *atom);
/* Perform a lookup that can iterate over the definitions associated with |atom|. */
inline MultiDeclRange lookupMulti(JSAtom *atom);
/* Add-or-update a known-unique definition for |atom|. */
inline bool addUnique(JSAtom *atom, JSDefinition *defn);
bool addShadow(JSAtom *atom, JSDefinition *defn);
bool addHoist(JSAtom *atom, JSDefinition *defn);
/* Updating the definition for an entry that is known to exist is infallible. */
void update(JSAtom *atom, JSDefinition *defn) {
JS_ASSERT(map);
AtomDOHMap::Ptr p = map->lookup(atom);
JS_ASSERT(p);
JS_ASSERT(!p.value().isHeader());
p.value() = DefnOrHeader(defn);
}
/* Remove the node at the head of the chain for |atom|. */
void remove(JSAtom *atom) {
JS_ASSERT(map);
AtomDOHMap::Ptr p = map->lookup(atom);
if (!p)
return;
DefnOrHeader &doh = p.value();
if (!doh.isHeader()) {
map->remove(p);
return;
}
/* Chain case: pop the head node; the map entry stays if a tail remains. */
AtomDeclNode *node = doh.header();
AtomDeclNode *newHead = node->next;
if (newHead)
p.value() = DefnOrHeader(newHead);
else
map->remove(p);
}
AtomDOHMap::Range all() {
JS_ASSERT(map);
return map->all();
}
#ifdef DEBUG
void dump();
#endif
};
/*
* Lookup state tracker for those situations where the caller wants to traverse
* multiple definitions associated with a single atom. This occurs due to block
* scoping.
*/
class MultiDeclRange
{
friend class AtomDecls;
AtomDeclNode *node; /* NULL when iterating a lone (chainless) defn. */
JSDefinition *defn; /* Current front; NULL means the range is empty. */
explicit MultiDeclRange(JSDefinition *defn) : node(NULL), defn(defn) {}
explicit MultiDeclRange(AtomDeclNode *node) : node(node), defn(node->defn) {}
public:
void popFront() {
JS_ASSERT(!empty());
if (!node) {
/* Lone-defn range: a single pop exhausts it. */
defn = NULL;
return;
}
node = node->next;
defn = node ? node->defn : NULL;
}
JSDefinition *front() {
JS_ASSERT(!empty());
return defn;
}
bool empty() const {
JS_ASSERT_IF(!defn, !node);
return !defn;
}
};
/* Iterates over all the definitions in an AtomDecls. */
/* Call-operator iterator: yields each defn in turn, NULL when exhausted. */
class AtomDeclsIter
{
AtomDOHMap::Range r; /* Range over the map. */
AtomDeclNode *link;  /* Optional next node in the current atom's chain. */
public:
explicit AtomDeclsIter(AtomDecls *decls) : r(decls->all()), link(NULL) {}
JSDefinition *operator()() {
/* Drain the current atom's chain before advancing the map range. */
if (link) {
JS_ASSERT(link != link->next);
JSDefinition *result = link->defn;
link = link->next;
JS_ASSERT(result);
return result;
}
if (r.empty())
return NULL;
const DefnOrHeader &doh = r.front().value();
r.popFront();
if (!doh.isHeader())
return doh.defn();
JS_ASSERT(!link);
AtomDeclNode *node = doh.header();
link = node->next;
return node->defn;
}
};
typedef AtomDefnMap::Range AtomDefnRange;
typedef AtomDefnMap::AddPtr AtomDefnAddPtr;
typedef AtomDefnMap::Ptr AtomDefnPtr;
typedef AtomIndexMap::AddPtr AtomIndexAddPtr;
typedef AtomIndexMap::Ptr AtomIndexPtr;
typedef AtomDOHMap::Ptr AtomDOHPtr;
typedef AtomDOHMap::AddPtr AtomDOHAddPtr;
typedef AtomDOHMap::Range AtomDOHRange;
} /* namespace js */
#endif

View File

@ -0,0 +1 @@
/*
 * Duplicate formal parameter names: the rightmost |a| binding wins, so
 * invoking with (2, 4, 6) must yield 6.
 */
assertEq((function (a, b, a) { return a; })(2, 4, 6), 6);

View File

@ -0,0 +1,9 @@
/* Parse correctly. */
/*
 * NOTE(review): the catch variable |e| intentionally shadows the formal
 * parameter |e|; per the note above, the point of this test is that the
 * function parses (presumably a regression check) — confirm against the bug.
 */
function assignToClassListStrict(e) {
"use strict";
try {
e.classList = "foo";
/* Reaching here means the assignment did not throw: report failure. */
ok(false, "assigning to classList didn't throw");
} catch (e) { }
}

View File

@ -641,13 +641,6 @@ js_DumpAtoms(JSContext *cx, FILE *fp)
}
#endif
/* JSHashTable key-hash hook: keys are JSAtom pointers, hashed via ATOM_HASH. */
static JSHashNumber
js_hash_atom_ptr(const void *key)
{
const JSAtom *atom = (const JSAtom *) key;
return ATOM_HASH(atom);
}
#if JS_BITS_PER_WORD == 32
# define TEMP_SIZE_START_LOG2 5
#else
@ -660,312 +653,31 @@ js_hash_atom_ptr(const void *key)
JS_STATIC_ASSERT(TEMP_SIZE_START >= sizeof(JSHashTable));
/*
 * JSHashTable allocator hook: satisfy small requests from the parser's
 * size-binned freelists, falling back to the context's temp arena pool.
 * Reports OOM and returns NULL on arena failure.
 */
static void *
js_alloc_temp_space(void *priv, size_t size)
{
Parser *parser = (Parser *) priv;
void *space;
if (size < TEMP_SIZE_LIMIT) {
int bin = JS_CeilingLog2(size) - TEMP_SIZE_START_LOG2;
JS_ASSERT(unsigned(bin) < NUM_TEMP_FREELISTS);
space = parser->tempFreeList[bin];
if (space) {
/* Freelist links are stored in the first word of each free chunk. */
parser->tempFreeList[bin] = *(void **)space;
return space;
}
}
JS_ARENA_ALLOCATE(space, &parser->context->tempPool, size);
if (!space)
js_ReportOutOfMemory(parser->context);
return space;
}
/*
 * JSHashTable free hook: push small chunks back onto the matching size
 * bin. Large chunks are abandoned to the arena (freed wholesale later).
 */
static void
js_free_temp_space(void *priv, void *item, size_t size)
{
if (size >= TEMP_SIZE_LIMIT)
return;
Parser *parser = (Parser *) priv;
int bin = JS_CeilingLog2(size) - TEMP_SIZE_START_LOG2;
JS_ASSERT(unsigned(bin) < NUM_TEMP_FREELISTS);
/* Thread the chunk onto the bin's freelist via its first word. */
*(void **)item = parser->tempFreeList[bin];
parser->tempFreeList[bin] = item;
}
/*
 * JSHashTable entry allocator hook: reuse a recycled JSAtomListElement if
 * available, else carve one from the temp arena. Reports OOM on failure.
 */
static JSHashEntry *
js_alloc_temp_entry(void *priv, const void *key)
{
Parser *parser = (Parser *) priv;
JSAtomListElement *ale;
ale = parser->aleFreeList;
if (ale) {
parser->aleFreeList = ALE_NEXT(ale);
return &ale->entry;
}
JS_ARENA_ALLOCATE_TYPE(ale, JSAtomListElement, &parser->context->tempPool);
if (!ale) {
js_ReportOutOfMemory(parser->context);
return NULL;
}
return &ale->entry;
}
/* JSHashTable entry free hook: return the element to the parser's freelist. */
static void
js_free_temp_entry(void *priv, JSHashEntry *he, uintN flag)
{
Parser *parser = (Parser *) priv;
JSAtomListElement *ale = (JSAtomListElement *) he;
ALE_SET_NEXT(ale, parser->aleFreeList);
parser->aleFreeList = ale;
}
/* Hook table routing JSHashTable allocation through the parser's freelists/arena. */
static JSHashAllocOps temp_alloc_ops = {
js_alloc_temp_space, js_free_temp_space,
js_alloc_temp_entry, js_free_temp_entry
};
/*
 * Look up |atom|, returning its element or NULL. |hep| is set to the hash
 * bucket position when the table exists, and to NULL in linked-list mode.
 * List mode is self-organizing: a hit is moved to the front of the list.
 */
JSAtomListElement *
JSAtomList::rawLookup(JSAtom *atom, JSHashEntry **&hep)
{
if (table) {
hep = JS_HashTableRawLookup(table, ATOM_HASH(atom), atom);
return (JSAtomListElement *) *hep;
}
JSHashEntry **alep = &list;
hep = NULL;
JSAtomListElement *ale;
while ((ale = (JSAtomListElement *)*alep) != NULL) {
if (ALE_ATOM(ale) == atom) {
/* Hit, move atom's element to the front of the list. */
*alep = ale->entry.next;
ale->entry.next = list;
list = &ale->entry;
break;
}
alep = &ale->entry.next;
}
return ale;
}
#define ATOM_LIST_HASH_THRESHOLD 12
/*
 * Add an element for |atom| (fresh for SHADOW/HOIST, reused for UNIQUE
 * hits), migrating from the linear list to a hash table once the count
 * reaches ATOM_LIST_HASH_THRESHOLD. HOIST appends to preserve order;
 * other modes prepend. Returns NULL on OOM.
 */
JSAtomListElement *
JSAtomList::add(Parser *parser, JSAtom *atom, AddHow how)
{
JS_ASSERT(!set);
JSAtomListElement *ale, *ale2, *next;
JSHashEntry **hep;
ale = rawLookup(atom, hep);
if (!ale || how != UNIQUE) {
if (count < ATOM_LIST_HASH_THRESHOLD && !table) {
/* Few enough for linear search and no hash table yet needed. */
ale = (JSAtomListElement *)js_alloc_temp_entry(parser, atom);
if (!ale)
return NULL;
ALE_SET_ATOM(ale, atom);
if (how == HOIST) {
ale->entry.next = NULL;
hep = (JSHashEntry **) &list;
while (*hep)
hep = &(*hep)->next;
*hep = &ale->entry;
} else {
ale->entry.next = list;
list = &ale->entry;
}
} else {
/*
 * We should hash, or else we already are hashing, but count was
 * reduced by JSAtomList::rawRemove below ATOM_LIST_HASH_THRESHOLD.
 * Check whether we should create the table.
 */
if (!table) {
/* No hash table yet, so hep had better be null! */
JS_ASSERT(!hep);
table = JS_NewHashTable(count + 1, js_hash_atom_ptr,
JS_CompareValues, JS_CompareValues,
&temp_alloc_ops, parser);
if (!table)
return NULL;
/*
 * Set ht->nentries explicitly, because we are moving entries
 * from list to ht, not calling JS_HashTable(Raw|)Add.
 */
table->nentries = count;
/*
 * Insert each ale on list into the new hash table. Append to
 * the hash chain rather than inserting at the bucket head, to
 * preserve order among entries with the same key.
 */
for (ale2 = (JSAtomListElement *)list; ale2; ale2 = next) {
next = ALE_NEXT(ale2);
ale2->entry.keyHash = ATOM_HASH(ALE_ATOM(ale2));
hep = JS_HashTableRawLookup(table, ale2->entry.keyHash,
ale2->entry.key);
while (*hep)
hep = &(*hep)->next;
*hep = &ale2->entry;
ale2->entry.next = NULL;
}
list = NULL;
/* Set hep for insertion of atom's ale, immediately below. */
hep = JS_HashTableRawLookup(table, ATOM_HASH(atom), atom);
}
/* Finally, add an entry for atom into the hash bucket at hep. */
ale = (JSAtomListElement *)
JS_HashTableRawAdd(table, hep, ATOM_HASH(atom), atom, NULL);
if (!ale)
return NULL;
/*
 * If hoisting, move ale to the end of its chain after we called
 * JS_HashTableRawAdd, since RawAdd may have grown the table and
 * then recomputed hep to refer to the pointer to the first entry
 * with the given key.
 */
if (how == HOIST && ale->entry.next) {
JS_ASSERT(*hep == &ale->entry);
*hep = ale->entry.next;
ale->entry.next = NULL;
do {
hep = &(*hep)->next;
} while (*hep);
*hep = &ale->entry;
}
}
ALE_SET_INDEX(ale, count++);
}
return ale;
}
void
JSAtomList::rawRemove(Parser *parser, JSAtomListElement *ale, JSHashEntry **hep)
js_InitAtomMap(JSContext *cx, JSAtomMap *map, AtomIndexMap *indices)
{
JS_ASSERT(!set);
JS_ASSERT(count != 0);
if (table) {
JS_ASSERT(hep);
JS_HashTableRawRemove(table, hep, &ale->entry);
} else {
JS_ASSERT(!hep);
hep = &list;
while (*hep != &ale->entry) {
JS_ASSERT(*hep);
hep = &(*hep)->next;
}
*hep = ale->entry.next;
js_free_temp_entry(parser, &ale->entry, HT_FREE_ENTRY);
}
--count;
}
/*
 * Destroy the owned table (which frees its entries through the alloc-ops
 * hooks), or walk the linear list freeing each entry to the parser.
 */
JSAutoAtomList::~JSAutoAtomList()
{
if (table) {
JS_HashTableDestroy(table);
} else {
JSHashEntry *hep = list;
while (hep) {
/* Save the link before the entry is recycled. */
JSHashEntry *next = hep->next;
js_free_temp_entry(parser, hep, HT_FREE_ENTRY);
hep = next;
}
}
}
/*
 * Yield the next element, or NULL when exhausted. |index == uint32(-1)|
 * marks a finished iterator; in table mode, |index| walks the buckets and
 * |next| walks the chain within the current bucket.
 */
JSAtomListElement *
JSAtomListIterator::operator ()()
{
JSAtomListElement *ale;
JSHashTable *ht;
if (index == uint32(-1))
return NULL;
ale = next;
if (!ale) {
ht = list->table;
if (!ht)
goto done;
/* Skip empty buckets until one yields an entry, or we run out. */
do {
if (index == JS_BIT(JS_HASH_BITS - ht->shift))
goto done;
next = (JSAtomListElement *) ht->buckets[index++];
} while (!next);
ale = next;
}
next = ALE_NEXT(ale);
return ale;
done:
index = uint32(-1);
return NULL;
}
/* JS_HashTableEnumerateEntries callback: scatter each atom into vector[index]. */
static intN
js_map_atom(JSHashEntry *he, intN i, void *arg)
{
JSAtomListElement *ale = (JSAtomListElement *)he;
JSAtom **vector = (JSAtom **) arg;
vector[ALE_INDEX(ale)] = ALE_ATOM(ale);
return HT_ENUMERATE_NEXT;
}
#ifdef DEBUG
static jsrefcount js_atom_map_count;
static jsrefcount js_atom_map_hash_table_count;
#endif
void
js_InitAtomMap(JSContext *cx, JSAtomMap *map, JSAtomList *al)
{
JSAtom **vector;
JSAtomListElement *ale;
/* Map length must already be initialized. */
JS_ASSERT(al->count == map->length);
#ifdef DEBUG
JS_ATOMIC_INCREMENT(&js_atom_map_count);
#endif
ale = (JSAtomListElement *)al->list;
if (!ale && !al->table) {
JS_ASSERT(!map->vector);
return;
}
JS_ASSERT(indices->count() == map->length);
vector = map->vector;
if (al->table) {
#ifdef DEBUG
JS_ATOMIC_INCREMENT(&js_atom_map_hash_table_count);
#endif
JS_HashTableEnumerateEntries(al->table, js_map_atom, vector);
if (indices->isMap()) {
typedef AtomIndexMap::WordMap WordMap;
const WordMap &wm = indices->asMap();
for (WordMap::Range r = wm.all(); !r.empty(); r.popFront()) {
JSAtom *atom = r.front().key;
jsatomid index = r.front().value;
JS_ASSERT(index < map->length);
map->vector[index] = atom;
}
} else {
do {
vector[ALE_INDEX(ale)] = ALE_ATOM(ale);
} while ((ale = ALE_NEXT(ale)) != NULL);
for (const AtomIndexMap::InlineElem *it = indices->asInline(), *end = indices->inlineEnd();
it != end; ++it) {
JSAtom *atom = it->key;
if (!atom)
continue;
JS_ASSERT(it->value < map->length);
map->vector[it->value] = atom;
}
}
al->clear();
}
#if JS_HAS_XML_SUPPORT

View File

@ -39,14 +39,11 @@
#ifndef jsatom_h___
#define jsatom_h___
/*
* JS atom table.
*/
#include <stddef.h>
#include "jsversion.h"
#include "jsapi.h"
#include "jsprvtd.h"
#include "jshash.h"
#include "jshashtable.h"
#include "jspubtd.h"
#include "jsstr.h"
@ -144,123 +141,9 @@ struct DefaultHasher<jsid>
extern const char *
js_AtomToPrintableString(JSContext *cx, JSAtom *atom, JSAutoByteString *bytes);
/*
 * An atom-list element is just a hash entry; the ALE_* macros view its
 * key/value/next fields as the atom, the index-or-defn payload, and the
 * chain link respectively.
 */
struct JSAtomListElement {
JSHashEntry entry;
};
#define ALE_ATOM(ale) ((JSAtom *) (ale)->entry.key)
#define ALE_INDEX(ale) (jsatomid(uintptr_t((ale)->entry.value)))
#define ALE_VALUE(ale) ((jsboxedword) (ale)->entry.value)
#define ALE_NEXT(ale) ((JSAtomListElement *) (ale)->entry.next)
/*
 * In an upvars list, ALE_DEFN(ale)->resolve() is the outermost definition the
 * name may reference. If a with block or a function that calls eval encloses
 * the use, the name may end up referring to something else at runtime.
 */
#define ALE_DEFN(ale) ((JSDefinition *) (ale)->entry.value)
#define ALE_SET_ATOM(ale,atom) ((ale)->entry.key = (const void *)(atom))
#define ALE_SET_INDEX(ale,index)((ale)->entry.value = (void *)(index))
#define ALE_SET_DEFN(ale, dn) ((ale)->entry.value = (void *)(dn))
#define ALE_SET_VALUE(ale, v) ((ale)->entry.value = (void *)(v))
#define ALE_SET_NEXT(ale,nxt) ((ale)->entry.next = (JSHashEntry *)(nxt))
/*
* NB: JSAtomSet must be plain-old-data as it is embedded in the pn_u union in
* JSParseNode. JSAtomList encapsulates all operational uses of a JSAtomSet.
*
* The JSAtomList name is traditional, even though the implementation is a map
* (not to be confused with JSAtomMap). In particular the "ALE" and "ale" short
* names for JSAtomListElement variables roll off the fingers, compared to ASE
* or AME alternatives.
*/
/* POD state shared by atom lists: linear list, optional table, and count. */
struct JSAtomSet {
JSHashEntry *list; /* literals indexed for mapping */
JSHashTable *table; /* hash table if list gets too long */
jsuint count; /* count of indexed literals */
};
struct JSAtomList : public JSAtomSet
{
#ifdef DEBUG
const JSAtomSet* set; /* asserted null in mutating methods */
#endif
JSAtomList() {
list = NULL; table = NULL; count = 0;
#ifdef DEBUG
set = NULL;
#endif
}
/* View constructor: borrows |as|'s state; mutation is then asserted against. */
JSAtomList(const JSAtomSet& as) {
list = as.list; table = as.table; count = as.count;
#ifdef DEBUG
set = &as;
#endif
}
void clear() { JS_ASSERT(!set); list = NULL; table = NULL; count = 0; }
JSAtomListElement *lookup(JSAtom *atom) {
/* Bucket position is discarded; rawLookup fills it for callers who care. */
JSHashEntry **hep;
return rawLookup(atom, hep);
}
JSAtomListElement *rawLookup(JSAtom *atom, JSHashEntry **&hep);
enum AddHow { UNIQUE, SHADOW, HOIST };
JSAtomListElement *add(js::Parser *parser, JSAtom *atom, AddHow how = UNIQUE);
void remove(js::Parser *parser, JSAtom *atom) {
JSHashEntry **hep;
JSAtomListElement *ale = rawLookup(atom, hep);
if (ale)
rawRemove(parser, ale, hep);
}
void rawRemove(js::Parser *parser, JSAtomListElement *ale, JSHashEntry **hep);
};
/*
* A subclass of JSAtomList with a destructor. This atom list owns its
* hash table and its entries, but no keys or values.
*/
/* Owning variant: the destructor frees the table/entries via |parser|. */
struct JSAutoAtomList: public JSAtomList
{
JSAutoAtomList(js::Parser *p): parser(p) {}
~JSAutoAtomList();
private:
js::Parser *parser; /* For freeing list entries. */
};
/*
* Iterate over an atom list. We define a call operator to minimize the syntax
* tax for users. We do not use a more standard pattern using ++ and * because
* (a) it's the wrong pattern for a non-scalar; (b) it's overkill -- one method
* is enough. (This comment is overkill!)
*/
class JSAtomListIterator {
JSAtomList* list;
JSAtomListElement* next;  /* Next element to yield (list or bucket chain). */
uint32 index;             /* Bucket cursor; uint32(-1) marks exhaustion. */
public:
JSAtomListIterator(JSAtomList* al) : list(al) { reset(); }
void reset() {
next = (JSAtomListElement *) list->list;
index = 0;
}
JSAtomListElement* operator ()();
};
struct JSAtomMap {
JSAtom **vector; /* array of ptrs to indexed atoms */
jsatomid length; /* count of (to-be-)indexed atoms */
JSAtom **vector; /* array of ptrs to indexed atoms */
uint32 length; /* count of (to-be-)indexed atoms */
};
namespace js {
@ -673,12 +556,13 @@ js_InternNonIntElementId(JSContext *cx, JSObject *obj, const js::Value &idval,
inline bool
js_InternNonIntElementId(JSContext *cx, JSObject *obj, const js::Value &idval,
jsid *idp, js::Value *vp);
/*
* For all unmapped atoms recorded in al, add a mapping from the atom's index
* to its address. map->length must already be set to the number of atoms in
* the list and map->vector must point to pre-allocated memory.
*/
extern void
js_InitAtomMap(JSContext *cx, JSAtomMap *map, JSAtomList *al);
js_InitAtomMap(JSContext *cx, JSAtomMap *map, js::AtomIndexMap *indices);
#endif /* jsatom_h___ */

View File

@ -85,7 +85,9 @@
#ifdef JS_METHODJIT
# include "assembler/assembler/MacroAssembler.h"
#endif
#include "frontend/ParseMaps.h"
#include "jsatominlines.h"
#include "jscntxtinlines.h"
#include "jscompartment.h"
#include "jsobjinlines.h"
@ -1329,8 +1331,11 @@ JSContext::~JSContext()
/* Free the stuff hanging off of cx. */
VOUCH_DOES_NOT_REQUIRE_STACK();
JS_FinishArenaPool(&tempPool);
if (parseMapPool_)
Foreground::delete_<ParseMapPool>(parseMapPool_);
JS_FinishArenaPool(&regExpPool);
JS_FinishArenaPool(&tempPool);
if (lastMessage)
Foreground::free_(lastMessage);
@ -1454,7 +1459,7 @@ JSRuntime::onOutOfMemory(void *p, size_t nbytes, JSContext *cx)
* Release pool's arenas if the stackPool has existed for longer than the
* limit specified by gcEmptyArenaPoolLifespan.
*/
inline void
static void
FreeOldArenas(JSRuntime *rt, JSArenaPool *pool)
{
JSArena *a = pool->current;
@ -1469,6 +1474,10 @@ void
JSContext::purge()
{
FreeOldArenas(runtime, &regExpPool);
if (!activeCompilations) {
Foreground::delete_<ParseMapPool>(parseMapPool_);
parseMapPool_ = NULL;
}
}
#if defined(JS_TRACER) || defined(JS_METHODJIT)

View File

@ -955,6 +955,11 @@ struct JSContext
/* Temporary arena pool used while compiling and decompiling. */
JSArenaPool tempPool;
private:
/* Lazily initialized pool of maps used during parse/emit. */
js::ParseMapPool *parseMapPool_;
public:
/* Temporary arena pool used while evaluate regular expressions. */
JSArenaPool regExpPool;
@ -984,6 +989,13 @@ struct JSContext
inline js::RegExpStatics *regExpStatics();
public:
js::ParseMapPool &parseMapPool() {
JS_ASSERT(parseMapPool_);
return *parseMapPool_;
}
inline bool ensureParseMapPool();
/*
* The default script compilation version can be set iff there is no code running.
* This typically occurs via the JSAPI right after a context is constructed.
@ -1263,6 +1275,13 @@ struct JSContext
this->exception.setUndefined();
}
/*
* Count of currently active compilations.
* When there are compilations active for the context, the GC must not
* purge the ParseMapPool.
*/
uintN activeCompilations;
#ifdef DEBUG
/*
* Controls whether a quadratic-complexity assertion is performed during
@ -2292,8 +2311,6 @@ js_CurrentPCIsInImacro(JSContext *cx);
namespace js {
class RegExpStatics;
extern JS_FORCES_STACK JS_FRIEND_API(void)
LeaveTrace(JSContext *cx);

View File

@ -48,6 +48,8 @@
#include "jsregexp.h"
#include "jsgc.h"
#include "frontend/ParseMaps.h"
namespace js {
static inline GlobalObject *
@ -417,4 +419,13 @@ JSContext::setPendingException(js::Value v) {
assertSameCompartment(this, v);
}
inline bool
JSContext::ensureParseMapPool()
{
if (parseMapPool_)
return true;
parseMapPool_ = js::OffTheBooks::new_<js::ParseMapPool>(this);
return parseMapPool_;
}
#endif /* jscntxtinlines_h___ */

View File

@ -1963,7 +1963,6 @@ JS_PUBLIC_API(size_t)
JS_GetScriptTotalSize(JSContext *cx, JSScript *script)
{
size_t nbytes, pbytes;
jsatomid i;
jssrcnote *sn, *notes;
JSObjectArray *objarray;
JSPrincipals *principals;
@ -1974,7 +1973,7 @@ JS_GetScriptTotalSize(JSContext *cx, JSScript *script)
nbytes += script->length * sizeof script->code[0];
nbytes += script->atomMap.length * sizeof script->atomMap.vector[0];
for (i = 0; i < script->atomMap.length; i++)
for (size_t i = 0; i < script->atomMap.length; i++)
nbytes += GetAtomTotalSize(cx, script->atomMap.vector[i]);
if (script->filename)
@ -1987,7 +1986,7 @@ JS_GetScriptTotalSize(JSContext *cx, JSScript *script)
if (JSScript::isValidOffset(script->objectsOffset)) {
objarray = script->objects();
i = objarray->length;
size_t i = objarray->length;
nbytes += sizeof *objarray + i * sizeof objarray->vector[0];
do {
nbytes += JS_GetObjectTotalSize(cx, objarray->vector[--i]);
@ -1996,7 +1995,7 @@ JS_GetScriptTotalSize(JSContext *cx, JSScript *script)
if (JSScript::isValidOffset(script->regexpsOffset)) {
objarray = script->regexps();
i = objarray->length;
size_t i = objarray->length;
nbytes += sizeof *objarray + i * sizeof objarray->vector[0];
do {
nbytes += JS_GetObjectTotalSize(cx, objarray->vector[--i]);

View File

@ -74,6 +74,8 @@
#include "jsscopeinlines.h"
#include "jsscriptinlines.h"
#include "frontend/ParseMaps-inl.h"
/* Allocation chunk counts, must be powers of two in general. */
#define BYTECODE_CHUNK 256 /* code allocation increment */
#define SRCNOTE_CHUNK 64 /* initial srcnote allocation increment */
@ -109,6 +111,7 @@ JSCodeGenerator::JSCodeGenerator(Parser *parser,
: JSTreeContext(parser),
codePool(cpool), notePool(npool),
codeMark(JS_ARENA_MARK(cpool)), noteMark(JS_ARENA_MARK(npool)),
atomIndices(parser->context),
stackDepth(0), maxStackDepth(0),
ntrynotes(0), lastTryNode(NULL),
spanDeps(NULL), jumpTargets(NULL), jtFreeList(NULL),
@ -117,7 +120,9 @@ JSCodeGenerator::JSCodeGenerator(Parser *parser,
emitLevel(0),
constMap(parser->context),
constList(parser->context),
upvarIndices(parser->context),
globalUses(parser->context),
globalMap(parser->context),
closedArgs(parser->context),
closedVars(parser->context),
traceIndex(0)
@ -131,9 +136,11 @@ JSCodeGenerator::JSCodeGenerator(Parser *parser,
memset(&upvarMap, 0, sizeof upvarMap);
}
bool JSCodeGenerator::init()
bool
JSCodeGenerator::init(JSContext *cx, JSTreeContext::InitBehavior ib)
{
return constMap.init();
roLexdeps.init();
return JSTreeContext::init(cx, ib) && constMap.init() && atomIndices.ensureMap(cx);
}
JSCodeGenerator::~JSCodeGenerator()
@ -141,12 +148,14 @@ JSCodeGenerator::~JSCodeGenerator()
JS_ARENA_RELEASE(codePool, codeMark);
JS_ARENA_RELEASE(notePool, noteMark);
JSContext *cx = parser->context;
/* NB: non-null only after OOM. */
if (spanDeps)
parser->context->free_(spanDeps);
cx->free_(spanDeps);
if (upvarMap.vector)
parser->context->free_(upvarMap.vector);
cx->free_(upvarMap.vector);
}
static ptrdiff_t
@ -1551,17 +1560,19 @@ EmitBlockChain(JSContext *cx, JSCodeGenerator *cg)
return EmitKnownBlockChain(cx, cg, cg->blockChainBox);
}
static const jsatomid INVALID_ATOMID = -1;
static ptrdiff_t
EmitGoto(JSContext *cx, JSCodeGenerator *cg, JSStmtInfo *toStmt,
ptrdiff_t *lastp, JSAtomListElement *label, JSSrcNoteType noteType)
ptrdiff_t *lastp, jsatomid labelIndex = INVALID_ATOMID, JSSrcNoteType noteType = SRC_NULL)
{
intN index;
if (!EmitNonLocalJumpFixup(cx, cg, toStmt))
return -1;
if (label)
index = js_NewSrcNote2(cx, cg, noteType, (ptrdiff_t) ALE_INDEX(label));
if (labelIndex != INVALID_ATOMID)
index = js_NewSrcNote2(cx, cg, noteType, ptrdiff_t(labelIndex));
else if (noteType != SRC_NULL)
index = js_NewSrcNote(cx, cg, noteType);
else
@ -1823,20 +1834,18 @@ EmitIndexOp(JSContext *cx, JSOp op, uintN index, JSCodeGenerator *cg)
return JS_FALSE; \
JS_END_MACRO
static JSBool
static bool
EmitAtomOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
{
JSAtomListElement *ale;
JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
if (op == JSOP_GETPROP &&
pn->pn_atom == cx->runtime->atomState.lengthAtom) {
if (op == JSOP_GETPROP && pn->pn_atom == cx->runtime->atomState.lengthAtom)
return js_Emit1(cx, cg, JSOP_LENGTH) >= 0;
}
ale = cg->atomList.add(cg->parser, pn->pn_atom);
if (!ale)
return JS_FALSE;
return EmitIndexOp(cx, op, ALE_INDEX(ale), cg);
jsatomid index;
if (!cg->makeAtomIndex(pn->pn_atom, &index))
return false;
return EmitIndexOp(cx, op, index, cg);
}
static JSBool
@ -2042,12 +2051,13 @@ BindKnownGlobal(JSContext *cx, JSCodeGenerator *cg, JSParseNode *dn, JSParseNode
GlobalScope *globalScope = cg->compiler()->globalScope;
uint32 index;
jsatomid index;
if (dn->pn_cookie.isFree()) {
// The definition wasn't bound, so find its atom's index in the
// mapping of defined globals.
JSAtomListElement *ale = globalScope->names.lookup(atom);
index = ALE_INDEX(ale);
AtomIndexPtr p = globalScope->names.lookup(atom);
JS_ASSERT(!!p);
index = p.value();
} else {
JSCodeGenerator *globalcg = globalScope->cg;
@ -2119,8 +2129,6 @@ BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
JSOp op;
JSAtom *atom;
JSDefinition::Kind dn_kind;
JSAtomListElement *ale;
uintN index;
JS_ASSERT(pn->pn_type == TOK_NAME);
@ -2236,8 +2244,8 @@ BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
* must be globals, so try to use GNAME ops.
*/
if (caller->isGlobalFrame() && TryConvertToGname(cg, pn, &op)) {
ale = cg->atomList.add(cg->parser, atom);
if (!ale)
jsatomid _;
if (!cg->makeAtomIndex(atom, &_))
return JS_FALSE;
pn->pn_op = op;
@ -2256,8 +2264,8 @@ BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
if (!cg->mightAliasLocals() && !TryConvertToGname(cg, pn, &op))
return JS_TRUE;
ale = cg->atomList.add(cg->parser, atom);
if (!ale)
jsatomid _;
if (!cg->makeAtomIndex(atom, &_))
return JS_FALSE;
pn->pn_op = op;
@ -2272,7 +2280,7 @@ BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
const uintN skip = cg->staticLevel - level;
if (skip != 0) {
JS_ASSERT(cg->inFunction());
JS_ASSERT_IF(cookie.slot() != UpvarCookie::CALLEE_SLOT, cg->lexdeps.lookup(atom));
JS_ASSERT_IF(cookie.slot() != UpvarCookie::CALLEE_SLOT, cg->roLexdeps->lookup(atom));
JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
JS_ASSERT(cg->fun()->u.i.skipmin <= skip);
@ -2290,21 +2298,23 @@ BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
if (!cg->fun()->isFlatClosure())
return JS_TRUE;
ale = cg->upvarList.lookup(atom);
if (ale) {
index = ALE_INDEX(ale);
if (!cg->upvarIndices.ensureMap(cx))
return JS_FALSE;
AtomIndexAddPtr p = cg->upvarIndices->lookupForAdd(atom);
jsatomid index;
if (p) {
index = p.value();
} else {
if (!cg->bindings.addUpvar(cx, atom))
return JS_FALSE;
ale = cg->upvarList.add(cg->parser, atom);
if (!ale)
index = cg->upvarIndices->count();
if (!cg->upvarIndices->add(p, atom, index))
return JS_FALSE;
index = ALE_INDEX(ale);
JS_ASSERT(index == cg->upvarList.count - 1);
UpvarCookie *vector = cg->upvarMap.vector;
uint32 length = cg->lexdeps.count;
uint32 length = cg->roLexdeps->count();
if (!vector || cg->upvarMap.length != length) {
vector = (UpvarCookie *) cx->realloc_(vector, length * sizeof *vector);
if (!vector) {
@ -2442,9 +2452,13 @@ BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
bool
JSCodeGenerator::addGlobalUse(JSAtom *atom, uint32 slot, UpvarCookie *cookie)
{
JSAtomListElement *ale = globalMap.lookup(atom);
if (ale) {
cookie->set(0, uint16(ALE_INDEX(ale)));
if (!globalMap.ensureMap(context()))
return false;
AtomIndexAddPtr p = globalMap->lookupForAdd(atom);
if (p) {
jsatomid index = p.value();
cookie->set(0, index);
return true;
}
@ -2455,22 +2469,18 @@ JSCodeGenerator::addGlobalUse(JSAtom *atom, uint32 slot, UpvarCookie *cookie)
}
/* Find or add an existing atom table entry. */
ale = atomList.add(parser, atom);
if (!ale)
jsatomid allAtomIndex;
if (!makeAtomIndex(atom, &allAtomIndex))
return false;
cookie->set(0, globalUses.length());
jsatomid globalUseIndex = globalUses.length();
cookie->set(0, globalUseIndex);
GlobalSlotArray::Entry entry = { ALE_INDEX(ale), slot };
GlobalSlotArray::Entry entry = { allAtomIndex, slot };
if (!globalUses.append(entry))
return false;
ale = globalMap.add(parser, atom);
if (!ale)
return false;
ALE_SET_INDEX(ale, cookie->asInteger());
return true;
return globalMap->add(p, atom, globalUseIndex);
}
/*
@ -2760,7 +2770,7 @@ EmitXMLName(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
}
#endif
static JSBool
static bool
EmitSpecialPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
{
/*
@ -2768,14 +2778,14 @@ EmitSpecialPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
* interpreter and trace recorder, which skip dense array instances by
* going up to Array.prototype before looking up the property name.
*/
JSAtomListElement *ale = cg->atomList.add(cg->parser, pn->pn_atom);
if (!ale)
return JS_FALSE;
if (!EmitIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
return JS_FALSE;
jsatomid index;
if (!cg->makeAtomIndex(pn->pn_atom, &index))
return false;
if (!EmitIndexOp(cx, JSOP_QNAMEPART, index, cg))
return false;
if (js_Emit1(cx, cg, op) < 0)
return JS_FALSE;
return JS_TRUE;
return false;
return true;
}
static JSBool
@ -2828,13 +2838,9 @@ EmitPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg,
case JSOP_GETLOCAL:
op = JSOP_GETLOCALPROP;
do_indexconst: {
JSAtomListElement *ale;
jsatomid atomIndex;
ale = cg->atomList.add(cg->parser, pn->pn_atom);
if (!ale)
if (!cg->makeAtomIndex(pn->pn_atom, &atomIndex))
return JS_FALSE;
atomIndex = ALE_INDEX(ale);
return EmitSlotIndexOp(cx, op, pn2->pn_cookie.asInteger(), atomIndex, cg);
}
@ -3095,7 +3101,6 @@ EmitSwitch(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
uint32 caseCount, tableLength;
JSParseNode **table;
int32_t i, low, high;
JSAtomListElement *ale;
intN noteIndex;
size_t switchSize, tableSize;
jsbytecode *pc, *savepc;
@ -3513,14 +3518,12 @@ EmitSwitch(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
pn4->pn_type == TOK_NAME) {
/* Note a propagated constant with the const's name. */
JS_ASSERT(!pn4->maybeExpr());
ale = cg->atomList.add(cg->parser, pn4->pn_atom);
if (!ale)
jsatomid index;
if (!cg->makeAtomIndex(pn4->pn_atom, &index))
goto bad;
CG_NEXT(cg) = pc;
if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
ALE_INDEX(ale)) < 0) {
if (js_NewSrcNote2(cx, cg, SRC_LABEL, ptrdiff_t(index)) < 0)
goto bad;
}
}
pc += JUMP_OFFSET_LEN;
}
@ -3530,14 +3533,12 @@ EmitSwitch(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
if (pn4 && pn4->pn_type == TOK_NAME) {
/* Note a propagated constant with the const's name. */
JS_ASSERT(!pn4->maybeExpr());
ale = cg->atomList.add(cg->parser, pn4->pn_atom);
if (!ale)
jsatomid index;
if (!cg->makeAtomIndex(pn4->pn_atom, &index))
goto bad;
CG_NEXT(cg) = pc;
if (js_NewSrcNote2(cx, cg, SRC_LABEL, (ptrdiff_t)
ALE_INDEX(ale)) < 0) {
if (js_NewSrcNote2(cx, cg, SRC_LABEL, ptrdiff_t(index)) < 0)
goto bad;
}
}
pc += INDEX_LEN + JUMP_OFFSET_LEN;
}
@ -3723,20 +3724,17 @@ UpdateLineNumberNotes(JSContext *cx, JSCodeGenerator *cg, uintN line)
return JS_TRUE;
}
static JSBool
static bool
MaybeEmitVarDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
JSParseNode *pn, jsatomid *result)
{
jsatomid atomIndex;
JSAtomListElement *ale;
if (!pn->pn_cookie.isFree()) {
atomIndex = (jsatomid) pn->pn_cookie.slot();
atomIndex = pn->pn_cookie.slot();
} else {
ale = cg->atomList.add(cg->parser, pn->pn_atom);
if (!ale)
return JS_FALSE;
atomIndex = ALE_INDEX(ale);
if (!cg->makeAtomIndex(pn->pn_atom, &atomIndex))
return false;
}
if (JOF_OPTYPE(pn->pn_op) == JOF_ATOM &&
@ -3745,7 +3743,7 @@ MaybeEmitVarDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
{
CG_SWITCH_TO_PROLOG(cg);
if (!UpdateLineNumberNotes(cx, cg, pn->pn_pos.begin.lineno))
return JS_FALSE;
return false;
EMIT_INDEX_OP(prologOp, atomIndex);
CG_SWITCH_TO_MAIN(cg);
}
@ -3756,12 +3754,12 @@ MaybeEmitVarDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
cg->shouldNoteClosedName(pn))
{
if (!cg->closedVars.append(pn->pn_cookie.slot()))
return JS_FALSE;
return false;
}
if (result)
*result = atomIndex;
return JS_TRUE;
return true;
}
#if JS_HAS_DESTRUCTURING
@ -4550,7 +4548,6 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
ptrdiff_t top, off, tmp, beq, jmp, tmp2, tmp3;
JSParseNode *pn2, *pn3;
JSAtom *atom;
JSAtomListElement *ale;
jsatomid atomIndex;
uintN index;
ptrdiff_t noteIndex, noteIndex2;
@ -4617,7 +4614,7 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
cg->codePool, cg->notePool,
pn->pn_pos.begin.lineno);
if (!cg2->init())
if (!cg2->init(cx))
return JS_FALSE;
cg2->flags = pn->pn_funbox->tcflags | TCF_COMPILING | TCF_IN_FUNCTION |
@ -4736,10 +4733,11 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
}
case TOK_UPVARS:
JS_ASSERT(cg->lexdeps.count == 0);
JS_ASSERT(pn->pn_names.count != 0);
cg->lexdeps = pn->pn_names;
JS_ASSERT(pn->pn_names->count() != 0);
cg->roLexdeps = pn->pn_names;
ok = js_EmitTree(cx, cg, pn->pn_tree);
cg->roLexdeps.clearMap();
pn->pn_names.releaseMap(cx);
break;
case TOK_IF:
@ -4796,7 +4794,7 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
* of this switch case will fix up the backpatch chain linked from
* stmtInfo.breaks.
*/
jmp = EmitGoto(cx, cg, &stmtInfo, &stmtInfo.breaks, NULL, SRC_NULL);
jmp = EmitGoto(cx, cg, &stmtInfo, &stmtInfo.breaks);
if (jmp < 0)
return JS_FALSE;
@ -5097,7 +5095,7 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
return JS_FALSE;
}
if (!cookie.isFree()) {
atomIndex = (jsatomid) cookie.asInteger();
atomIndex = cookie.asInteger();
EMIT_UINT16_IMM_OP(op, atomIndex);
} else {
if (!EmitAtomOp(cx, pn3, op, cg))
@ -5353,35 +5351,39 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
}
break;
case TOK_BREAK:
case TOK_BREAK: {
stmt = cg->topStmt;
atom = pn->pn_atom;
jsatomid labelIndex;
if (atom) {
ale = cg->atomList.add(cg->parser, atom);
if (!ale)
if (!cg->makeAtomIndex(atom, &labelIndex))
return JS_FALSE;
while (stmt->type != STMT_LABEL || stmt->label != atom)
stmt = stmt->down;
noteType = SRC_BREAK2LABEL;
} else {
ale = NULL;
labelIndex = INVALID_ATOMID;
while (!STMT_IS_LOOP(stmt) && stmt->type != STMT_SWITCH)
stmt = stmt->down;
noteType = (stmt->type == STMT_SWITCH) ? SRC_NULL : SRC_BREAK;
}
if (EmitGoto(cx, cg, stmt, &stmt->breaks, ale, noteType) < 0)
if (EmitGoto(cx, cg, stmt, &stmt->breaks, labelIndex, noteType) < 0)
return JS_FALSE;
break;
}
case TOK_CONTINUE:
case TOK_CONTINUE: {
stmt = cg->topStmt;
atom = pn->pn_atom;
jsatomid labelIndex;
if (atom) {
/* Find the loop statement enclosed by the matching label. */
JSStmtInfo *loop = NULL;
ale = cg->atomList.add(cg->parser, atom);
if (!ale)
if (!cg->makeAtomIndex(atom, &labelIndex))
return JS_FALSE;
while (stmt->type != STMT_LABEL || stmt->label != atom) {
if (STMT_IS_LOOP(stmt))
@ -5391,15 +5393,16 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
stmt = loop;
noteType = SRC_CONT2LABEL;
} else {
ale = NULL;
labelIndex = INVALID_ATOMID;
while (!STMT_IS_LOOP(stmt))
stmt = stmt->down;
noteType = SRC_CONTINUE;
}
if (EmitGoto(cx, cg, stmt, &stmt->continues, ale, noteType) < 0)
if (EmitGoto(cx, cg, stmt, &stmt->continues, labelIndex, noteType) < 0)
return JS_FALSE;
break;
}
case TOK_WITH:
if (!js_EmitTree(cx, cg, pn->pn_left))
@ -5994,17 +5997,18 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
case TOK_COLON:
/* Emit an annotated nop so we know to decompile a label. */
atom = pn->pn_atom;
ale = cg->atomList.add(cg->parser, atom);
if (!ale)
jsatomid index;
if (!cg->makeAtomIndex(atom, &index))
return JS_FALSE;
pn2 = pn->expr();
noteType = (pn2->pn_type == TOK_LC ||
(pn2->pn_type == TOK_LEXICALSCOPE &&
pn2->expr()->pn_type == TOK_LC))
? SRC_LABELBRACE
: SRC_LABEL;
noteIndex = js_NewSrcNote2(cx, cg, noteType,
(ptrdiff_t) ALE_INDEX(ale));
noteIndex = js_NewSrcNote2(cx, cg, noteType, ptrdiff_t(index));
if (noteIndex < 0 ||
js_Emit1(cx, cg, JSOP_NOP) < 0) {
return JS_FALSE;
@ -6066,12 +6070,10 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
if (!BindNameToSlot(cx, cg, pn2))
return JS_FALSE;
if (!pn2->pn_cookie.isFree()) {
atomIndex = (jsatomid) pn2->pn_cookie.asInteger();
atomIndex = pn2->pn_cookie.asInteger();
} else {
ale = cg->atomList.add(cg->parser, pn2->pn_atom);
if (!ale)
if (!cg->makeAtomIndex(pn2->pn_atom, &atomIndex))
return JS_FALSE;
atomIndex = ALE_INDEX(ale);
if (!pn2->isConst()) {
JSOp op = PN_OP(pn2) == JSOP_SETGNAME ? JSOP_BINDGNAME : JSOP_BINDNAME;
EMIT_INDEX_OP(op, atomIndex);
@ -6081,10 +6083,8 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
case TOK_DOT:
if (!js_EmitTree(cx, cg, pn2->expr()))
return JS_FALSE;
ale = cg->atomList.add(cg->parser, pn2->pn_atom);
if (!ale)
if (!cg->makeAtomIndex(pn2->pn_atom, &atomIndex))
return JS_FALSE;
atomIndex = ALE_INDEX(ale);
break;
case TOK_LB:
JS_ASSERT(pn2->pn_arity == PN_BINARY);
@ -6447,7 +6447,7 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
if (js_Emit1(cx, cg, op) < 0)
return JS_FALSE;
} else if (!pn2->pn_cookie.isFree()) {
atomIndex = (jsatomid) pn2->pn_cookie.asInteger();
atomIndex = pn2->pn_cookie.asInteger();
EMIT_UINT16_IMM_OP(op, atomIndex);
} else {
JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
@ -6962,8 +6962,8 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
} else {
JS_ASSERT(pn3->pn_type == TOK_NAME ||
pn3->pn_type == TOK_STRING);
ale = cg->atomList.add(cg->parser, pn3->pn_atom);
if (!ale)
jsatomid index;
if (!cg->makeAtomIndex(pn3->pn_atom, &index))
return JS_FALSE;
/* Check whether we can optimize to JSOP_INITMETHOD. */
@ -6998,7 +6998,7 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
obj = NULL;
}
EMIT_INDEX_OP(op, ALE_INDEX(ale));
EMIT_INDEX_OP(op, index);
}
}
@ -7130,10 +7130,10 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
if (pn->pn_count == 0) {
JS_ASSERT(pn->pn_type == TOK_XMLLIST);
atom = cx->runtime->atomState.emptyAtom;
ale = cg->atomList.add(cg->parser, atom);
if (!ale)
jsatomid index;
if (!cg->makeAtomIndex(atom, &index))
return JS_FALSE;
EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
EMIT_INDEX_OP(JSOP_STRING, index);
}
if (js_Emit1(cx, cg, PN_OP(pn)) < 0)
return JS_FALSE;
@ -7153,13 +7153,14 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
if (js_Emit1(cx, cg, JSOP_STARTXML) < 0)
return JS_FALSE;
ale = cg->atomList.add(cg->parser,
(pn->pn_type == TOK_XMLETAGO)
? cx->runtime->atomState.etagoAtom
: cx->runtime->atomState.stagoAtom);
if (!ale)
return JS_FALSE;
EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
{
jsatomid index;
JSAtom *tmp = (pn->pn_type == TOK_XMLETAGO) ? cx->runtime->atomState.etagoAtom
: cx->runtime->atomState.stagoAtom;
if (!cg->makeAtomIndex(tmp, &index))
return JS_FALSE;
EMIT_INDEX_OP(JSOP_STRING, index);
}
JS_ASSERT(pn->pn_count != 0);
pn2 = pn->pn_head;
@ -7187,13 +7188,14 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
}
}
ale = cg->atomList.add(cg->parser,
(pn->pn_type == TOK_XMLPTAGC)
? cx->runtime->atomState.ptagcAtom
: cx->runtime->atomState.tagcAtom);
if (!ale)
return JS_FALSE;
EMIT_INDEX_OP(JSOP_STRING, ALE_INDEX(ale));
{
jsatomid index;
JSAtom *tmp = (pn->pn_type == TOK_XMLPTAGC) ? cx->runtime->atomState.ptagcAtom
: cx->runtime->atomState.tagcAtom;
if (!cg->makeAtomIndex(tmp, &index))
return JS_FALSE;
EMIT_INDEX_OP(JSOP_STRING, index);
}
if (js_Emit1(cx, cg, JSOP_ADD) < 0)
return JS_FALSE;
@ -7223,15 +7225,16 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
}
break;
case TOK_XMLPI:
ale = cg->atomList.add(cg->parser, pn->pn_atom2);
if (!ale)
return JS_FALSE;
if (!EmitIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
case TOK_XMLPI: {
jsatomid index;
if (!cg->makeAtomIndex(pn->pn_atom2, &index))
return false;
if (!EmitIndexOp(cx, JSOP_QNAMEPART, index, cg))
return JS_FALSE;
if (!EmitAtomOp(cx, pn, JSOP_XMLPI, cg))
return JS_FALSE;
break;
}
#endif /* JS_HAS_XML_SUPPORT */
default:

View File

@ -51,6 +51,10 @@
#include "jsprvtd.h"
#include "jspubtd.h"
#include "frontend/ParseMaps.h"
#include "jsatominlines.h"
JS_BEGIN_EXTERN_C
/*
@ -306,7 +310,7 @@ struct JSTreeContext { /* tree context for semantic checks */
chain when in head of let block/expr) */
JSParseNode *blockNode; /* parse node for a block with let declarations
(block with its own lexical scope) */
JSAtomList decls; /* function, const, and var declarations */
js::AtomDecls decls; /* function, const, and var declarations */
js::Parser *parser; /* ptr to common parsing and lexing data */
JSParseNode *yieldNode; /* parse node for a yield expression that might
be an error if we turn out to be inside a
@ -340,7 +344,7 @@ struct JSTreeContext { /* tree context for semantic checks */
scopeChain_ = scopeChain;
}
JSAtomList lexdeps; /* unresolved lexical name dependencies */
js::OwnedAtomDefnMapPtr lexdeps;/* unresolved lexical name dependencies */
JSTreeContext *parent; /* enclosing function or global context */
uintN staticLevel; /* static compilation unit nesting level */
@ -358,12 +362,10 @@ struct JSTreeContext { /* tree context for semantic checks */
JSTreeContext(js::Parser *prs)
: flags(0), bodyid(0), blockidGen(0), parenDepth(0), yieldCount(0), argumentsCount(0),
topStmt(NULL), topScopeStmt(NULL),
blockChainBox(NULL), blockNode(NULL), parser(prs),
yieldNode(NULL), argumentsNode(NULL),
scopeChain_(NULL), parent(prs->tc), staticLevel(0), funbox(NULL), functionList(NULL),
innermostWith(NULL), bindings(prs->context, prs->emptyCallShape),
sharpSlotBase(-1)
topStmt(NULL), topScopeStmt(NULL), blockChainBox(NULL), blockNode(NULL),
decls(prs->context), parser(prs), yieldNode(NULL), argumentsNode(NULL), scopeChain_(NULL),
lexdeps(prs->context), parent(prs->tc), staticLevel(0), funbox(NULL), functionList(NULL),
innermostWith(NULL), bindings(prs->context, prs->emptyCallShape), sharpSlotBase(-1)
{
prs->tc = this;
}
@ -377,6 +379,23 @@ struct JSTreeContext { /* tree context for semantic checks */
parser->tc = this->parent;
}
/*
* JSCodeGenerator derives from JSTreeContext; however, only the top-level
* JSCodeGenerators are actually used as full-fledged tree contexts (to
* hold decls and lexdeps). We can avoid allocation overhead by making
* this distinction explicit.
*/
enum InitBehavior {
USED_AS_TREE_CONTEXT,
USED_AS_CODE_GENERATOR
};
bool init(JSContext *cx, InitBehavior ib = USED_AS_TREE_CONTEXT) {
if (ib == USED_AS_CODE_GENERATOR)
return true;
return decls.init() && lexdeps.ensureMap(cx);
}
uintN blockid() { return topStmt ? topStmt->blockid : bodyid; }
JSObject *blockChain() {
@ -593,7 +612,8 @@ struct JSCodeGenerator : public JSTreeContext
uintN currentLine; /* line number for tree-based srcnote gen */
} prolog, main, *current;
JSAtomList atomList; /* literals indexed for mapping */
js::OwnedAtomIndexMapPtr atomIndices; /* literals indexed for mapping */
js::AtomDefnMapPtr roLexdeps;
uintN firstLine; /* first line, for js_NewScriptFromCG */
intN stackDepth; /* current stack depth in script frame */
@ -623,13 +643,13 @@ struct JSCodeGenerator : public JSTreeContext
JSCGObjectList regexpList; /* list of emitted regexp that will be
cloned during execution */
JSAtomList upvarList; /* map of atoms to upvar indexes */
js::OwnedAtomIndexMapPtr upvarIndices; /* map of atoms to upvar indexes */
JSUpvarArray upvarMap; /* indexed upvar pairs (JS_realloc'ed) */
typedef js::Vector<js::GlobalSlotArray::Entry, 16> GlobalUseVector;
GlobalUseVector globalUses; /* per-script global uses */
JSAtomList globalMap; /* per-script map of global name to globalUses vector */
js::OwnedAtomIndexMapPtr globalMap; /* per-script map of global name to globalUses vector */
/* Vectors of pn_cookie slot values. */
typedef js::Vector<uint32, 8> SlotVector;
@ -646,7 +666,11 @@ struct JSCodeGenerator : public JSTreeContext
JSCodeGenerator(js::Parser *parser,
JSArenaPool *codePool, JSArenaPool *notePool,
uintN lineno);
bool init();
bool init(JSContext *cx, JSTreeContext::InitBehavior ib = USED_AS_CODE_GENERATOR);
JSContext *context() {
return parser->context;
}
/*
* Release cg->codePool, cg->notePool, and parser->context->tempPool to
@ -673,6 +697,10 @@ struct JSCodeGenerator : public JSTreeContext
*/
bool addGlobalUse(JSAtom *atom, uint32 slot, js::UpvarCookie *cookie);
bool hasUpvarIndices() const {
return upvarIndices.hasMap() && !upvarIndices->empty();
}
bool hasSharps() const {
bool rv = !!(flags & TCF_HAS_SHARPS);
JS_ASSERT((sharpSlotBase >= 0) == rv);
@ -688,6 +716,22 @@ struct JSCodeGenerator : public JSTreeContext
bool shouldNoteClosedName(JSParseNode *pn);
JS_ALWAYS_INLINE
bool makeAtomIndex(JSAtom *atom, jsatomid *indexp) {
js::AtomIndexAddPtr p = atomIndices->lookupForAdd(atom);
if (p) {
*indexp = p.value();
return true;
}
jsatomid index = atomIndices->count();
if (!atomIndices->add(p, atom, index))
return false;
*indexp = index;
return true;
}
bool checkSingletonContext() {
if (!compileAndGo() || inFunction())
return false;

View File

@ -55,6 +55,49 @@ typedef uint32 HashNumber;
namespace detail {
template <class T, class HashPolicy, class AllocPolicy>
class HashTable;
template <class T>
class HashTableEntry {
HashNumber keyHash;
typedef typename tl::StripConst<T>::result NonConstT;
static const HashNumber sFreeKey = 0;
static const HashNumber sRemovedKey = 1;
static const HashNumber sCollisionBit = 1;
template <class, class, class> friend class HashTable;
static bool isLiveHash(HashNumber hash)
{
return hash > sRemovedKey;
}
public:
HashTableEntry() : keyHash(0), t() {}
void operator=(const HashTableEntry &rhs) { keyHash = rhs.keyHash; t = rhs.t; }
NonConstT t;
bool isFree() const { return keyHash == sFreeKey; }
void setFree() { keyHash = sFreeKey; t = T(); }
bool isRemoved() const { return keyHash == sRemovedKey; }
void setRemoved() { keyHash = sRemovedKey; t = T(); }
bool isLive() const { return isLiveHash(keyHash); }
void setLive(HashNumber hn) { JS_ASSERT(isLiveHash(hn)); keyHash = hn; }
void setCollision() { JS_ASSERT(isLive()); keyHash |= sCollisionBit; }
void setCollision(HashNumber collisionBit) {
JS_ASSERT(isLive()); keyHash |= collisionBit;
}
void unsetCollision() { JS_ASSERT(isLive()); keyHash &= ~sCollisionBit; }
bool hasCollision() const { JS_ASSERT(isLive()); return keyHash & sCollisionBit; }
bool matchHash(HashNumber hn) { return (keyHash & ~sCollisionBit) == hn; }
HashNumber getKeyHash() const { JS_ASSERT(!hasCollision()); return keyHash; }
};
/*
* js::detail::HashTable is an implementation detail of the js::HashMap and
* js::HashSet templates. For js::Hash{Map,Set} API documentation and examples,
@ -69,39 +112,8 @@ class HashTable : private AllocPolicy
typedef typename HashPolicy::KeyType Key;
typedef typename HashPolicy::Lookup Lookup;
/*
* T::operator= is a private operation for HashMap::Entry. HashMap::Entry
* makes HashTable a friend, but MSVC does not allow HashMap::Entry to make
* HashTable::Entry a friend. So do assignment here:
*/
static void assignT(NonConstT &dst, const T &src) { dst = src; }
public:
class Entry {
HashNumber keyHash;
public:
Entry() : keyHash(0), t() {}
void operator=(const Entry &rhs) { keyHash = rhs.keyHash; assignT(t, rhs.t); }
NonConstT t;
bool isFree() const { return keyHash == sFreeKey; }
void setFree() { keyHash = sFreeKey; assignT(t, T()); }
bool isRemoved() const { return keyHash == sRemovedKey; }
void setRemoved() { keyHash = sRemovedKey; assignT(t, T()); }
bool isLive() const { return isLiveHash(keyHash); }
void setLive(HashNumber hn) { JS_ASSERT(isLiveHash(hn)); keyHash = hn; }
void setCollision() { JS_ASSERT(isLive()); keyHash |= sCollisionBit; }
void setCollision(HashNumber collisionBit) {
JS_ASSERT(isLive()); keyHash |= collisionBit;
}
void unsetCollision() { JS_ASSERT(isLive()); keyHash &= ~sCollisionBit; }
bool hasCollision() const { JS_ASSERT(isLive()); return keyHash & sCollisionBit; }
bool matchHash(HashNumber hn) { return (keyHash & ~sCollisionBit) == hn; }
HashNumber getKeyHash() const { JS_ASSERT(!hasCollision()); return keyHash; }
};
typedef HashTableEntry<T> Entry;
/*
* A nullable pointer to a hash table element. A Ptr |p| can be tested
@ -174,6 +186,8 @@ class HashTable : private AllocPolicy
Entry *cur, *end;
public:
Range() : cur(NULL), end(NULL) {}
bool empty() const {
return cur == end;
}
@ -287,13 +301,13 @@ class HashTable : private AllocPolicy
static const uint8 sMaxAlphaFrac = 192; /* (0x100 * .75) taken from jsdhash.h */
static const uint8 sInvMaxAlpha = 171; /* (ceil(0x100 / .75) >> 1) */
static const HashNumber sGoldenRatio = 0x9E3779B9U; /* taken from jsdhash.h */
static const HashNumber sCollisionBit = 1;
static const HashNumber sFreeKey = 0;
static const HashNumber sRemovedKey = 1;
static const HashNumber sFreeKey = Entry::sFreeKey;
static const HashNumber sRemovedKey = Entry::sRemovedKey;
static const HashNumber sCollisionBit = Entry::sCollisionBit;
static bool isLiveHash(HashNumber hash)
{
return hash > sRemovedKey;
return Entry::isLiveHash(hash);
}
static HashNumber prepareHash(const Lookup& l)
@ -573,8 +587,12 @@ class HashTable : private AllocPolicy
public:
void clear()
{
for (Entry *e = table, *end = table + tableCapacity; e != end; ++e)
*e = Entry();
if (tl::IsPodType<Entry>::result) {
memset(table, 0, sizeof(*table) * tableCapacity);
} else {
for (Entry *e = table, *end = table + tableCapacity; e != end; ++e)
*e = Entry();
}
removedCount = 0;
entryCount = 0;
#ifdef DEBUG
@ -798,6 +816,39 @@ struct DefaultHasher<T *>: PointerHasher<T *, tl::FloorLog2<sizeof(void *)>::res
/* Looking for a hasher for jsid? Try the DefaultHasher<jsid> in jsatom.h. */
/*
 * Key/value pair stored in a HashMap. |key| is const so map clients
 * cannot corrupt the table by mutating it in place; only the HashTable
 * machinery (befriended below) may rebind it, via the private
 * operator= that casts the const away during table moves/rehashes.
 */
template <class Key, class Value>
class HashMapEntry
{
template <class, class, class> friend class detail::HashTable;
template <class> friend class detail::HashTableEntry;
void operator=(const HashMapEntry &rhs) {
/* const_cast is safe here: the entry is being overwritten wholesale. */
const_cast<Key &>(key) = rhs.key;
value = rhs.value;
}
public:
HashMapEntry() : key(), value() {}
HashMapEntry(const Key &k, const Value &v) : key(k), value(v) {}
const Key key;
Value value;
};
namespace tl {
/*
 * Propagate POD-ness through HashTableEntry: an entry wrapping a POD T
 * is itself POD, which lets HashTable::clear() use memset instead of
 * per-entry assignment (see the IsPodType check in clear()).
 */
template <class T>
struct IsPodType<detail::HashTableEntry<T> > {
static const bool result = IsPodType<T>::result;
};
/* A HashMapEntry is POD exactly when both its key and value types are. */
template <class K, class V>
struct IsPodType<HashMapEntry<K, V> >
{
static const bool result = IsPodType<K>::result && IsPodType<V>::result;
};
} /* namespace tl */
/*
* JS-friendly, STL-like container providing a hash-based map from keys to
* values. In particular, HashMap calls constructors and destructors of all
@ -820,21 +871,7 @@ class HashMap
public:
typedef typename HashPolicy::Lookup Lookup;
class Entry
{
template <class, class, class> friend class detail::HashTable;
void operator=(const Entry &rhs) {
const_cast<Key &>(key) = rhs.key;
value = rhs.value;
}
public:
Entry() : key(), value() {}
Entry(const Key &k, const Value &v) : key(k), value(v) {}
const Key key;
Value value;
};
typedef HashMapEntry<Key, Value> Entry;
private:
/* Implement HashMap using HashTable. Lift |Key| operations to |Entry|. */

View File

@ -1416,7 +1416,7 @@ js_XDRBlockObject(JSXDRState *xdr, JSObject **objp);
struct JSSharpObjectMap {
jsrefcount depth;
jsatomid sharpgen;
uint32 sharpgen;
JSHashTable *table;
};

View File

@ -95,6 +95,8 @@
#include "jsregexpinlines.h"
#include "jsscriptinlines.h"
#include "frontend/ParseMaps-inl.h"
// Grr, windows.h or something under it #defines CONST...
#ifdef CONST
#undef CONST
@ -181,7 +183,6 @@ JSParseNode::clear()
Parser::Parser(JSContext *cx, JSPrincipals *prin, StackFrame *cfp, bool foldConstants)
: js::AutoGCRooter(cx, PARSER),
context(cx),
aleFreeList(NULL),
tokenStream(cx),
principals(NULL),
callerFrame(cfp),
@ -194,6 +195,7 @@ Parser::Parser(JSContext *cx, JSPrincipals *prin, StackFrame *cfp, bool foldCons
keepAtoms(cx->runtime),
foldConstants(foldConstants)
{
cx->activeCompilations++;
js::PodArrayZero(tempFreeList);
setPrincipals(prin);
JS_ASSERT_IF(cfp, cfp->isScriptFrame());
@ -204,6 +206,8 @@ Parser::init(const jschar *base, size_t length, const char *filename, uintN line
JSVersion version)
{
JSContext *cx = context;
if (!cx->ensureParseMapPool())
return false;
emptyCallShape = EmptyShape::getEmptyCallShape(cx);
if (!emptyCallShape)
return false;
@ -222,6 +226,7 @@ Parser::~Parser()
if (principals)
JSPRINCIPALS_DROP(cx, principals);
JS_ARENA_RELEASE(&cx->tempPool, tempPoolMark);
cx->activeCompilations--;
}
void
@ -367,14 +372,17 @@ AddNodeToFreeList(JSParseNode *pn, js::Parser *parser)
JS_ASSERT(pn != parser->nodeList);
/*
* It's too hard to clear these nodes from the JSAtomLists, etc. that
* hold references to them, so we never free them. It's our caller's
* job to recognize and process these, since their children do need to
* be dealt with.
* It's too hard to clear these nodes from the AtomDefnMaps, etc. that
* hold references to them, so we never free them. It's our caller's job to
* recognize and process these, since their children do need to be dealt
* with.
*/
JS_ASSERT(!pn->pn_used);
JS_ASSERT(!pn->pn_defn);
if (pn->pn_arity == PN_NAMESET && pn->pn_names.hasMap())
pn->pn_names.releaseMap(parser->context);
#ifdef DEBUG
/* Poison the node, to catch attempts to use it without initializing it. */
memset(pn, 0xab, sizeof(*pn));
@ -542,7 +550,7 @@ PushNodeChildren(JSParseNode *pn, NodeStack *stack)
case PN_NAME:
/*
* Because used/defn nodes appear in JSAtomLists and elsewhere, we
* Because used/defn nodes appear in AtomDefnMaps and elsewhere, we
* don't recycle them. (We'll recover their storage when we free
* the temporary arena.) However, we do recycle the nodes around
* them, so clean up the pointers to avoid dangling references. The
@ -839,6 +847,8 @@ Parser::parse(JSObject *chain)
* protected from the GC by a root or a stack frame reference.
*/
JSTreeContext globaltc(this);
if (!globaltc.init(context))
return NULL;
globaltc.setScopeChain(chain);
if (!GenerateBlockId(&globaltc, globaltc.bodyid))
return NULL;
@ -878,9 +888,8 @@ SetStaticLevel(JSTreeContext *tc, uintN staticLevel)
* Compile a top-level script.
*/
Compiler::Compiler(JSContext *cx, JSPrincipals *prin, StackFrame *cfp)
: parser(cx, prin, cfp)
{
}
: parser(cx, prin, cfp), globalScope(NULL)
{}
JSScript *
Compiler::compileScript(JSContext *cx, JSObject *scopeChain, StackFrame *callerFrame,
@ -917,7 +926,7 @@ Compiler::compileScript(JSContext *cx, JSObject *scopeChain, StackFrame *callerF
TokenStream &tokenStream = parser.tokenStream;
JSCodeGenerator cg(&parser, &codePool, &notePool, tokenStream.getLineno());
if (!cg.init())
if (!cg.init(cx, JSTreeContext::USED_AS_TREE_CONTEXT))
return NULL;
MUST_FLOW_THROUGH("out");
@ -926,16 +935,15 @@ Compiler::compileScript(JSContext *cx, JSObject *scopeChain, StackFrame *callerF
JSObject *globalObj = scopeChain && scopeChain == scopeChain->getGlobal()
? scopeChain->getGlobal()
: NULL;
js::GlobalScope globalScope(cx, globalObj, &cg);
if (globalObj) {
JS_ASSERT(globalObj->isNative());
JS_ASSERT((globalObj->getClass()->flags & JSCLASS_GLOBAL_FLAGS) == JSCLASS_GLOBAL_FLAGS);
}
JS_ASSERT_IF(globalObj, globalObj->isNative());
JS_ASSERT_IF(globalObj, (globalObj->getClass()->flags & JSCLASS_GLOBAL_FLAGS) ==
JSCLASS_GLOBAL_FLAGS);
/* Null script early in case of error, to reduce our code footprint. */
script = NULL;
globalScope.cg = &cg;
GlobalScope globalScope(cx, globalObj, &cg);
cg.flags |= tcflags;
cg.setScopeChain(scopeChain);
compiler.globalScope = &globalScope;
@ -964,7 +972,8 @@ Compiler::compileScript(JSContext *cx, JSObject *scopeChain, StackFrame *callerF
* eval cache (see EvalCacheLookup in jsobj.cpp).
*/
JSAtom *atom = js_AtomizeString(cx, source);
if (!atom || !cg.atomList.add(&parser, atom))
jsatomid _;
if (!atom || !cg.makeAtomIndex(atom, &_))
goto out;
}
@ -1415,7 +1424,7 @@ CheckStrictBinding(JSContext *cx, JSTreeContext *tc, JSAtom *atom, JSParseNode *
static bool
ReportBadParameter(JSContext *cx, JSTreeContext *tc, JSAtom *name, uintN errorNumber)
{
JSDefinition *dn = ALE_DEFN(tc->decls.lookup(name));
JSDefinition *dn = tc->decls.lookupFirst(name);
JSAutoByteString bytes;
return js_AtomToPrintableString(cx, name, &bytes) &&
ReportStrictModeError(cx, TS(tc->parser), tc, dn, errorNumber, bytes.ptr());
@ -1545,23 +1554,26 @@ Parser::functionBody()
return pn;
}
static JSAtomListElement *
MakePlaceholder(JSParseNode *pn, JSTreeContext *tc)
/*
* Creates a placeholder JSDefinition node for |atom| and adds it to the
* current lexdeps.
*/
static JSDefinition *
MakePlaceholder(AtomDefnAddPtr &p, JSParseNode *pn, JSTreeContext *tc)
{
JSAtomListElement *ale = tc->lexdeps.add(tc->parser, pn->pn_atom);
if (!ale)
return NULL;
JSDefinition *dn = (JSDefinition *)NameNode::create(pn->pn_atom, tc);
JSAtom *atom = pn->pn_atom;
JSDefinition *dn = (JSDefinition *) NameNode::create(atom, tc);
if (!dn)
return NULL;
ALE_SET_DEFN(ale, dn);
if (!tc->lexdeps->add(p, atom, dn))
return NULL;
dn->pn_type = TOK_NAME;
dn->pn_op = JSOP_NOP;
dn->pn_defn = true;
dn->pn_dflags |= PND_PLACEHOLDER;
return ale;
return dn;
}
static bool
@ -1570,44 +1582,43 @@ Define(JSParseNode *pn, JSAtom *atom, JSTreeContext *tc, bool let = false)
JS_ASSERT(!pn->pn_used);
JS_ASSERT_IF(pn->pn_defn, pn->isPlaceholder());
JSHashEntry **hep;
JSAtomListElement *ale = NULL;
JSAtomList *list = NULL;
bool foundLexdep = false;
JSDefinition *dn = NULL;
if (let)
ale = (list = &tc->decls)->rawLookup(atom, hep);
if (!ale)
ale = (list = &tc->lexdeps)->rawLookup(atom, hep);
dn = tc->decls.lookupFirst(atom);
if (ale) {
JSDefinition *dn = ALE_DEFN(ale);
if (dn != pn) {
JSParseNode **pnup = &dn->dn_uses;
JSParseNode *pnu;
uintN start = let ? pn->pn_blockid : tc->bodyid;
if (!dn) {
dn = tc->lexdeps.lookupDefn(atom);
foundLexdep = !!dn;
}
while ((pnu = *pnup) != NULL && pnu->pn_blockid >= start) {
JS_ASSERT(pnu->pn_used);
pnu->pn_lexdef = (JSDefinition *) pn;
pn->pn_dflags |= pnu->pn_dflags & PND_USE2DEF_FLAGS;
pnup = &pnu->pn_link;
}
if (dn && dn != pn) {
JSParseNode **pnup = &dn->dn_uses;
JSParseNode *pnu;
uintN start = let ? pn->pn_blockid : tc->bodyid;
if (pnu != dn->dn_uses) {
*pnup = pn->dn_uses;
pn->dn_uses = dn->dn_uses;
dn->dn_uses = pnu;
while ((pnu = *pnup) != NULL && pnu->pn_blockid >= start) {
JS_ASSERT(pnu->pn_used);
pnu->pn_lexdef = (JSDefinition *) pn;
pn->pn_dflags |= pnu->pn_dflags & PND_USE2DEF_FLAGS;
pnup = &pnu->pn_link;
}
if ((!pnu || pnu->pn_blockid < tc->bodyid) && list != &tc->decls)
list->rawRemove(tc->parser, ale, hep);
}
if (pnu != dn->dn_uses) {
*pnup = pn->dn_uses;
pn->dn_uses = dn->dn_uses;
dn->dn_uses = pnu;
if ((!pnu || pnu->pn_blockid < tc->bodyid) && foundLexdep)
tc->lexdeps->remove(atom);
}
}
ale = tc->decls.add(tc->parser, atom, let ? JSAtomList::SHADOW : JSAtomList::UNIQUE);
if (!ale)
JSDefinition *toAdd = (JSDefinition *) pn;
bool ok = let ? tc->decls.addShadow(atom, toAdd) : tc->decls.addUnique(atom, toAdd);
if (!ok)
return false;
ALE_SET_DEFN(ale, pn);
pn->pn_defn = true;
pn->pn_dflags &= ~PND_PLACEHOLDER;
if (!tc->parent)
@ -1787,7 +1798,7 @@ Compiler::compileFunctionBody(JSContext *cx, JSFunction *fun, JSPrincipals *prin
TokenStream &tokenStream = parser.tokenStream;
JSCodeGenerator funcg(&parser, &codePool, &notePool, tokenStream.getLineno());
if (!funcg.init())
if (!funcg.init(cx, JSTreeContext::USED_AS_TREE_CONTEXT))
return false;
funcg.flags |= TCF_IN_FUNCTION;
@ -1923,7 +1934,7 @@ BindDestructuringArg(JSContext *cx, BindData *data, JSAtom *atom, JSTreeContext
* bindings aren't added to tc->bindings until after all arguments have
* been parsed.
*/
if (tc->decls.lookup(atom)) {
if (tc->decls.lookupFirst(atom)) {
ReportCompileErrorNumber(cx, TS(tc->parser), NULL, JSREPORT_ERROR,
JSMSG_DESTRUCT_DUP_ARG);
return JS_FALSE;
@ -2082,14 +2093,12 @@ FindFunArgs(JSFunctionBox *funbox, int level, JSFunctionBoxQueue *queue)
JSParseNode *pn = fn->pn_body;
if (pn->pn_type == TOK_UPVARS) {
JSAtomList upvars(pn->pn_names);
JS_ASSERT(upvars.count != 0);
AtomDefnMapPtr &upvars = pn->pn_names;
JS_ASSERT(upvars->count() != 0);
JSAtomListIterator iter(&upvars);
JSAtomListElement *ale;
while ((ale = iter()) != NULL) {
JSDefinition *lexdep = ALE_DEFN(ale)->resolve();
for (AtomDefnRange r = upvars->all(); !r.empty(); r.popFront()) {
JSDefinition *defn = r.front().value();
JSDefinition *lexdep = defn->resolve();
if (!lexdep->isFreeVar()) {
uintN upvarLevel = lexdep->frameLevel();
@ -2163,14 +2172,12 @@ Parser::markFunArgs(JSFunctionBox *funbox)
JSParseNode *pn = fn->pn_body;
if (pn->pn_type == TOK_UPVARS) {
JSAtomList upvars(pn->pn_names);
JS_ASSERT(upvars.count != 0);
AtomDefnMapPtr upvars = pn->pn_names;
JS_ASSERT(!upvars->empty());
JSAtomListIterator iter(&upvars);
JSAtomListElement *ale;
while ((ale = iter()) != NULL) {
JSDefinition *lexdep = ALE_DEFN(ale)->resolve();
for (AtomDefnRange r = upvars->all(); !r.empty(); r.popFront()) {
JSDefinition *defn = r.front().value();
JSDefinition *lexdep = defn->resolve();
if (!lexdep->isFreeVar() &&
!lexdep->isFunArg() &&
@ -2489,11 +2496,8 @@ Parser::setFunctionKinds(JSFunctionBox *funbox, uint32 *tcflags)
*/
FUN_SET_KIND(fun, JSFUN_NULL_CLOSURE);
} else {
JSAtomList upvars(pn->pn_names);
JS_ASSERT(upvars.count != 0);
JSAtomListIterator iter(&upvars);
JSAtomListElement *ale;
AtomDefnMapPtr upvars = pn->pn_names;
JS_ASSERT(!upvars->empty());
if (!fn->isFunArg()) {
/*
@ -2506,8 +2510,10 @@ Parser::setFunctionKinds(JSFunctionBox *funbox, uint32 *tcflags)
* below), we optimize for the case where outer bindings are
* not reassigned anywhere.
*/
while ((ale = iter()) != NULL) {
JSDefinition *lexdep = ALE_DEFN(ale)->resolve();
AtomDefnRange r = upvars->all();
for (; !r.empty(); r.popFront()) {
JSDefinition *defn = r.front().value();
JSDefinition *lexdep = defn->resolve();
if (!lexdep->isFreeVar()) {
JS_ASSERT(lexdep->frameLevel() <= funbox->level);
@ -2515,7 +2521,7 @@ Parser::setFunctionKinds(JSFunctionBox *funbox, uint32 *tcflags)
}
}
if (!ale)
if (r.empty())
FUN_SET_KIND(fun, JSFUN_NULL_CLOSURE);
} else {
uintN nupvars = 0, nflattened = 0;
@ -2525,8 +2531,9 @@ Parser::setFunctionKinds(JSFunctionBox *funbox, uint32 *tcflags)
* binding, analyze whether it is safe to copy the binding's
* value into a flat closure slot when the closure is formed.
*/
while ((ale = iter()) != NULL) {
JSDefinition *lexdep = ALE_DEFN(ale)->resolve();
for (AtomDefnRange r = upvars->all(); !r.empty(); r.popFront()) {
JSDefinition *defn = r.front().value();
JSDefinition *lexdep = defn->resolve();
if (!lexdep->isFreeVar()) {
++nupvars;
@ -2588,14 +2595,12 @@ Parser::setFunctionKinds(JSFunctionBox *funbox, uint32 *tcflags)
* The emitter must see TCF_FUN_HEAVYWEIGHT accurately before
* generating any code for a tree of nested functions.
*/
JSAtomList upvars(pn->pn_names);
JS_ASSERT(upvars.count != 0);
AtomDefnMapPtr upvars = pn->pn_names;
JS_ASSERT(!upvars->empty());
JSAtomListIterator iter(&upvars);
JSAtomListElement *ale;
while ((ale = iter()) != NULL) {
JSDefinition *lexdep = ALE_DEFN(ale)->resolve();
for (AtomDefnRange r = upvars->all(); !r.empty(); r.popFront()) {
JSDefinition *defn = r.front().value();
JSDefinition *lexdep = defn->resolve();
if (!lexdep->isFreeVar())
FlagHeavyweights(lexdep, funbox, tcflags);
}
@ -2706,14 +2711,12 @@ LeaveFunction(JSParseNode *fn, JSTreeContext *funtc, JSAtom *funAtom = NULL,
* satisfied by the function's declarations, to avoid penalizing functions
* that use only their arguments and other local bindings.
*/
if (funtc->lexdeps.count != 0) {
JSAtomListIterator iter(&funtc->lexdeps);
JSAtomListElement *ale;
if (funtc->lexdeps->count()) {
int foundCallee = 0;
while ((ale = iter()) != NULL) {
JSAtom *atom = ALE_ATOM(ale);
JSDefinition *dn = ALE_DEFN(ale);
for (AtomDefnRange r = funtc->lexdeps->all(); !r.empty(); r.popFront()) {
JSAtom *atom = r.front().key();
JSDefinition *dn = r.front().value();
JS_ASSERT(dn->isPlaceholder());
if (atom == funAtom && kind == Expression) {
@ -2747,48 +2750,50 @@ LeaveFunction(JSParseNode *fn, JSTreeContext *funtc, JSAtom *funAtom = NULL,
}
}
JSAtomListElement *outer_ale = tc->decls.lookup(atom);
JSDefinition *outer_dn = tc->decls.lookupFirst(atom);
/*
* Make sure to deoptimize lexical dependencies that are polluted
* by eval or with, to safely bind globals (see bug 561923).
*/
if (funtc->callsEval() ||
(outer_ale && tc->innermostWith &&
ALE_DEFN(outer_ale)->pn_pos < tc->innermostWith->pn_pos)) {
(outer_dn && tc->innermostWith &&
outer_dn->pn_pos < tc->innermostWith->pn_pos)) {
DeoptimizeUsesWithin(dn, fn->pn_pos);
}
if (!outer_ale)
outer_ale = tc->lexdeps.lookup(atom);
if (!outer_ale) {
/*
* Create a new placeholder for our outer lexdep. We could simply re-use
* the inner placeholder, but that introduces subtleties in the case where
* we find a later definition that captures an existing lexdep. For
* example:
*
* function f() { function g() { x; } let x; }
*
* Here, g's TOK_UPVARS node lists the placeholder for x, which must be
* captured by the 'let' declaration later, since 'let's are hoisted.
* Taking g's placeholder as our own would work fine. But consider:
*
* function f() { x; { function g() { x; } let x; } }
*
* Here, the 'let' must not capture all the uses of f's lexdep entry for
* x, but it must capture the x node referred to from g's TOK_UPVARS node.
* Always turning inherited lexdeps into uses of a new outer definition
* allows us to handle both these cases in a natural way.
*/
outer_ale = MakePlaceholder(dn, tc);
if (!outer_ale)
return false;
if (!outer_dn) {
AtomDefnAddPtr p = tc->lexdeps->lookupForAdd(atom);
if (p) {
outer_dn = p.value();
} else {
/*
* Create a new placeholder for our outer lexdep. We could
* simply re-use the inner placeholder, but that introduces
* subtleties in the case where we find a later definition
* that captures an existing lexdep. For example:
*
* function f() { function g() { x; } let x; }
*
* Here, g's TOK_UPVARS node lists the placeholder for x,
* which must be captured by the 'let' declaration later,
* since 'let's are hoisted. Taking g's placeholder as our
* own would work fine. But consider:
*
* function f() { x; { function g() { x; } let x; } }
*
* Here, the 'let' must not capture all the uses of f's
* lexdep entry for x, but it must capture the x node
* referred to from g's TOK_UPVARS node. Always turning
* inherited lexdeps into uses of a new outer definition
* allows us to handle both these cases in a natural way.
*/
outer_dn = MakePlaceholder(p, dn, tc);
if (!outer_dn)
return false;
}
}
JSDefinition *outer_dn = ALE_DEFN(outer_ale);
/*
* Insert dn's uses list at the front of outer_dn's list.
*
@ -2829,7 +2834,7 @@ LeaveFunction(JSParseNode *fn, JSTreeContext *funtc, JSAtom *funAtom = NULL,
outer_dn->pn_dflags |= PND_CLOSED;
}
if (funtc->lexdeps.count - foundCallee != 0) {
if (funtc->lexdeps->count() - foundCallee != 0) {
JSParseNode *body = fn->pn_body;
fn->pn_body = NameSetNode::create(tc);
@ -2839,12 +2844,15 @@ LeaveFunction(JSParseNode *fn, JSTreeContext *funtc, JSAtom *funAtom = NULL,
fn->pn_body->pn_type = TOK_UPVARS;
fn->pn_body->pn_pos = body->pn_pos;
if (foundCallee)
funtc->lexdeps.remove(tc->parser, funAtom);
funtc->lexdeps->remove(funAtom);
/* Transfer ownership of the lexdep map to the parse node. */
fn->pn_body->pn_names = funtc->lexdeps;
funtc->lexdeps.clearMap();
fn->pn_body->pn_tree = body;
} else {
funtc->lexdeps.releaseMap(funtc->parser->context);
}
funtc->lexdeps.clear();
}
/*
@ -2855,11 +2863,10 @@ LeaveFunction(JSParseNode *fn, JSTreeContext *funtc, JSAtom *funAtom = NULL,
* calls to eval) be assigned.
*/
if (funtc->inStrictMode() && funbox->object->getFunctionPrivate()->nargs > 0) {
JSAtomListIterator iter(&funtc->decls);
JSAtomListElement *ale;
AtomDeclsIter iter(&funtc->decls);
JSDefinition *dn;
while ((ale = iter()) != NULL) {
JSDefinition *dn = ALE_DEFN(ale);
while ((dn = iter()) != NULL) {
if (dn->kind() == JSDefinition::ARG && dn->isAssigned()) {
funbox->tcflags |= TCF_FUN_MUTATES_PARAMETER;
break;
@ -2980,7 +2987,7 @@ Parser::functionArguments(JSTreeContext &funtc, JSFunctionBox *funbox, JSParseNo
* destructuring bindings aren't added to funtc.bindings
* until after all arguments have been parsed.
*/
if (funtc.decls.lookup(atom)) {
if (funtc.decls.lookupFirst(atom)) {
duplicatedArg = atom;
if (destructuringArg)
goto report_dup_and_destructuring;
@ -3003,7 +3010,7 @@ Parser::functionArguments(JSTreeContext &funtc, JSFunctionBox *funbox, JSParseNo
#if JS_HAS_DESTRUCTURING
report_dup_and_destructuring:
JSDefinition *dn = ALE_DEFN(funtc.decls.lookup(duplicatedArg));
JSDefinition *dn = funtc.decls.lookupFirst(duplicatedArg);
reportErrorNumber(dn, JSREPORT_ERROR, JSMSG_DESTRUCT_DUP_ARG);
return false;
#endif
@ -3049,8 +3056,7 @@ Parser::functionDef(JSAtom *funAtom, FunctionType type, FunctionSyntaxKind kind)
* avoid optimizing variable references that might name a function.
*/
if (kind == Statement) {
if (JSAtomListElement *ale = tc->decls.lookup(funAtom)) {
JSDefinition *dn = ALE_DEFN(ale);
if (JSDefinition *dn = tc->decls.lookupFirst(funAtom)) {
JSDefinition::Kind dn_kind = dn->kind();
JS_ASSERT(!dn->pn_used);
@ -3071,7 +3077,7 @@ Parser::functionDef(JSAtom *funAtom, FunctionType type, FunctionSyntaxKind kind)
}
if (bodyLevel) {
ALE_SET_DEFN(ale, pn);
tc->decls.update(funAtom, (JSDefinition *) pn);
pn->pn_defn = true;
pn->dn_uses = dn; /* dn->dn_uses is now pn_link */
@ -3084,12 +3090,8 @@ Parser::functionDef(JSAtom *funAtom, FunctionType type, FunctionSyntaxKind kind)
* pre-created definition node for this function that primaryExpr
* put in tc->lexdeps on first forward reference, and recycle pn.
*/
JSHashEntry **hep;
ale = tc->lexdeps.rawLookup(funAtom, hep);
if (ale) {
JSDefinition *fn = ALE_DEFN(ale);
if (JSDefinition *fn = tc->lexdeps.lookupDefn(funAtom)) {
JS_ASSERT(fn->pn_defn);
fn->pn_type = TOK_FUNCTION;
fn->pn_arity = PN_FUNC;
@ -3104,7 +3106,7 @@ Parser::functionDef(JSAtom *funAtom, FunctionType type, FunctionSyntaxKind kind)
fn->pn_body = NULL;
fn->pn_cookie.makeFree();
tc->lexdeps.rawRemove(tc->parser, ale, hep);
tc->lexdeps->remove(funAtom);
RecycleTree(pn, tc);
pn = fn;
}
@ -3152,6 +3154,8 @@ Parser::functionDef(JSAtom *funAtom, FunctionType type, FunctionSyntaxKind kind)
/* Initialize early for possible flags mutation via destructuringExpr. */
JSTreeContext funtc(tc->parser);
if (!funtc.init(context))
return NULL;
JSFunctionBox *funbox = EnterFunction(pn, &funtc, funAtom, kind);
if (!funbox)
@ -3176,11 +3180,9 @@ Parser::functionDef(JSAtom *funAtom, FunctionType type, FunctionSyntaxKind kind)
* Parser::functionArguments has returned.
*/
if (prelude) {
JSAtomListIterator iter(&funtc.decls);
while (JSAtomListElement *ale = iter()) {
JSParseNode *apn = ALE_DEFN(ale);
AtomDeclsIter iter(&funtc.decls);
while (JSDefinition *apn = iter()) {
/* Filter based on pn_op -- see BindDestructuringArg, above. */
if (apn->pn_op != JSOP_SETLOCAL)
continue;
@ -3578,7 +3580,6 @@ BindLet(JSContext *cx, BindData *data, JSAtom *atom, JSTreeContext *tc)
{
JSParseNode *pn;
JSObject *blockObj;
JSAtomListElement *ale;
jsint n;
/*
@ -3592,15 +3593,13 @@ BindLet(JSContext *cx, BindData *data, JSAtom *atom, JSTreeContext *tc)
return false;
blockObj = tc->blockChain();
ale = tc->decls.lookup(atom);
if (ale && ALE_DEFN(ale)->pn_blockid == tc->blockid()) {
JSDefinition *dn = tc->decls.lookupFirst(atom);
if (dn && dn->pn_blockid == tc->blockid()) {
JSAutoByteString name;
if (js_AtomToPrintableString(cx, atom, &name)) {
ReportCompileErrorNumber(cx, TS(tc->parser), pn,
JSREPORT_ERROR, JSMSG_REDECLARED_VAR,
(ale && ALE_DEFN(ale)->isConst())
? js_const_str
: js_variable_str,
dn->isConst() ? js_const_str : js_variable_str,
name.ptr());
}
return false;
@ -3665,7 +3664,7 @@ PopStatement(JSTreeContext *tc)
/* Beware the empty destructuring dummy. */
if (atom == tc->parser->context->runtime->atomState.emptyAtom)
continue;
tc->decls.remove(tc->parser, atom);
tc->decls.remove(atom);
}
}
js_PopStatement(tc);
@ -3708,8 +3707,8 @@ DefineGlobal(JSParseNode *pn, JSCodeGenerator *cg, JSAtom *atom)
if (!cg->compileAndGo() || !globalObj || cg->compilingForEval())
return true;
JSAtomListElement *ale = globalScope->names.lookup(atom);
if (!ale) {
AtomIndexAddPtr p = globalScope->names.lookupForAdd(atom);
if (!p) {
JSContext *cx = cg->parser->context;
JSObject *holder;
@ -3745,11 +3744,11 @@ DefineGlobal(JSParseNode *pn, JSCodeGenerator *cg, JSAtom *atom)
if (!globalScope->defs.append(def))
return false;
ale = globalScope->names.add(cg->parser, atom);
if (!ale)
jsatomid index = globalScope->names.count();
if (!globalScope->names.add(p, atom, index))
return false;
JS_ASSERT(ALE_INDEX(ale) == globalScope->defs.length() - 1);
JS_ASSERT(index == globalScope->defs.length() - 1);
} else {
/*
* Functions can be redeclared, and the last one takes effect. Check
@ -3766,7 +3765,7 @@ DefineGlobal(JSParseNode *pn, JSCodeGenerator *cg, JSAtom *atom)
*/
if (pn->pn_type == TOK_FUNCTION) {
JS_ASSERT(pn->pn_arity = PN_FUNC);
uint32 index = ALE_INDEX(ale);
jsatomid index = p.value();
globalScope->defs[index].funbox = pn->pn_funbox;
}
}
@ -3777,8 +3776,7 @@ DefineGlobal(JSParseNode *pn, JSCodeGenerator *cg, JSAtom *atom)
}
static bool
BindTopLevelVar(JSContext *cx, BindData *data, JSAtomListElement *ale, JSParseNode *pn,
JSAtom *varname, JSTreeContext *tc)
BindTopLevelVar(JSContext *cx, BindData *data, JSParseNode *pn, JSAtom *varname, JSTreeContext *tc)
{
JS_ASSERT(pn->pn_op == JSOP_NAME);
JS_ASSERT(!tc->inFunction());
@ -3831,7 +3829,7 @@ BindTopLevelVar(JSContext *cx, BindData *data, JSAtomListElement *ale, JSParseNo
}
static bool
BindFunctionLocal(JSContext *cx, BindData *data, JSAtomListElement *ale, JSParseNode *pn,
BindFunctionLocal(JSContext *cx, BindData *data, MultiDeclRange &mdl, JSParseNode *pn,
JSAtom *name, JSTreeContext *tc)
{
JS_ASSERT(tc->inFunction());
@ -3864,7 +3862,7 @@ BindFunctionLocal(JSContext *cx, BindData *data, JSAtomListElement *ale, JSParse
if (kind == ARGUMENT) {
JS_ASSERT(tc->inFunction());
JS_ASSERT(ale && ALE_DEFN(ale)->kind() == JSDefinition::ARG);
JS_ASSERT(!mdl.empty() && mdl.front()->kind() == JSDefinition::ARG);
} else {
JS_ASSERT(kind == VARIABLE || kind == CONSTANT);
}
@ -3892,11 +3890,11 @@ BindVarOrConst(JSContext *cx, BindData *data, JSAtom *atom, JSTreeContext *tc)
return true;
}
JSAtomListElement *ale = tc->decls.lookup(atom);
MultiDeclRange mdl = tc->decls.lookupMulti(atom);
JSOp op = data->op;
if (stmt || ale) {
JSDefinition *dn = ale ? ALE_DEFN(ale) : NULL;
if (stmt || !mdl.empty()) {
JSDefinition *dn = mdl.empty() ? NULL : mdl.front();
JSDefinition::Kind dn_kind = dn ? dn->kind() : JSDefinition::VAR;
if (dn_kind == JSDefinition::ARG) {
@ -3939,14 +3937,14 @@ BindVarOrConst(JSContext *cx, BindData *data, JSAtom *atom, JSTreeContext *tc)
}
}
if (!ale) {
if (mdl.empty()) {
if (!Define(pn, atom, tc))
return JS_FALSE;
} else {
/*
* A var declaration never recreates an existing binding, it restates
* it and possibly reinitializes its value. Beware that if pn becomes a
* use of ALE_DEFN(ale), and if we have an initializer for this var or
* use of |mdl.defn()|, and if we have an initializer for this var or
* const (typically a const would ;-), then pn must be rewritten into a
* TOK_ASSIGN node. See Variables, further below.
*
@ -3954,7 +3952,7 @@ BindVarOrConst(JSContext *cx, BindData *data, JSAtom *atom, JSTreeContext *tc)
* There the x definition is hoisted but the x = 2 assignment mutates
* the block-local binding of x.
*/
JSDefinition *dn = ALE_DEFN(ale);
JSDefinition *dn = mdl.front();
data->fresh = false;
@ -3972,16 +3970,15 @@ BindVarOrConst(JSContext *cx, BindData *data, JSAtom *atom, JSTreeContext *tc)
pnu->pn_op = JSOP_NAME;
}
/* Find the first non-let binding of this atom. */
while (dn->kind() == JSDefinition::LET) {
do {
ale = ALE_NEXT(ale);
} while (ale && ALE_ATOM(ale) != atom);
if (!ale)
mdl.popFront();
if (mdl.empty())
break;
dn = ALE_DEFN(ale);
dn = mdl.front();
}
if (ale) {
if (dn) {
JS_ASSERT_IF(data->op == JSOP_DEFCONST,
dn->kind() == JSDefinition::CONST);
return JS_TRUE;
@ -3993,12 +3990,8 @@ BindVarOrConst(JSContext *cx, BindData *data, JSAtom *atom, JSTreeContext *tc)
* hoisted above the let bindings.
*/
if (!pn->pn_defn) {
JSHashEntry **hep;
ale = tc->lexdeps.rawLookup(atom, hep);
if (ale) {
pn = ALE_DEFN(ale);
tc->lexdeps.rawRemove(tc->parser, ale, hep);
if (tc->lexdeps->lookup(atom)) {
tc->lexdeps->remove(atom);
} else {
JSParseNode *pn2 = NameNode::create(atom, tc);
if (!pn2)
@ -4012,10 +4005,8 @@ BindVarOrConst(JSContext *cx, BindData *data, JSAtom *atom, JSTreeContext *tc)
pn->pn_op = JSOP_NAME;
}
ale = tc->decls.add(tc->parser, atom, JSAtomList::HOIST);
if (!ale)
if (!tc->decls.addHoist(atom, (JSDefinition *) pn))
return JS_FALSE;
ALE_SET_DEFN(ale, pn);
pn->pn_defn = true;
pn->pn_dflags &= ~PND_PLACEHOLDER;
}
@ -4024,9 +4015,9 @@ BindVarOrConst(JSContext *cx, BindData *data, JSAtom *atom, JSTreeContext *tc)
pn->pn_dflags |= PND_CONST;
if (tc->inFunction())
return BindFunctionLocal(cx, data, ale, pn, atom, tc);
return BindFunctionLocal(cx, data, mdl, pn, atom, tc);
return BindTopLevelVar(cx, data, ale, pn, atom, tc);
return BindTopLevelVar(cx, data, pn, atom, tc);
}
static bool
@ -4744,18 +4735,15 @@ PushBlocklikeStatement(JSStmtInfo *stmt, JSStmtType type, JSTreeContext *tc)
static JSParseNode *
NewBindingNode(JSAtom *atom, JSTreeContext *tc, bool let = false)
{
JSParseNode *pn = NULL;
JSParseNode *pn;
AtomDefnPtr removal;
JSAtomListElement *ale = tc->decls.lookup(atom);
if (ale) {
pn = ALE_DEFN(ale);
if ((pn = tc->decls.lookupFirst(atom))) {
JS_ASSERT(!pn->isPlaceholder());
} else {
ale = tc->lexdeps.lookup(atom);
if (ale) {
pn = ALE_DEFN(ale);
JS_ASSERT(pn->isPlaceholder());
}
removal = tc->lexdeps->lookup(atom);
pn = removal ? removal.value() : NULL;
JS_ASSERT_IF(pn, pn->isPlaceholder());
}
if (pn) {
@ -4774,7 +4762,7 @@ NewBindingNode(JSAtom *atom, JSTreeContext *tc, bool let = false)
if (let)
pn->pn_blockid = tc->blockid();
tc->lexdeps.remove(tc->parser, atom);
tc->lexdeps->remove(removal);
return pn;
}
}
@ -5446,9 +5434,9 @@ Parser::withStatement()
* Make sure to deoptimize lexical dependencies inside the |with|
* to safely optimize binding globals (see bug 561923).
*/
JSAtomListIterator iter(&tc->lexdeps);
while (JSAtomListElement *ale = iter()) {
JSDefinition *lexdep = ALE_DEFN(ale)->resolve();
for (AtomDefnRange r = tc->lexdeps->all(); !r.empty(); r.popFront()) {
JSDefinition *defn = r.front().value();
JSDefinition *lexdep = defn->resolve();
DeoptimizeUsesWithin(lexdep, pn->pn_pos);
}
@ -6884,15 +6872,11 @@ CompExprTransplanter::transplant(JSParseNode *pn)
JS_ASSERT(!stmt || stmt != tc->topStmt);
#endif
if (genexp && PN_OP(dn) != JSOP_CALLEE) {
JS_ASSERT(!tc->decls.lookup(atom));
JS_ASSERT(!tc->decls.lookupFirst(atom));
if (dn->pn_pos < root->pn_pos || dn->isPlaceholder()) {
JSAtomListElement *ale = tc->lexdeps.add(tc->parser, atom);
if (!ale)
return false;
if (dn->pn_pos >= root->pn_pos) {
tc->parent->lexdeps.remove(tc->parser, atom);
tc->parent->lexdeps->remove(atom);
} else {
JSDefinition *dn2 = (JSDefinition *)NameNode::create(atom, tc);
if (!dn2)
@ -6917,8 +6901,8 @@ CompExprTransplanter::transplant(JSParseNode *pn)
dn = dn2;
}
ALE_SET_DEFN(ale, dn);
if (!tc->lexdeps->put(atom, dn))
return false;
}
}
}
@ -7181,6 +7165,8 @@ Parser::generatorExpr(JSParseNode *kid)
{
JSTreeContext *outertc = tc;
JSTreeContext gentc(tc->parser);
if (!gentc.init(context))
return NULL;
JSFunctionBox *funbox = EnterFunction(genfn, &gentc);
if (!funbox)
@ -8165,6 +8151,8 @@ Parser::parseXMLText(JSObject *chain, bool allowList)
* the one passed to us.
*/
JSTreeContext xmltc(this);
if (!xmltc.init(context))
return NULL;
xmltc.setScopeChain(chain);
/* Set XML-only mode to turn off special treatment of {expr} in XML. */
@ -8198,9 +8186,9 @@ Parser::parseXMLText(JSObject *chain, bool allowList)
* being in scope.
*
* Name binding analysis is eager with fixups, rather than multi-pass, and let
* bindings push on the front of the tc->decls JSAtomList (either the singular
* list or on a hash chain -- see JSAtomList::AddHow) in order to shadow outer
* scope bindings of the same name.
* bindings push on the front of the tc->decls AtomDecls (either the singular
* list or on a hash chain -- see JSAtomMultiList::add*) in order to shadow
* outer scope bindings of the same name.
*
* This simplifies binding lookup code at the price of a linear search here,
* but only if code uses let (var predominates), and even then this function's
@ -8404,7 +8392,8 @@ Parser::primaryExpr(TokenKind tt, JSBool afterDot)
* A map from property names we've seen thus far to a mask of property
* assignment types, stored and retrieved with ALE_SET_INDEX/ALE_INDEX.
*/
JSAutoAtomList seen(tc->parser);
AtomIndexMap seen(context);
enum AssignmentType {
GET = 0x1,
SET = 0x2,
@ -8534,8 +8523,10 @@ Parser::primaryExpr(TokenKind tt, JSBool afterDot)
assignType = VALUE; /* try to error early */
}
if (JSAtomListElement *ale = seen.lookup(atom)) {
AssignmentType oldAssignType = AssignmentType(ALE_INDEX(ale));
AtomIndexAddPtr p = seen.lookupForAdd(atom);
if (p) {
jsatomid index = p.value();
AssignmentType oldAssignType = AssignmentType(index);
if ((oldAssignType & assignType) &&
(oldAssignType != VALUE || assignType != VALUE || tc->needStrictChecks()))
{
@ -8554,12 +8545,10 @@ Parser::primaryExpr(TokenKind tt, JSBool afterDot)
return NULL;
}
}
ALE_SET_INDEX(ale, assignType | oldAssignType);
p.value() = assignType | oldAssignType;
} else {
ale = seen.add(tc->parser, atom);
if (!ale)
if (!seen.add(p, atom, assignType))
return NULL;
ALE_SET_INDEX(ale, assignType);
}
tt = tokenStream.getToken();
@ -8695,7 +8684,8 @@ Parser::primaryExpr(TokenKind tt, JSBool afterDot)
* Bind early to JSOP_ARGUMENTS to relieve later code from having
* to do this work (new rule for the emitter to count on).
*/
if (!afterDot && !(tc->flags & TCF_DECL_DESTRUCTURING) && !tc->inStatement(STMT_WITH)) {
if (!afterDot && !(tc->flags & TCF_DECL_DESTRUCTURING)
&& !tc->inStatement(STMT_WITH)) {
pn->pn_op = JSOP_ARGUMENTS;
pn->pn_dflags |= PND_BOUND;
}
@ -8712,11 +8702,11 @@ Parser::primaryExpr(TokenKind tt, JSBool afterDot)
JSStmtInfo *stmt = js_LexicalLookup(tc, pn->pn_atom, NULL);
MultiDeclRange mdl = tc->decls.lookupMulti(pn->pn_atom);
JSDefinition *dn;
JSAtomListElement *ale = tc->decls.lookup(pn->pn_atom);
if (ale) {
dn = ALE_DEFN(ale);
if (!mdl.empty()) {
dn = mdl.front();
#if JS_HAS_BLOCK_SCOPE
/*
* Skip out-of-scope let bindings along an ALE list or hash
@ -8725,36 +8715,32 @@ Parser::primaryExpr(TokenKind tt, JSBool afterDot)
* from an outer scope. See bug 496532.
*/
while (dn->isLet() && !BlockIdInScope(dn->pn_blockid, tc)) {
do {
ale = ALE_NEXT(ale);
} while (ale && ALE_ATOM(ale) != pn->pn_atom);
if (!ale)
mdl.popFront();
if (mdl.empty())
break;
dn = ALE_DEFN(ale);
dn = mdl.front();
}
#endif
}
if (ale) {
dn = ALE_DEFN(ale);
if (!mdl.empty()) {
dn = mdl.front();
} else {
ale = tc->lexdeps.lookup(pn->pn_atom);
if (ale) {
dn = ALE_DEFN(ale);
AtomDefnAddPtr p = tc->lexdeps->lookupForAdd(pn->pn_atom);
if (p) {
dn = p.value();
} else {
/*
* No definition before this use in any lexical scope.
* Add a mapping in tc->lexdeps from pn->pn_atom to a
* new node for the forward-referenced definition. This
* placeholder definition node will be adopted when we
* parse the real defining declaration form, or left as
* a free variable definition if we never see the real
* definition.
* Create a placeholder definition node to either:
* - Be adopted when we parse the real defining
* declaration, or
* - Be left as a free variable definition if we never
* see the real definition.
*/
ale = MakePlaceholder(pn, tc);
if (!ale)
dn = MakePlaceholder(p, pn, tc);
if (!dn)
return NULL;
dn = ALE_DEFN(ale);
/*
* In case this is a forward reference to a function,

View File

@ -49,6 +49,8 @@
#include "jsatom.h"
#include "jsscan.h"
#include "frontend/ParseMaps.h"
JS_BEGIN_EXTERN_C
/*
@ -300,7 +302,7 @@ typedef enum JSParseNodeArity {
PN_FUNC, /* function definition node */
PN_LIST, /* generic singly linked list */
PN_NAME, /* name use or definition node */
PN_NAMESET /* JSAtomList + JSParseNode ptr */
PN_NAMESET /* JSAtomDefnMapPtr + JSParseNode ptr */
} JSParseNodeArity;
struct JSDefinition;
@ -309,7 +311,7 @@ namespace js {
struct GlobalScope {
GlobalScope(JSContext *cx, JSObject *globalObj, JSCodeGenerator *cg)
: globalObj(globalObj), cg(cg), defs(cx)
: globalObj(globalObj), cg(cg), defs(cx), names(cx)
{ }
struct GlobalDef {
@ -339,7 +341,7 @@ struct GlobalScope {
* one that must be added after compilation succeeds.
*/
Vector<GlobalDef, 16> defs;
JSAtomList names;
AtomIndexMap names;
};
} /* namespace js */
@ -406,8 +408,8 @@ struct JSParseNode {
computation */
} name;
struct { /* lexical dependencies + sub-tree */
JSAtomSet names; /* set of names with JSDefinitions */
JSParseNode *tree; /* sub-tree containing name uses */
js::AtomDefnMapPtr defnMap;
JSParseNode *tree; /* sub-tree containing name uses */
} nameset;
struct { /* PN_NULLARY variant for E4X */
JSAtom *atom; /* first atom in pair */
@ -441,19 +443,20 @@ struct JSParseNode {
#define pn_objbox pn_u.name.objbox
#define pn_expr pn_u.name.expr
#define pn_lexdef pn_u.name.lexdef
#define pn_names pn_u.nameset.names
#define pn_names pn_u.nameset.defnMap
#define pn_tree pn_u.nameset.tree
#define pn_dval pn_u.dval
#define pn_atom2 pn_u.apair.atom2
protected:
void inline init(js::TokenKind type, JSOp op, JSParseNodeArity arity) {
void init(js::TokenKind type, JSOp op, JSParseNodeArity arity) {
pn_type = type;
pn_op = op;
pn_arity = arity;
pn_parens = false;
JS_ASSERT(!pn_used);
JS_ASSERT(!pn_defn);
pn_names.init();
pn_next = pn_link = NULL;
}
@ -752,6 +755,10 @@ struct LexicalScopeNode : public JSParseNode {
* list may be a JSDefinition instead of a JSParseNode. The pn_defn bit is set
* for all JSDefinitions, clear otherwise.
*
* In an upvars list, defn->resolve() is the outermost definition the
* name may reference. If a with block or a function that calls eval encloses
* the use, the name may end up referring to something else at runtime.
*
* Note that not all var declarations are definitions: JS allows multiple var
* declarations in a function or script, but only the first creates the hoisted
* binding. JS programmers do redeclare variables for good refactoring reasons,
@ -861,10 +868,11 @@ struct LexicalScopeNode : public JSParseNode {
struct JSDefinition : public JSParseNode
{
/*
* We store definition pointers in PN_NAMESET JSAtomLists in the AST, but
* due to redefinition these nodes may become uses of other definitions.
* This is unusual, so we simply chase the pn_lexdef link to find the final
* definition node. See methods called from Parser::analyzeFunctions.
* We store definition pointers in PN_NAMESET JSAtomDefnMapPtrs in the AST,
* but due to redefinition these nodes may become uses of other
* definitions. This is unusual, so we simply chase the pn_lexdef link to
* find the final definition node. See methods called from
* Parser::analyzeFunctions.
*
* FIXME: MakeAssignment mutates for want of a parent link...
*/
@ -1055,7 +1063,6 @@ enum FunctionSyntaxKind { Expression, Statement };
struct Parser : private js::AutoGCRooter
{
JSContext *const context; /* FIXME Bug 551291: use AutoGCRooter::context? */
JSAtomListElement *aleFreeList;
void *tempFreeList[NUM_TEMP_FREELISTS];
TokenStream tokenStream;
void *tempPoolMark; /* initial JSContext.tempPool mark */
@ -1245,17 +1252,17 @@ Parser::reportErrorNumber(JSParseNode *pn, uintN flags, uintN errorNumber, ...)
struct Compiler
{
Parser parser;
Parser parser;
GlobalScope *globalScope;
Compiler(JSContext *cx, JSPrincipals *prin = NULL, StackFrame *cfp = NULL);
/*
* Initialize a compiler. Parameters are passed on to init parser.
*/
inline bool
init(const jschar *base, size_t length, const char *filename, uintN lineno, JSVersion version)
{
JSContext *context() {
return parser.context;
}
bool init(const jschar *base, size_t length, const char *filename, uintN lineno,
JSVersion version) {
return parser.init(base, length, filename, lineno, version);
}

View File

@ -71,9 +71,9 @@ static const uintN JS_GCTHING_ALIGN = 8;
static const uintN JS_GCTHING_ZEROBITS = 3;
/* Scalar typedefs. */
typedef uint8 jsbytecode;
typedef uint8 jssrcnote;
typedef uint32 jsatomid;
typedef uint8 jsbytecode;
typedef uint8 jssrcnote;
typedef uintptr_t jsatomid;
/* Struct typedefs. */
typedef struct JSArgumentFormatMap JSArgumentFormatMap;
@ -92,13 +92,10 @@ typedef struct JSTreeContext JSTreeContext;
typedef struct JSTryNote JSTryNote;
/* Friend "Advanced API" typedefs. */
typedef struct JSAtomList JSAtomList;
typedef struct JSAtomListElement JSAtomListElement;
typedef struct JSAtomMap JSAtomMap;
typedef struct JSAtomState JSAtomState;
typedef struct JSCodeSpec JSCodeSpec;
typedef struct JSPrinter JSPrinter;
typedef struct JSRegExpStatics JSRegExpStatics;
typedef struct JSStackHeader JSStackHeader;
typedef struct JSSubString JSSubString;
typedef struct JSNativeTraceInfo JSNativeTraceInfo;
@ -126,6 +123,7 @@ class JSFixedString;
class JSStaticAtom;
class JSRope;
class JSAtom;
struct JSDefinition;
namespace js {
@ -179,6 +177,11 @@ template <class T,
class AllocPolicy = TempAllocPolicy>
class HashSet;
template <typename K,
typename V,
size_t InlineElems>
class InlineMap;
class PropertyCache;
struct PropertyCacheEntry;
@ -186,6 +189,13 @@ struct Shape;
struct EmptyShape;
class Bindings;
class MultiDeclRange;
class ParseMapPool;
class DefnOrHeader;
typedef js::InlineMap<JSAtom *, JSDefinition *, 24> AtomDefnMap;
typedef js::InlineMap<JSAtom *, jsatomid, 24> AtomIndexMap;
typedef js::InlineMap<JSAtom *, DefnOrHeader, 24> AtomDOHMap;
} /* namespace js */
} /* export "C++" */

View File

@ -140,7 +140,7 @@ enum TokenKind {
not a block */
TOK_FORHEAD = 83, /* head of for(;;)-style loop */
TOK_ARGSBODY = 84, /* formal args in list + body at end */
TOK_UPVARS = 85, /* lexical dependencies as JSAtomList
TOK_UPVARS = 85, /* lexical dependencies as JSAtomDefnMap
of definitions paired with a parse
tree full of uses of those names */
TOK_RESERVED, /* reserved keywords */

View File

@ -1240,7 +1240,7 @@ JSScript::NewScriptFromCG(JSContext *cx, JSCodeGenerator *cg)
JSFunction *fun;
/* The counts of indexed things must be checked during code generation. */
JS_ASSERT(cg->atomList.count <= INDEX_LIMIT);
JS_ASSERT(cg->atomIndices->count() <= INDEX_LIMIT);
JS_ASSERT(cg->objectList.length <= INDEX_LIMIT);
JS_ASSERT(cg->regexpList.length <= INDEX_LIMIT);
@ -1252,9 +1252,10 @@ JSScript::NewScriptFromCG(JSContext *cx, JSCodeGenerator *cg)
JS_ASSERT(nClosedArgs == cg->closedArgs.length());
uint16 nClosedVars = uint16(cg->closedVars.length());
JS_ASSERT(nClosedVars == cg->closedVars.length());
size_t upvarIndexCount = cg->upvarIndices.hasMap() ? cg->upvarIndices->count() : 0;
script = NewScript(cx, prologLength + mainLength, nsrcnotes,
cg->atomList.count, cg->objectList.length,
cg->upvarList.count, cg->regexpList.length,
cg->atomIndices->count(), cg->objectList.length,
upvarIndexCount, cg->regexpList.length,
cg->ntrynotes, cg->constList.length(),
cg->globalUses.length(), nClosedArgs, nClosedVars, cg->version());
if (!script)
@ -1272,7 +1273,7 @@ JSScript::NewScriptFromCG(JSContext *cx, JSCodeGenerator *cg)
: cg->sharpSlots();
JS_ASSERT(nfixed < SLOTNO_LIMIT);
script->nfixed = (uint16) nfixed;
js_InitAtomMap(cx, &script->atomMap, &cg->atomList);
js_InitAtomMap(cx, &script->atomMap, cg->atomIndices.getMap());
filename = cg->parser->tokenStream.getFilename();
if (filename) {
@ -1316,11 +1317,11 @@ JSScript::NewScriptFromCG(JSContext *cx, JSCodeGenerator *cg)
if (cg->flags & TCF_HAS_SINGLETONS)
script->hasSingletons = true;
if (cg->upvarList.count != 0) {
JS_ASSERT(cg->upvarList.count <= cg->upvarMap.length);
if (cg->hasUpvarIndices()) {
JS_ASSERT(cg->upvarIndices->count() <= cg->upvarMap.length);
memcpy(script->upvars()->vector, cg->upvarMap.vector,
cg->upvarList.count * sizeof(uint32));
cg->upvarList.clear();
cg->upvarIndices->count() * sizeof(uint32));
cg->upvarIndices->clear();
cx->free_(cg->upvarMap.vector);
cg->upvarMap.vector = NULL;
}

View File

@ -165,6 +165,7 @@ template <> struct IsPodType<long> { static const bool result = true;
template <> struct IsPodType<unsigned long> { static const bool result = true; };
template <> struct IsPodType<float> { static const bool result = true; };
template <> struct IsPodType<double> { static const bool result = true; };
template <typename T> struct IsPodType<T *> { static const bool result = true; };
/* Return the size/end of an array without using macros. */
/*
 * Return the number of elements in a static array without using macros.
 * Binding the array by reference lets the compiler deduce N at compile time.
 * Note the return type must be size_t: this yields an element count, not an
 * end pointer, so returning it as T * is ill-formed.
 */
template <class T, size_t N> inline size_t ArraySize(T (&)[N]) { return N; }

View File

@ -379,6 +379,9 @@ class Vector : private AllocPolicy
/* Shorthand for shrinkBy(length()). */
void clear();
/* Clears and releases any heap-allocated storage. */
void clearAndFree();
/* Potentially fallible append operations. */
bool append(const T &t);
bool appendN(const T &t, size_t n);
@ -654,6 +657,23 @@ Vector<T,N,AP>::clear()
mLength = 0;
}
template <class T, size_t N, class AP>
inline void
Vector<T,N,AP>::clearAndFree()
{
    /*
     * Drop all elements, then release any heap storage and fall back to the
     * vector's inline buffer, so subsequent use starts from a pristine,
     * allocation-free state.
     */
    clear();

    if (!usingInlineStorage()) {
        this->free_(beginNoCheck());
        mBegin = (T *)storage.addr();
        mCapacity = sInlineCapacity;
#ifdef DEBUG
        mReserved = 0;
#endif
    }
}
template <class T, size_t N, class AP>
JS_ALWAYS_INLINE bool
Vector<T,N,AP>::append(const T &t)

408
js/src/mfbt/InlineMap.h Normal file
View File

@ -0,0 +1,408 @@
/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=4 sw=4 et tw=99 ft=cpp:
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is SpiderMonkey JavaScript engine.
*
* The Initial Developer of the Original Code is
* Mozilla Corporation.
* Portions created by the Initial Developer are Copyright (C) 2011
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Chris Leary <cdleary@mozilla.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#ifndef InlineMap_h__
#define InlineMap_h__
#include "jshashtable.h"
namespace js {
/*
* A type can only be used as an InlineMap key if zero is an invalid key value
* (and thus may be used as a tombstone value by InlineMap).
*/
/* By default, assume zero may be a legitimate key value. */
template <typename T> struct ZeroIsReserved { static const bool result = false; };
/* Pointer keys reserve NULL, so it can serve as the tombstone value. */
template <typename T> struct ZeroIsReserved<T *> { static const bool result = true; };
/*
 * An associative container that stores its first entries in a flat inline
 * array of key/value pairs, spilling into a heap-backed HashMap (the "word
 * map") only once more than InlineElems entries have been added. Small maps
 * therefore avoid hash-table allocation entirely; in inline mode lookups are
 * linear scans.
 *
 * Representation invariants:
 * - K must treat zero as an invalid key (enforced via ZeroIsReserved): a
 *   null key marks an unused/removed inline slot.
 * - inlNext <= InlineElems means inline mode, with entries in
 *   inl[0..inlNext); the sentinel value InlineElems + 1 means the container
 *   has switched to the hash map.
 * - inlCount counts live (non-null-key) inline entries; it may be less than
 *   inlNext after removals, because removed inline slots are nulled out
 *   rather than compacted or reused.
 */
template <typename K, typename V, size_t InlineElems>
class InlineMap
{
  public:
    typedef HashMap<K, V, DefaultHasher<K>, TempAllocPolicy> WordMap;

    struct InlineElem
    {
        K key;
        V value;
    };

  private:
    typedef typename WordMap::Ptr       WordMapPtr;
    typedef typename WordMap::AddPtr    WordMapAddPtr;
    typedef typename WordMap::Range     WordMapRange;

    size_t      inlNext;    /* Next free inline slot, or InlineElems + 1 once in map mode. */
    size_t      inlCount;   /* Number of live (non-null-key) inline entries. */
    InlineElem  inl[InlineElems];
    WordMap     map;

    void checkStaticInvariants() {
        /* A reserved (zero) key value is required to mark empty/removed slots. */
        JS_STATIC_ASSERT(ZeroIsReserved<K>::result);
    }

    bool usingMap() const {
        return inlNext > InlineElems;
    }

    /* Migrate all live inline entries into the hash map and enter map mode. */
    bool switchToMap() {
        JS_ASSERT(inlNext == InlineElems);

        if (map.initialized()) {
            /* Map storage survives clear(); reset it before repopulating. */
            map.clear();
        } else {
            if (!map.init(count()))
                return false;
            JS_ASSERT(map.initialized());
        }

        for (InlineElem *it = inl, *end = inl + inlNext; it != end; ++it) {
            /* Skip removed slots (null key). */
            if (it->key && !map.putNew(it->key, it->value))
                return false;
        }

        inlNext = InlineElems + 1;
        JS_ASSERT(map.count() == inlCount);
        JS_ASSERT(usingMap());
        return true;
    }

    /* Out-of-line slow path for add(): spill to the map, then insert. */
    JS_NEVER_INLINE
    bool switchAndAdd(const K &key, const V &value) {
        if (!switchToMap())
            return false;

        return map.putNew(key, value);
    }

  public:
    explicit InlineMap(JSContext *cx)
      : inlNext(0), inlCount(0), map(cx) {
        checkStaticInvariants(); /* Force the template to instantiate the static invariants. */
    }

    /* Key/value view returned by Range::front(); references the live entry. */
    class Entry
    {
        friend class InlineMap;
        const K &key_;
        const V &value_;

        Entry(const K &key, const V &value) : key_(key), value_(value) {}

      public:
        const K &key() { return key_; }
        const V &value() { return value_; }
    }; /* class Entry */

    /* Result of lookup(): points at either an inline slot or a map entry. */
    class Ptr
    {
        friend class InlineMap;

        WordMapPtr  mapPtr;
        InlineElem  *inlPtr;
        bool        isInlinePtr;

        /* Safe-bool idiom: convertible to a pointer type nobody can misuse. */
        typedef Ptr ******* ConvertibleToBool;

        explicit Ptr(WordMapPtr p) : mapPtr(p), isInlinePtr(false) {}
        explicit Ptr(InlineElem *ie) : inlPtr(ie), isInlinePtr(true) {}
        void operator==(const Ptr &other);

      public:
        /* Leaves Ptr uninitialized. */
        Ptr() {
#ifdef DEBUG
            inlPtr = (InlineElem *) 0xbad;
            isInlinePtr = true;
#endif
        }

        /* Default copy constructor works for this structure. */

        bool found() const {
            return isInlinePtr ? bool(inlPtr) : mapPtr.found();
        }

        operator ConvertibleToBool() const {
            return ConvertibleToBool(found());
        }

        K &key() {
            JS_ASSERT(found());
            return isInlinePtr ? inlPtr->key : mapPtr->key;
        }

        V &value() {
            JS_ASSERT(found());
            return isInlinePtr ? inlPtr->value : mapPtr->value;
        }
    }; /* class Ptr */

    /* Result of lookupForAdd(): a found entry or an insertion point for add(). */
    class AddPtr
    {
        friend class InlineMap;

        WordMapAddPtr   mapAddPtr;
        InlineElem      *inlAddPtr;
        bool            isInlinePtr;
        /* Indicates whether inlAddPtr is a found result or an add pointer. */
        bool            inlPtrFound;

        AddPtr(InlineElem *ptr, bool found)
          : inlAddPtr(ptr), isInlinePtr(true), inlPtrFound(found)
        {}

        AddPtr(const WordMapAddPtr &p) : mapAddPtr(p), isInlinePtr(false) {}

        void operator==(const AddPtr &other);

        typedef AddPtr ******* ConvertibleToBool;

      public:
        AddPtr() {}

        bool found() const {
            return isInlinePtr ? inlPtrFound : mapAddPtr.found();
        }

        operator ConvertibleToBool() const {
            return found() ? ConvertibleToBool(1) : ConvertibleToBool(0);
        }

        V &value() {
            JS_ASSERT(found());
            if (isInlinePtr)
                return inlAddPtr->value;
            return mapAddPtr->value;
        }
    }; /* class AddPtr */

    size_t count() {
        return usingMap() ? map.count() : inlCount;
    }

    bool empty() const {
        return usingMap() ? map.empty() : !inlCount;
    }

    /*
     * Reset to empty inline mode. The hash map's storage, if any, is kept
     * and cleared lazily on the next spill (see switchToMap).
     */
    void clear() {
        inlNext = 0;
        inlCount = 0;
    }

    bool isMap() const {
        return usingMap();
    }

    const WordMap &asMap() const {
        JS_ASSERT(isMap());
        return map;
    }

    const InlineElem *asInline() const {
        JS_ASSERT(!isMap());
        return inl;
    }

    const InlineElem *inlineEnd() const {
        JS_ASSERT(!isMap());
        return inl + inlNext;
    }

    JS_ALWAYS_INLINE
    Ptr lookup(const K &key) {
        if (usingMap())
            return Ptr(map.lookup(key));

        /* Inline mode: linear scan; tombstones (null keys) never match. */
        for (InlineElem *it = inl, *end = inl + inlNext; it != end; ++it) {
            if (it->key == key)
                return Ptr(it);
        }

        return Ptr(NULL);
    }

    JS_ALWAYS_INLINE
    AddPtr lookupForAdd(const K &key) {
        if (usingMap())
            return AddPtr(map.lookupForAdd(key));

        for (InlineElem *it = inl, *end = inl + inlNext; it != end; ++it) {
            if (it->key == key)
                return AddPtr(it, true);
        }

        /*
         * The add pointer that's returned here may indicate the limit entry of
         * the linear space, in which case the |add| operation will initialize
         * the map if necessary and add the entry there.
         */
        return AddPtr(inl + inlNext, false);
    }

    JS_ALWAYS_INLINE
    bool add(AddPtr &p, const K &key, const V &value) {
        JS_ASSERT(!p);

        if (p.isInlinePtr) {
            InlineElem *addPtr = p.inlAddPtr;
            JS_ASSERT(addPtr == inl + inlNext);

            /* Switching to map mode before we add this pointer. */
            if (addPtr == inl + InlineElems)
                return switchAndAdd(key, value);

            JS_ASSERT(!p.found());
            JS_ASSERT(uintptr_t(inl + inlNext) == uintptr_t(p.inlAddPtr));
            p.inlAddPtr->key = key;
            p.inlAddPtr->value = value;
            ++inlCount;
            ++inlNext;
            return true;
        }

        return map.add(p.mapAddPtr, key, value);
    }

    JS_ALWAYS_INLINE
    bool put(const K &key, const V &value) {
        AddPtr p = lookupForAdd(key);
        if (p) {
            p.value() = value;
            return true;
        }
        return add(p, key, value);
    }

    void remove(Ptr p) {
        JS_ASSERT(p);
        if (p.isInlinePtr) {
            JS_ASSERT(inlCount > 0);
            JS_ASSERT(p.inlPtr->key != NULL);
            /* Tombstone the slot; inline slots are not compacted or reused. */
            p.inlPtr->key = NULL;
            --inlCount;
            return;
        }
        JS_ASSERT(map.initialized() && usingMap());
        map.remove(p.mapPtr);
    }

    void remove(const K &key) {
        if (Ptr p = lookup(key))
            remove(p);
    }

    /* Iterates live entries in either mode, skipping inline tombstones. */
    class Range
    {
        friend class InlineMap;

        WordMapRange    mapRange;
        InlineElem      *cur;
        InlineElem      *end;
        bool            isInline;

        explicit Range(WordMapRange r)
          : isInline(false) {
            mapRange = r;
            JS_ASSERT(!isInlineRange());
        }

        Range(const InlineElem *begin, const InlineElem *end_)
          : cur(const_cast<InlineElem *>(begin)),
            end(const_cast<InlineElem *>(end_)),
            isInline(true) {
            /* Start at the first live entry; leading slots may be tombstoned. */
            advancePastNulls(cur);
            JS_ASSERT(isInlineRange());
        }

        bool checkInlineRangeInvariants() const {
            JS_ASSERT(uintptr_t(cur) <= uintptr_t(end));
            JS_ASSERT_IF(cur != end, cur->key != NULL);
            return true;
        }

        bool isInlineRange() const {
            JS_ASSERT_IF(isInline, checkInlineRangeInvariants());
            return isInline;
        }

        void advancePastNulls(InlineElem *begin) {
            InlineElem *newCur = begin;
            while (newCur < end && NULL == newCur->key)
                ++newCur;
            JS_ASSERT(uintptr_t(newCur) <= uintptr_t(end));
            cur = newCur;
        }

        void bumpCurPtr() {
            JS_ASSERT(isInlineRange());
            advancePastNulls(cur + 1);
        }

        void operator==(const Range &other);

      public:
        bool empty() const {
            return isInlineRange() ? cur == end : mapRange.empty();
        }

        Entry front() {
            JS_ASSERT(!empty());
            if (isInlineRange())
                return Entry(cur->key, cur->value);
            return Entry(mapRange.front().key, mapRange.front().value);
        }

        void popFront() {
            JS_ASSERT(!empty());
            if (isInlineRange())
                bumpCurPtr();
            else
                mapRange.popFront();
        }
    }; /* class Range */

    Range all() const {
        return usingMap() ? Range(map.all()) : Range(inl, inl + inlNext);
    }
}; /* class InlineMap */
} /* namespace js */
#endif

View File

@ -188,7 +188,7 @@ struct DebugOnly
/*
* This utility pales in comparison to Boost's aligned_storage. The utility
* simply assumes that JSUint64 is enough alignment for anyone. This may need
* simply assumes that uint64 is enough alignment for anyone. This may need
* to be extended one day...
*
* As an important side effect, pulling the storage into this template is
@ -292,6 +292,11 @@ class Maybe
return asT();
}
    /*
     * Const access to the contained value; valid only once |constructed| is
     * true. The const_cast reuses the non-const asT() accessor — presumably
     * asT() only reinterprets the aligned storage as a T without mutating
     * anything, so this is safe. TODO(review): confirm against asT().
     */
    const T &ref() const {
        MOZ_ASSERT(constructed);
        return const_cast<Maybe *>(this)->asT();
    }
void destroy() {
ref().~T();
constructed = false;