[JAEGER] Fast paths for start/end iterators and GC hazard removal, bug 578756. r=dmandelin, gal

Brian Hackett 2010-08-10 11:12:54 -07:00
parent c9d0890ba0
commit fa84710155
6 changed files with 267 additions and 54 deletions


@@ -599,6 +599,7 @@ JSThreadData::purge(JSContext *cx)
     /* Purge cached native iterators. */
     memset(cachedNativeIterators, 0, sizeof(cachedNativeIterators));
+    lastNativeIterator = NULL;
 
     dtoaCache.s = NULL;
 }


@@ -1065,6 +1065,9 @@ struct JSThreadData {
     /* Cached native iterators. */
     JSObject *cachedNativeIterators[NATIVE_ITER_CACHE_SIZE];
 
+    /* Native iterator most recently started. */
+    JSObject *lastNativeIterator;
+
     /* Base address of the native stack for the current thread. */
     jsuword *nativeStackBase;
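
The new lastNativeIterator slot sits in front of the existing cachedNativeIterators hash table, giving two levels of lookup: a one-slot "most recently started" cache checked first, then the keyed cache. Below is a minimal standalone C++ model of that arrangement; every type and name here is a stand-in invented for illustration (the real one-slot check compares the object's shape chain rather than a key, as the jsiter.cpp hunks below show). purge() mirrors JSThreadData::purge() above: both caches are dropped wholesale so a GC cannot leave dangling iterator pointers reachable from thread data.

#include <cstddef>
#include <cstdint>
#include <cstring>

const size_t NATIVE_ITER_CACHE_SIZE = 64;   // illustrative; real value differs

struct IterModel { uint32_t shapesKey; bool active; };

struct ThreadDataModel {
    IterModel *cachedNativeIterators[NATIVE_ITER_CACHE_SIZE];  // keyed cache
    IterModel *lastNativeIterator;                             // one-slot cache

    // Mirrors JSThreadData::purge(): clear every cached pointer.
    void purge() {
        memset(cachedNativeIterators, 0, sizeof(cachedNativeIterators));
        lastNativeIterator = NULL;
    }

    // Check the most recently started iterator first, then fall back to
    // the keyed cache; an active iterator is never handed out again.
    IterModel *lookup(uint32_t key) {
        IterModel *last = lastNativeIterator;
        if (last && !last->active && last->shapesKey == key)
            return last;
        IterModel *it = cachedNativeIterators[key % NATIVE_ITER_CACHE_SIZE];
        if (it && !it->active && it->shapesKey == key)
            return it;
        return NULL;
    }
};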


@@ -484,11 +484,11 @@ NativeIterator::allocateKeyIterator(JSContext *cx, uint32 slength, const AutoIdV
 }
 
 NativeIterator *
-NativeIterator::allocateValueIterator(JSContext *cx, uint32 slength, const AutoValueVector &props)
+NativeIterator::allocateValueIterator(JSContext *cx, const AutoValueVector &props)
 {
     size_t plength = props.length();
     NativeIterator *ni = (NativeIterator *)
-        cx->malloc(sizeof(NativeIterator) + plength * sizeof(Value) + slength * sizeof(uint32));
+        cx->malloc(sizeof(NativeIterator) + plength * sizeof(Value));
     if (!ni)
         return NULL;
     ni->props_array = ni->props_cursor = (Value *) (ni + 1);
@@ -499,20 +499,21 @@ NativeIterator::allocateValueIterator(JSContext *cx, uint32 slength, const AutoV
 }
 
 inline void
-NativeIterator::init(JSObject *obj, uintN flags, const uint32 *sarray, uint32 slength, uint32 key)
+NativeIterator::init(JSObject *obj, uintN flags, uint32 slength, uint32 key)
 {
     this->obj = obj;
     this->flags = flags;
     this->shapes_array = (uint32 *) this->props_end;
     this->shapes_length = slength;
     this->shapes_key = key;
-    if (slength)
-        memcpy(this->shapes_array, sarray, slength * sizeof(uint32));
 }
 
 static inline void
 RegisterEnumerator(JSContext *cx, JSObject *iterobj, NativeIterator *ni)
 {
+    JS_ASSERT(!(ni->flags & JSITER_ACTIVE));
+    ni->flags |= JSITER_ACTIVE;
+
     /* Register non-escaping native enumerators (for-in) with the current context. */
     if (ni->flags & JSITER_ENUMERATE) {
         ni->next = cx->enumerators;
@@ -522,7 +523,7 @@ RegisterEnumerator(JSContext *cx, JSObject *iterobj, NativeIterator *ni)
 
 static inline bool
 VectorToKeyIterator(JSContext *cx, JSObject *obj, uintN flags, AutoIdVector &keys,
-                    const uint32 *sarray, uint32 slength, uint32 key, Value *vp)
+                    uint32 slength, uint32 key, Value *vp)
 {
     JS_ASSERT(!(flags & JSITER_FOREACH));
@@ -530,11 +531,27 @@ VectorToKeyIterator(JSContext *cx, JSObject *obj, uintN flags, AutoIdVector &key
     if (!iterobj)
         return false;
 
     NativeIterator *ni = NativeIterator::allocateKeyIterator(cx, slength, keys);
     if (!ni)
-        return NULL;
-    ni->init(obj, flags, sarray, slength, key);
+        return false;
+    ni->init(obj, flags, slength, key);
+
+    if (slength) {
+        /*
+         * Fill in the shape array from scratch. We can't use the array that was
+         * computed for the cache lookup earlier, as constructing iterobj could
+         * have triggered a shape-regenerating GC. Don't bother with regenerating
+         * the shape key; if such a GC *does* occur, we can only get hits through
+         * the one-slot lastNativeIterator cache.
+         */
+        JSObject *pobj = obj;
+        size_t ind = 0;
+        do {
+            ni->shapes_array[ind++] = pobj->shape();
+            pobj = pobj->getProto();
+        } while (pobj);
+        JS_ASSERT(ind == slength);
+    }
 
     iterobj->setNativeIterator(ni);
     vp->setObject(*iterobj);
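
The comment in this hunk is the heart of the GC-hazard fix named in the commit message: shape numbers recorded before an allocating call can be stale by the time they are stored, because the allocation may trigger a shape-regenerating GC. Here is a tiny self-contained model of the two orderings; all names are invented for illustration, and the bumped counter merely stands in for the GC renumbering every shape.

#include <cstdint>

struct ObjModel { uint32_t shape; };

// Stands in for any allocation that can trigger a shape-regenerating GC.
void allocatePossiblyGC(ObjModel &o) {
    o.shape += 1;   // the GC renumbered every shape
}

// Hazardous ordering: snapshot the shape, then allocate.
uint32_t recordThenAllocate(ObjModel &o) {
    uint32_t recorded = o.shape;
    allocatePossiblyGC(o);
    return recorded;            // stale: recorded != o.shape
}

// Fixed ordering (what the loop above implements): allocate the iterator
// first, then read each shape fresh off the prototype chain.
uint32_t allocateThenRecord(ObjModel &o) {
    allocatePossiblyGC(o);
    return o.shape;             // always matches the live object
}
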
@@ -546,12 +563,12 @@ VectorToKeyIterator(JSContext *cx, JSObject *obj, uintN flags, AutoIdVector &key
 bool
 VectorToKeyIterator(JSContext *cx, JSObject *obj, uintN flags, AutoIdVector &props, Value *vp)
 {
-    return VectorToKeyIterator(cx, obj, flags, props, NULL, 0, 0, vp);
+    return VectorToKeyIterator(cx, obj, flags, props, 0, 0, vp);
 }
 
-static inline bool
+bool
 VectorToValueIterator(JSContext *cx, JSObject *obj, uintN flags, AutoValueVector &vals,
-                      const uint32 *sarray, uint32 slength, uint32 key, Value *vp)
+                      Value *vp)
 {
     JS_ASSERT(flags & JSITER_FOREACH);
@@ -559,10 +576,10 @@ VectorToValueIterator(JSContext *cx, JSObject *obj, uintN flags, AutoValueVector
     if (!iterobj)
        return false;
 
-    NativeIterator *ni = NativeIterator::allocateValueIterator(cx, slength, vals);
+    NativeIterator *ni = NativeIterator::allocateValueIterator(cx, vals);
     if (!ni)
-        return NULL;
-    ni->init(obj, flags, sarray, slength, key);
+        return false;
+    ni->init(obj, flags, 0, 0);
 
     iterobj->setNativeIterator(ni);
     vp->setObject(*iterobj);
@@ -571,12 +588,6 @@ VectorToValueIterator(JSContext *cx, JSObject *obj, uintN flags, AutoValueVector
     return true;
 }
 
-bool
-VectorToValueIterator(JSContext *cx, JSObject *obj, uintN flags, AutoValueVector &props, Value *vp)
-{
-    return VectorToValueIterator(cx, obj, flags, props, NULL, 0, 0, vp);
-}
-
 bool
 EnumeratedIdVectorToIterator(JSContext *cx, JSObject *obj, uintN flags, AutoIdVector &props, Value *vp)
 {
@@ -613,11 +624,29 @@ GetIterator(JSContext *cx, JSObject *obj, uintN flags, Value *vp)
     if (obj) {
         if (keysOnly) {
+            /*
+             * Quick check to see if this is the same as the most recent
+             * object which was iterated over.
+             */
+            JSObject *last = JS_THREAD_DATA(cx)->lastNativeIterator;
+            JSObject *proto = obj->getProto();
+            if (last) {
+                NativeIterator *lastni = last->getNativeIterator();
+                if (!(lastni->flags & JSITER_ACTIVE) &&
+                    obj->shape() == lastni->shapes_array[0] &&
+                    proto && proto->shape() == lastni->shapes_array[1] &&
+                    !proto->getProto()) {
+                    vp->setObject(*last);
+                    RegisterEnumerator(cx, last, lastni);
+                    return true;
+                }
+            }
+
             /*
              * The iterator object for JSITER_ENUMERATE never escapes, so we
              * don't care for the proper parent/proto to be set. This also
-             * allows us to re-use a previous iterator object that was freed
-             * by JSOP_ENDITER.
+             * allows us to re-use a previous iterator object that is not
+             * currently active.
              */
             JSObject *pobj = obj;
             do {
@@ -639,13 +668,15 @@ GetIterator(JSContext *cx, JSObject *obj, uintN flags, Value *vp)
             JSObject *iterobj = *hp;
             if (iterobj) {
                 NativeIterator *ni = iterobj->getNativeIterator();
-                if (ni->shapes_key == key &&
+                if (!(ni->flags & JSITER_ACTIVE) &&
+                    ni->shapes_key == key &&
                     ni->shapes_length == shapes.length() &&
                     Compare(ni->shapes_array, shapes.begin(), ni->shapes_length)) {
                     vp->setObject(*iterobj);
-                    *hp = ni->next;
 
                     RegisterEnumerator(cx, iterobj, ni);
+
+                    if (shapes.length() == 2)
+                        JS_THREAD_DATA(cx)->lastNativeIterator = iterobj;
                     return true;
                 }
             }
@@ -666,13 +697,29 @@ GetIterator(JSContext *cx, JSObject *obj, uintN flags, Value *vp)
         AutoValueVector vals(cx);
         if (JS_LIKELY(obj != NULL) && !Snapshot<ValueEnumeration>(cx, obj, flags, vals))
             return false;
-        return VectorToValueIterator(cx, obj, flags, vals, shapes.begin(), shapes.length(), key, vp);
+        JS_ASSERT(shapes.empty());
+        if (!VectorToValueIterator(cx, obj, flags, vals, vp))
+            return false;
+    } else {
+        AutoIdVector keys(cx);
+        if (JS_LIKELY(obj != NULL) && !Snapshot<KeyEnumeration>(cx, obj, flags, keys))
+            return false;
+        if (!VectorToKeyIterator(cx, obj, flags, keys, shapes.length(), key, vp))
+            return false;
     }
 
-    AutoIdVector keys(cx);
-    if (JS_LIKELY(obj != NULL) && !Snapshot<KeyEnumeration>(cx, obj, flags, keys))
-        return false;
-    return VectorToKeyIterator(cx, obj, flags, keys, shapes.begin(), shapes.length(), key, vp);
+    JSObject *iterobj = &vp->toObject();
+
+    /* Cache the iterator object if possible. */
+    if (shapes.length()) {
+        uint32 hash = key % NATIVE_ITER_CACHE_SIZE;
+        JSObject **hp = &JS_THREAD_DATA(cx)->cachedNativeIterators[hash];
+        *hp = iterobj;
+    }
+
+    if (shapes.length() == 2)
+        JS_THREAD_DATA(cx)->lastNativeIterator = iterobj;
+    return true;
 }
static JSObject *
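
The new block at the top of GetIterator is the interpreter half of the start-iterator fast path; Compiler::iter in the methodjit hunk below emits the same test inline. Restated as standalone C++ over stand-in types (the real engine types are richer), a hit requires an inactive iterator and an unchanged shape pair for the object and its single prototype, whose own proto must be null:

#include <cstdint>

const unsigned ACTIVE_FLAG = 0x1000;    // stands in for JSITER_ACTIVE

struct NiModel   { unsigned flags; uint32_t shapes[2]; };
struct ObjModel2 { uint32_t shape; ObjModel2 *proto; NiModel *ni; };

// A hit requires: a last iterator exists, it is not already active, the
// object and its prototype still have the recorded shapes, and the
// prototype chain ends right there.
bool lastIteratorHit(ObjModel2 *obj, ObjModel2 *last) {
    if (!last)
        return false;
    NiModel *lastni = last->ni;
    ObjModel2 *proto = obj->proto;
    return !(lastni->flags & ACTIVE_FLAG) &&
           obj->shape == lastni->shapes[0] &&
           proto && proto->shape == lastni->shapes[1] &&
           !proto->proto;
}
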
@@ -799,21 +846,18 @@ js_CloseIterator(JSContext *cx, JSObject *obj)
     if (clasp == &js_IteratorClass) {
         /* Remove enumerators from the active list, which is a stack. */
         NativeIterator *ni = obj->getNativeIterator();
+
+        JS_ASSERT(ni->flags & JSITER_ACTIVE);
+        ni->flags &= ~JSITER_ACTIVE;
+
         if (ni->flags & JSITER_ENUMERATE) {
             JS_ASSERT(cx->enumerators == obj);
             cx->enumerators = ni->next;
         }
 
-        /* Cache the iterator object if possible. */
-        if (ni->shapes_length) {
-            uint32 hash = ni->shapes_key % NATIVE_ITER_CACHE_SIZE;
-            JSObject **hp = &JS_THREAD_DATA(cx)->cachedNativeIterators[hash];
-            ni->props_cursor = ni->props_array;
-            ni->next = *hp;
-            *hp = obj;
-        } else {
-            iterator_finalize(cx, obj);
-        }
+        /* Reset the enumerator; it may still be in the cached iterators
+         * for this thread, and can be reused. */
+        ni->props_cursor = ni->props_array;
     }
 #if JS_HAS_GENERATORS
     else if (clasp == &js_GeneratorClass) {
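
Taken together, RegisterEnumerator and js_CloseIterator now form an acquire/release pair: an iterator stays in the caches permanently, and the JSITER_ACTIVE bit alone decides whether it may be handed out again. Before this patch, an iterator was pushed into the cache only on close and unlinked on reuse. A minimal model of the protocol, with the flag value and all names as stand-ins:

#include <cassert>

const unsigned ACTIVE_BIT = 0x1000;    // stands in for JSITER_ACTIVE

struct NiModel3 {
    unsigned flags;
    int *props_array, *props_cursor;
};

// cf. RegisterEnumerator: acquire. Asserts the iterator was not handed
// out twice, which the cache-hit tests above guarantee.
void acquire(NiModel3 &ni) {
    assert(!(ni.flags & ACTIVE_BIT));
    ni.flags |= ACTIVE_BIT;
}

// cf. js_CloseIterator: release. The iterator remains cached; resetting
// the cursor is all that is needed to make it reusable.
void release(NiModel3 &ni) {
    assert(ni.flags & ACTIVE_BIT);
    ni.flags &= ~ACTIVE_BIT;
    ni.props_cursor = ni.props_array;
}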


@@ -58,6 +58,9 @@
 #define JSITER_OWNONLY 0x8 /* iterate over obj's own properties only */
 #define JSITER_HIDDEN 0x10 /* also enumerate non-enumerable properties */
 
+/* Whether the iterator is currently active. Not serialized by XDR. */
+#define JSITER_ACTIVE 0x1000
+
 struct NativeIterator {
     JSObject *obj;
     void *props_array;
@@ -66,8 +69,8 @@ struct NativeIterator {
     uint32 *shapes_array;
     uint32 shapes_length;
     uint32 shapes_key;
-    uintN flags;
-    JSObject *next;
+    uint32 flags;
+    JSObject *next; /* Forms cx->enumerators list, garbage otherwise. */
 
     bool isKeyIter() const { return (flags & JSITER_FOREACH) == 0; }
@@ -121,9 +124,9 @@ struct NativeIterator {
     static NativeIterator *allocateKeyIterator(JSContext *cx, uint32 slength,
                                                const js::AutoIdVector &props);
-    static NativeIterator *allocateValueIterator(JSContext *cx, uint32 slength,
+    static NativeIterator *allocateValueIterator(JSContext *cx,
                                                  const js::AutoValueVector &props);
-    void init(JSObject *obj, uintN flags, const uint32 *sarray, uint32 slength, uint32 key);
+    void init(JSObject *obj, uintN flags, uint32 slength, uint32 key);
 
     void mark(JSTracer *trc);
 };


@@ -945,13 +945,7 @@ mjit::Compiler::generateMethod()
           END_CASE(JSOP_STRICTNE)
 
           BEGIN_CASE(JSOP_ITER)
-          {
-            prepareStubCall(Uses(1));
-            masm.move(Imm32(PC[1]), Registers::ArgReg1);
-            stubCall(stubs::Iter);
-            frame.pop();
-            frame.pushSynced();
-          }
+            iter(PC[1]);
           END_CASE(JSOP_ITER)
 
           BEGIN_CASE(JSOP_MOREITER)
@@ -961,9 +955,7 @@ mjit::Compiler::generateMethod()
           END_CASE(JSOP_MOREITER)
 
           BEGIN_CASE(JSOP_ENDITER)
-            prepareStubCall(Uses(1));
-            stubCall(stubs::EndIter);
-            frame.pop();
+            iterEnd();
           END_CASE(JSOP_ENDITER)
 
           BEGIN_CASE(JSOP_POP)
@@ -3088,6 +3080,120 @@ mjit::Compiler::jsop_propinc(JSOp op, VoidStubAtom stub, uint32 index)
     PC += JSOP_PROPINC_LENGTH;
 }
 
+void
+mjit::Compiler::iter(uintN flags)
+{
+    FrameEntry *fe = frame.peek(-1);
+
+    /*
+     * Stub the call if this is not a simple 'for in' loop or if the iterated
+     * value is known to not be an object.
+     */
+    if ((flags != JSITER_ENUMERATE) || fe->isNotType(JSVAL_TYPE_OBJECT)) {
+        prepareStubCall(Uses(1));
+        masm.move(Imm32(flags), Registers::ArgReg1);
+        stubCall(stubs::Iter);
+        frame.pop();
+        frame.pushSynced();
+        return;
+    }
+
+    if (!fe->isTypeKnown()) {
+        Jump notObject = frame.testObject(Assembler::NotEqual, fe);
+        stubcc.linkExit(notObject, Uses(1));
+    }
+
+    RegisterID reg = frame.tempRegForData(fe);
+    frame.pinReg(reg);
+    RegisterID ioreg = frame.allocReg();  /* Will hold iterator JSObject */
+    RegisterID nireg = frame.allocReg();  /* Will hold NativeIterator */
+    RegisterID T1 = frame.allocReg();
+    RegisterID T2 = frame.allocReg();
+    frame.unpinReg(reg);
+
+    /*
+     * Fetch the most recent iterator. TODO: bake this pointer in when
+     * iterator caches become per-compartment.
+     */
+    masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), T1);
+#ifdef JS_THREADSAFE
+    masm.loadPtr(Address(T1, offsetof(JSContext, thread)), T1);
+    masm.loadPtr(Address(T1, offsetof(JSThread, data.lastNativeIterator)), ioreg);
+#else
+    masm.loadPtr(Address(T1, offsetof(JSContext, runtime)), T1);
+    masm.loadPtr(Address(T1, offsetof(JSRuntime, threadData.lastNativeIterator)), ioreg);
+#endif
+
+    /* Test for NULL. */
+    Jump nullIterator = masm.branchTest32(Assembler::Zero, ioreg, ioreg);
+    stubcc.linkExit(nullIterator, Uses(1));
+
+    /* Get NativeIterator from iter obj. :FIXME: X64, also most of this function */
+    Address privSlot(ioreg, offsetof(JSObject, fslots) + sizeof(Value) * JSSLOT_PRIVATE);
+    masm.loadPayload(privSlot, nireg);
+
+    /* Test for active iterator. */
+    Address flagsAddr(nireg, offsetof(NativeIterator, flags));
+    masm.load32(flagsAddr, T1);
+    masm.and32(Imm32(JSITER_ACTIVE), T1);
+    Jump activeIterator = masm.branchTest32(Assembler::NonZero, T1, T1);
+    stubcc.linkExit(activeIterator, Uses(1));
+
+    /* Compare shape of object with iterator. */
+    masm.loadShape(reg, T1);
+    masm.loadPtr(Address(nireg, offsetof(NativeIterator, shapes_array)), T2);
+    masm.load32(Address(T2, 0), T2);
+    Jump mismatchedObject = masm.branch32(Assembler::NotEqual, T1, T2);
+    stubcc.linkExit(mismatchedObject, Uses(1));
+
+    /* Compare shape of object's prototype with iterator. */
+    masm.loadPtr(Address(reg, offsetof(JSObject, proto)), T1);
+    masm.loadShape(T1, T1);
+    masm.loadPtr(Address(nireg, offsetof(NativeIterator, shapes_array)), T2);
+    masm.load32(Address(T2, sizeof(uint32)), T2);
+    Jump mismatchedProto = masm.branch32(Assembler::NotEqual, T1, T2);
+    stubcc.linkExit(mismatchedProto, Uses(1));
+
+    /*
+     * Compare object's prototype's prototype with NULL. The last native
+     * iterator will always have a prototype chain length of one
+     * (i.e. it must be a plain object), so we do not need to generate
+     * a loop here.
+     */
+    masm.loadPtr(Address(reg, offsetof(JSObject, proto)), T1);
+    masm.loadPtr(Address(T1, offsetof(JSObject, proto)), T1);
+    Jump overlongChain = masm.branchPtr(Assembler::NonZero, T1, T1);
+    stubcc.linkExit(overlongChain, Uses(1));
+
+    /* Found a match with the most recent iterator. Hooray! */
+
+    /* Mark iterator as active. */
+    masm.load32(flagsAddr, T1);
+    masm.or32(Imm32(JSITER_ACTIVE), T1);
+    masm.store32(T1, flagsAddr);
+
+    /* Chain onto the active iterator stack. */
+    masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), T1);
+    masm.loadPtr(Address(T1, offsetof(JSContext, enumerators)), T2);
+    masm.storePtr(T2, Address(nireg, offsetof(NativeIterator, next)));
+    masm.storePtr(ioreg, Address(T1, offsetof(JSContext, enumerators)));
+
+    frame.freeReg(nireg);
+    frame.freeReg(T1);
+    frame.freeReg(T2);
+
+    stubcc.leave();
+    stubcc.masm.move(Imm32(flags), Registers::ArgReg1);
+    stubcc.call(stubs::Iter);
+
+    /* Push the iterator object. */
+    frame.pop();
+    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, ioreg);
+
+    stubcc.rejoin(Changes(1));
+}
+
 /*
  * This big nasty function emits a fast-path for native iterators, producing
  * a temporary value on the stack for FORLOCAL,ARG,GLOBAL,etc ops to use.
@@ -3195,6 +3301,60 @@ mjit::Compiler::iterMore()
     jumpAndTrace(jFast, target, &j);
 }
 
+void
+mjit::Compiler::iterEnd()
+{
+    FrameEntry *fe = frame.peek(-1);
+    RegisterID reg = frame.tempRegForData(fe);
+
+    frame.pinReg(reg);
+    RegisterID T1 = frame.allocReg();
+    frame.unpinReg(reg);
+
+    /* Test clasp */
+    masm.loadPtr(Address(reg, offsetof(JSObject, clasp)), T1);
+    Jump notIterator = masm.branchPtr(Assembler::NotEqual, T1, ImmPtr(&js_IteratorClass));
+    stubcc.linkExit(notIterator, Uses(1));
+
+    /* Get private from iter obj. :FIXME: X64 */
+    Address privSlot(reg, offsetof(JSObject, fslots) + sizeof(Value) * JSSLOT_PRIVATE);
+    masm.loadPayload(privSlot, T1);
+
+    RegisterID T2 = frame.allocReg();
+
+    /* Load flags. */
+    Address flagAddr(T1, offsetof(NativeIterator, flags));
+    masm.loadPtr(flagAddr, T2);
+
+    /* Test for (flags == ENUMERATE | ACTIVE). */
+    Jump notEnumerate = masm.branch32(Assembler::NotEqual, T2,
+                                      Imm32(JSITER_ENUMERATE | JSITER_ACTIVE));
+    stubcc.linkExit(notEnumerate, Uses(1));
+
+    /* Clear active bit. */
+    masm.and32(Imm32(~JSITER_ACTIVE), T2);
+    masm.storePtr(T2, flagAddr);
+
+    /* Reset property cursor. */
+    masm.loadPtr(Address(T1, offsetof(NativeIterator, props_array)), T2);
+    masm.storePtr(T2, Address(T1, offsetof(NativeIterator, props_cursor)));
+
+    /* Advance enumerators list. */
+    masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), T2);
+    masm.loadPtr(Address(T1, offsetof(NativeIterator, next)), T1);
+    masm.storePtr(T1, Address(T2, offsetof(JSContext, enumerators)));
+
+    frame.freeReg(T1);
+    frame.freeReg(T2);
+
+    stubcc.leave();
+    stubcc.call(stubs::EndIter);
+
+    frame.pop();
+    stubcc.rejoin(Changes(1));
+}
+
 void
 mjit::Compiler::jsop_eleminc(JSOp op, VoidStub stub)
 {
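
For reference, here is roughly what the machine code emitted by Compiler::iterEnd does on its fast path, restated as standalone C++ over stand-in types; any failed guard instead bails out to the stubs::EndIter call on the stubcc out-of-line path, and the real emitted code first guards that the object's clasp is js_IteratorClass, which is omitted here. The flag values are assumptions consistent with the jsiter.h hunk above.

#include <cstdint>

const unsigned ENUMERATE_BIT = 0x1;     // stands in for JSITER_ENUMERATE
const unsigned ACTIVE_BIT2 = 0x1000;    // stands in for JSITER_ACTIVE

struct NiModel4 {
    unsigned flags;
    void *props_array, *props_cursor;
    void *next;                         // cx->enumerators chain
};

struct CxModel { void *enumerators; };

// Returns false where the emitted guards would jump to the slow path.
bool fastEndIter(CxModel &cx, NiModel4 *ni) {
    if (ni->flags != (ENUMERATE_BIT | ACTIVE_BIT2))
        return false;                        // guard: exact flag pattern
    ni->flags &= ~ACTIVE_BIT2;               // clear active bit
    ni->props_cursor = ni->props_array;      // rewind property cursor
    cx.enumerators = ni->next;               // pop the enumerator stack
    return true;
}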


@@ -226,8 +226,10 @@ class Compiler
     /* Emitting helpers. */
     void restoreFrameRegs(Assembler &masm);
     void emitStubCmpOp(BoolStub stub, jsbytecode *target, JSOp fused);
+    void iter(uintN flags);
     void iterNext();
     void iterMore();
+    void iterEnd();
     MaybeJump loadDouble(FrameEntry *fe, FPRegisterID fpReg);
 
     /* Opcode handlers. */