Backed out changeset f5ea964eb493 (Brendan Eich — High-level CSE for shape guards (518448, r=jorendorff)).

Robert Sayre 2009-09-26 08:16:58 -07:00
parent 69fd06459c
commit 5dbce00686
5 changed files with 14 additions and 177 deletions

View File

@@ -1022,27 +1022,8 @@ JSScope::reportReadOnlyScope(JSContext *cx)
void
JSScope::generateOwnShape(JSContext *cx)
{
#ifdef JS_TRACER
    if (object) {
        js_LeaveTraceIfGlobalObject(cx, object);

        /*
         * The JIT must have arranged to re-guard after any unpredictable shape
         * change, so if we are on trace here, we should already be prepared to
         * bail off trace.
         */
        JS_ASSERT_IF(JS_ON_TRACE(cx), cx->bailExit);

        /*
         * If we are recording, here is where we forget already-guarded shapes.
         * Any subsequent property operation upon object on the trace currently
         * being recorded will re-guard (and re-memoize).
         */
        JSTraceMonitor *tm = &JS_TRACE_MONITOR(cx);
        if (TraceRecorder *tr = tm->recorder)
            tr->forgetGuardedShapesForObject(object);
    }
#endif
    if (object)
        js_LeaveTraceIfGlobalObject(cx, object);
    shape = js_GenerateShape(cx, false);
    setOwnShape();

View File

@@ -2181,8 +2181,6 @@ TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* _anchor, Fragment* _frag
    this->newobj_ins = NULL;
    this->loopLabel = NULL;
    guardedShapeTable.ops = NULL;
#ifdef JS_JIT_SPEW
    debug_only_print0(LC_TMMinimal, "\n");
    debug_only_printf(LC_TMMinimal, "Recording starting from %s:%u@%u (FragID=%06u)\n",
@@ -2244,7 +2242,7 @@ TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* _anchor, Fragment* _frag
        fragment->loopLabel = entryLabel;
    })
    lirbuf->sp = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, sp)), "sp");
    lirbuf->sp = addName(lir->insLoad(LIR_ldp, lirbuf->state, (int)offsetof(InterpState, sp)), "sp");
    lirbuf->rp = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, rp)), "rp");
    cx_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, cx)), "cx");
    eos_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eos)), "eos");
@@ -2301,8 +2299,6 @@ TraceRecorder::~TraceRecorder()
    delete func_filter;
    delete float_filter;
    delete lir_buf_writer;
    forgetGuardedShapes();
}
bool
@@ -8587,136 +8583,20 @@ TraceRecorder::binary(LOpcode op)
    return JSRS_STOP;
}
struct GuardedShapeEntry : public JSDHashEntryStub
{
    JSObject* obj;
};

#ifdef DEBUG
#include <stdio.h>

static FILE* shapefp = NULL;

static void
DumpShape(JSObject* obj, const char* prefix)
{
    JSScope* scope = OBJ_SCOPE(obj);

    if (!shapefp) {
        shapefp = fopen("/tmp/shapes.dump", "w");
        if (!shapefp)
            return;
    }

    fprintf(shapefp, "\n%s: shape %u flags %x\n", prefix, scope->shape, scope->flags);
    for (JSScopeProperty* sprop = scope->lastProp; sprop; sprop = sprop->parent) {
        if (JSID_IS_ATOM(sprop->id)) {
            fprintf(shapefp, " %s", JS_GetStringBytes(JSVAL_TO_STRING(ID_TO_VALUE(sprop->id))));
        } else {
            JS_ASSERT(!JSID_IS_OBJECT(sprop->id));
            fprintf(shapefp, " %d", JSID_TO_INT(sprop->id));
        }
        fprintf(shapefp, " %u %p %p %x %x %d\n",
                sprop->slot, sprop->getter, sprop->setter, sprop->attrs, sprop->flags,
                sprop->shortid);
    }
    fflush(shapefp);
}

static JSDHashOperator
DumpShapeEnumerator(JSDHashTable* table, JSDHashEntryHdr* hdr, uint32 number, void* arg)
{
    GuardedShapeEntry* entry = (GuardedShapeEntry*) hdr;
    const char* prefix = (const char*) arg;

    DumpShape(entry->obj, prefix);
    return JS_DHASH_NEXT;
}

void
TraceRecorder::dumpGuardedShapes(const char* prefix)
{
    if (guardedShapeTable.ops)
        JS_DHashTableEnumerate(&guardedShapeTable, DumpShapeEnumerator, (void*) prefix);
}
#endif /* DEBUG */

JS_REQUIRES_STACK JSRecordingStatus
TraceRecorder::guardShape(LIns* obj_ins, JSObject* obj, uint32 shape, const char* guardName,
                          LIns* map_ins, VMSideExit* exit)
{
    if (!guardedShapeTable.ops) {
        JS_DHashTableInit(&guardedShapeTable, JS_DHashGetStubOps(), NULL,
                          sizeof(GuardedShapeEntry), JS_DHASH_MIN_SIZE);
    }

    // Test (with add if missing) for a remembered guard for (obj_ins, obj).
    GuardedShapeEntry* entry = (GuardedShapeEntry*)
        JS_DHashTableOperate(&guardedShapeTable, obj_ins, JS_DHASH_ADD);
    if (!entry) {
        JS_ReportOutOfMemory(cx);
        return JSRS_ERROR;
    }

    // If already guarded, emit an assertion that the shape matches.
    if (entry->key) {
        JS_ASSERT(entry->key == obj_ins);
        JS_ASSERT(entry->obj == obj);
        debug_only_stmt(
            lir->insAssert(lir->ins2i(LIR_eq,
                                      lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)),
                                      shape));
        )
        return JSRS_CONTINUE;
    }

    // Not yet guarded. Remember obj_ins along with obj (for invalidation).
    entry->key = obj_ins;
    entry->obj = obj;
    debug_only_stmt(DumpShape(obj, "guard");)
    debug_only_stmt(fprintf(shapefp, "for obj_ins %p\n", obj_ins);)

    // Finally, emit the shape guard.
    LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)), "shape");
    guard(true,
          addName(lir->ins2i(LIR_eq, shape_ins, shape), guardName),
          exit);
    return JSRS_CONTINUE;
}

static JSDHashOperator
ForgetGuardedShapesForObject(JSDHashTable* table, JSDHashEntryHdr* hdr, uint32 number, void* arg)
{
    GuardedShapeEntry* entry = (GuardedShapeEntry*) hdr;
    if (entry->obj == arg) {
        debug_only_stmt(DumpShape(entry->obj, "forget");)
        return JS_DHASH_REMOVE;
    }
    return JS_DHASH_NEXT;
}

void
TraceRecorder::forgetGuardedShapesForObject(JSObject* obj)
{
    if (guardedShapeTable.ops)
        JS_DHashTableEnumerate(&guardedShapeTable, ForgetGuardedShapesForObject, obj);
}

void
TraceRecorder::forgetGuardedShapes()
{
    if (guardedShapeTable.ops) {
        debug_only_stmt(dumpGuardedShapes("forget-all");)
        JS_DHashTableFinish(&guardedShapeTable);
        guardedShapeTable.ops = NULL;
    }
}

JS_STATIC_ASSERT(offsetof(JSObjectOps, objectMap) == 0);

inline LIns*
TraceRecorder::map(LIns* obj_ins)
TraceRecorder::map(LIns *obj_ins)
{
    return addName(lir->insLoad(LIR_ldp, obj_ins, (int) offsetof(JSObject, map)), "map");
}
@@ -8896,7 +8776,7 @@ TraceRecorder::guardPropertyCacheHit(LIns* obj_ins,
    // Otherwise guard on key object exact match.
    if (PCVCAP_TAG(entry->vcap) <= 1) {
        if (aobj != globalObj)
            CHECK_STATUS(guardShape(obj_ins, aobj, entry->kshape, "guard_kshape", map_ins, exit));
            guardShape(obj_ins, aobj, entry->kshape, "guard_kshape", map_ins, exit);
        if (entry->adding()) {
            if (aobj == globalObj)
@@ -8945,7 +8825,7 @@ TraceRecorder::guardPropertyCacheHit(LIns* obj_ins,
        } else {
            obj2_ins = INS_CONSTOBJ(obj2);
        }
        CHECK_STATUS(guardShape(obj2_ins, obj2, vshape, "guard_vshape", map(obj2_ins), exit));
        guardShape(obj2_ins, obj2, vshape, "guard_vshape", map(obj2_ins), exit);
    }
    pcval = entry->vword;
@@ -9234,7 +9114,7 @@ TraceRecorder::guardPrototypeHasNoIndexedProperties(JSObject* obj, LIns* obj_ins
        return JSRS_STOP;
    while (guardHasPrototype(obj, obj_ins, &obj, &obj_ins, exit))
        CHECK_STATUS(guardShape(obj_ins, obj, OBJ_SHAPE(obj), "guard(shape)", map(obj_ins), exit));
        guardShape(obj_ins, obj, OBJ_SHAPE(obj), "guard(shape)", map(obj_ins), exit);
    return JSRS_CONTINUE;
}
@@ -10709,11 +10589,9 @@ TraceRecorder::setProp(jsval &l, JSPropCacheEntry* entry, JSScopeProperty* sprop
        if (obj == globalObj)
            ABORT_TRACE("can't trace function-valued property set in branded global scope");
        enterDeepBailCall();
        LIns* args[] = { v_ins, INS_CONSTSPROP(sprop), obj_ins, cx_ins };
        LIns* ok_ins = lir->insCall(&MethodWriteBarrier_ci, args);
        guard(false, lir->ins_eq0(ok_ins), OOM_EXIT);
        leaveDeepBailCall();
    }
    // Find obj2. If entry->adding(), the TAG bits are all 0.
@@ -10822,9 +10700,6 @@ TraceRecorder::enterDeepBailCall()
    // Tell nanojit not to discard or defer stack writes before this call.
    GuardRecord* guardRec = createGuardRecord(exit);
    lir->insGuard(LIR_xbarrier, NULL, guardRec);
    // Forget about guarded shapes, since deep bailers can reshape the world.
    forgetGuardedShapes();
    return exit;
}
@@ -11978,8 +11853,7 @@ TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32 *slotp, LIns** v_insp,
        LIns* map_ins = map(obj_ins);
        LIns* ops_ins;
        if (map_is_native(obj->map, map_ins, ops_ins)) {
            CHECK_STATUS(guardShape(obj_ins, obj, OBJ_SHAPE(obj), "guard(shape)", map_ins,
                                    exit));
            guardShape(obj_ins, obj, OBJ_SHAPE(obj), "guard(shape)", map_ins, exit);
        } else if (!guardDenseArray(obj, obj_ins, exit)) {
            ABORT_TRACE("non-native object involved in undefined property access");
        }
@@ -12058,10 +11932,8 @@ TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32 *slotp, LIns** v_insp,
     * leaked to the calling script.
     */
    if (isMethod && !cx->fp->imacpc) {
        enterDeepBailCall();
        LIns* args[] = { v_ins, INS_CONSTSPROP(sprop), obj_ins, cx_ins };
        v_ins = lir->insCall(&MethodReadBarrier_ci, args);
        leaveDeepBailCall();
    }
    if (slotp) {
@@ -12806,7 +12678,7 @@ TraceRecorder::record_JSOP_INSTANCEOF()
    stack(-2, lir->insCall(&HasInstance_ci, args));
    LIns* status_ins = lir->insLoad(LIR_ld,
                                    lirbuf->state,
                                    offsetof(InterpState, builtinStatus));
                                    (int) offsetof(InterpState, builtinStatus));
    pendingGuardCondition = lir->ins_eq0(status_ins);
    leaveDeepBailCall();

View File

@@ -47,7 +47,6 @@
#include "jstypes.h"
#include "jsbuiltins.h"
#include "jscntxt.h"
#include "jsdhash.h"
#include "jsinterp.h"
#include "jslock.h"
#include "jsnum.h"
@@ -838,17 +837,9 @@ class TraceRecorder {
    JS_REQUIRES_STACK JSRecordingStatus unary(nanojit::LOpcode op);
    JS_REQUIRES_STACK JSRecordingStatus binary(nanojit::LOpcode op);
    JS_REQUIRES_STACK JSRecordingStatus guardShape(nanojit::LIns* obj_ins, JSObject* obj,
                                                   uint32 shape, const char* name,
                                                   nanojit::LIns* map_ins, VMSideExit* exit);
    JSDHashTable guardedShapeTable;
#ifdef DEBUG
    void dumpGuardedShapes(const char* prefix);
#endif
    void forgetGuardedShapes();
    JS_REQUIRES_STACK void guardShape(nanojit::LIns* obj_ins, JSObject* obj,
                                      uint32 shape, const char* guardName,
                                      nanojit::LIns* map_ins, VMSideExit* exit);
    inline nanojit::LIns* map(nanojit::LIns *obj_ins);
    JS_REQUIRES_STACK bool map_is_native(JSObjectMap* map, nanojit::LIns* map_ins,
@@ -1056,7 +1047,7 @@ public:
    JS_REQUIRES_STACK JSRecordingStatus record_DefLocalFunSetSlot(uint32 slot, JSObject* obj);
    JS_REQUIRES_STACK JSRecordingStatus record_NativeCallComplete();
    void forgetGuardedShapesForObject(JSObject* obj);
    TreeInfo* getTreeInfo() { return treeInfo; }
#ifdef DEBUG
    void tprint(const char *format, int count, nanojit::LIns *insa[]);

View File

@@ -1079,13 +1079,6 @@ namespace nanojit
        virtual LInsp insSkip(size_t size) {
            return out->insSkip(size);
        }

        void insAssert(LIns* expr) {
        #if defined DEBUG
            LIns* branch = insBranch(LIR_jt, expr, NULL);
            ins0(LIR_dbreak);
            branch->setTarget(ins0(LIR_label));
        #endif
        }

        // convenience functions

View File

@@ -74,7 +74,7 @@
OPDEF(start, 0, 0, Op0) // start of a fragment
OPDEF(regfence, 1, 0, Op0) // register fence, no register allocation is allowed across this meta instruction
OPDEF(skip, 2, 1, Sk) // holds blobs ("payloads") of data; also links pages
OPDEF(dbreak, 3, 0, Op0)
OPDEF(unused3, 3,-1, None)
OPDEF(unused4, 4,-1, None)
OPDEF(unused5, 5,-1, None)
OPDEF(unused6, 6,-1, None)