Bug 795803 - Enable ICing of JSNative and PropertyOp getters. (r=jandem)

Kannan Vijayan 2012-10-03 22:55:24 -04:00
parent 691b3b77ff
commit a95c44ef34
14 changed files with 580 additions and 62 deletions
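This patch teaches Ion's GETPROP inline cache to attach stubs that call non-scripted getters directly from jitcode. The two getter shapes involved are the ones quoted in the new generateCallGetter() below; as a reference point, here is a minimal sketch of what such getters look like on the C++ side. The function names and return values are illustrative only, not part of the patch.

// Sketch only: the two getter kinds the new IC stubs can call.
#include "jsapi.h"

// JSNative getter, found through shape->getterValue(): called with argc == 0,
// vp[0] holding the callee (reused as the outparam) and vp[1] holding |this|.
static JSBool
ExampleNativeGetter(JSContext *cx, unsigned argc, jsval *vp)
{
    vp[0] = INT_TO_JSVAL(42);   // the stub reads the result back from vp[0]
    return JS_TRUE;
}

// JSPropertyOp getter, found through shape->getterOp(): receives the object and
// the canonical jsid, and writes its result through the vp handle.
static JSBool
ExamplePropertyOpGetter(JSContext *cx, JSHandleObject obj, JSHandleId id, JSMutableHandleValue vp)
{
    vp.set(INT_TO_JSVAL(42));
    return JS_TRUE;
}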

View File

@ -674,7 +674,7 @@ CodeGenerator::visitCallDOMNative(LCallDOMNative *call)
uint32 safepointOffset;
if (!masm.buildFakeExitFrame(argJSContext, &safepointOffset))
return false;
masm.enterFakeDOMFrame(ION_FRAME_DOMMETHOD);
masm.enterFakeExitFrame(ION_FRAME_DOMMETHOD);
if (!markSafepointAt(safepointOffset, call))
return false;
@ -3222,7 +3222,7 @@ CodeGenerator::visitOutOfLineCacheGetProperty(OutOfLineCache *ool)
RegisterSet liveRegs = ool->cache()->safepoint()->liveRegs();
LInstruction *ins = ool->cache();
const MInstruction *mir = ins->mirRaw()->toInstruction();
MInstruction *mir = ins->mirRaw()->toInstruction();
TypedOrValueRegister output;
@ -3233,6 +3233,7 @@ CodeGenerator::visitOutOfLineCacheGetProperty(OutOfLineCache *ool)
// Note: because all registers are saved, the output register should be
// a def register, else the result will be overridden by restoreLive(ins)
PropertyName *name = NULL;
bool allowGetters = false;
switch (ins->op()) {
case LInstruction::LOp_InstanceOfO:
case LInstruction::LOp_InstanceOfV:
@ -3244,11 +3245,15 @@ CodeGenerator::visitOutOfLineCacheGetProperty(OutOfLineCache *ool)
name = ((LGetPropertyCacheT *) ins)->mir()->name();
objReg = ToRegister(ins->getOperand(0));
output = TypedOrValueRegister(mir->type(), ToAnyRegister(ins->getDef(0)));
JS_ASSERT(mir->isGetPropertyCache());
allowGetters = mir->toGetPropertyCache()->allowGetters();
break;
case LInstruction::LOp_GetPropertyCacheV:
name = ((LGetPropertyCacheV *) ins)->mir()->name();
objReg = ToRegister(ins->getOperand(0));
output = TypedOrValueRegister(GetValueOutput(ins));
JS_ASSERT(mir->isGetPropertyCache());
allowGetters = mir->toGetPropertyCache()->allowGetters();
break;
default:
JS_NOT_REACHED("Bad instruction");
@ -3257,7 +3262,7 @@ CodeGenerator::visitOutOfLineCacheGetProperty(OutOfLineCache *ool)
IonCacheGetProperty cache(ool->getInlineJump(), ool->getInlineLabel(),
masm.labelForPatch(), liveRegs,
objReg, name, output);
objReg, name, output, allowGetters);
if (mir->resumePoint())
cache.setScriptedLocation(mir->block()->info().script(), mir->resumePoint()->pc());
@ -4039,7 +4044,7 @@ CodeGenerator::visitGetDOMProperty(LGetDOMProperty *ins)
uint32 safepointOffset;
if (!masm.buildFakeExitFrame(JSContextReg, &safepointOffset))
return false;
masm.enterFakeDOMFrame(ION_FRAME_DOMGETTER);
masm.enterFakeExitFrame(ION_FRAME_DOMGETTER);
if (!markSafepointAt(safepointOffset, ins))
return false;
@ -4105,7 +4110,7 @@ CodeGenerator::visitSetDOMProperty(LSetDOMProperty *ins)
uint32 safepointOffset;
if (!masm.buildFakeExitFrame(JSContextReg, &safepointOffset))
return false;
masm.enterFakeDOMFrame(ION_FRAME_DOMSETTER);
masm.enterFakeExitFrame(ION_FRAME_DOMSETTER);
if (!markSafepointAt(safepointOffset, ins))
return false;

View File

@ -5878,6 +5878,7 @@ IonBuilder::jsop_getprop(HandlePropertyName name)
return makeCallBarrier(getter, 0, false, types, barrier);
}
bool accessGetter = oracle->propertyReadAccessGetter(script_, pc);
if (unary.ival == MIRType_Object) {
MIRType rvalType = MIRType_Value;
if (!barrier && !IsNullOrUndefined(unary.rval))
@ -5918,11 +5919,19 @@ IonBuilder::jsop_getprop(HandlePropertyName name)
load->setIdempotent();
}
ins = load;
if (JSOp(*pc) == JSOP_CALLPROP) {
if (!annotateGetPropertyCache(cx, obj, load, unaryTypes.inTypes, types))
return false;
}
// If the cache is known to access getters, then enable generation of
// getter stubs and set its result type to value.
if (accessGetter) {
load->setResultType(MIRType_Value);
load->setAllowGetters();
}
ins = load;
} else {
ins = MCallGetProperty::New(obj, name);
}
@ -5933,7 +5942,7 @@ IonBuilder::jsop_getprop(HandlePropertyName name)
if (ins->isEffectful() && !resumeAfter(ins))
return false;
if (ins->isCallGetProperty())
if (ins->isCallGetProperty() || accessGetter)
monitorResult(ins, types);
return pushTypeBarrier(ins, types, barrier);
}

View File

@ -80,6 +80,23 @@ CodeOffsetJump::fixup(MacroAssembler *masm)
static const size_t MAX_STUBS = 16;
static bool
IsCacheableListBase(JSObject *obj)
{
if (!obj->isProxy())
return false;
BaseProxyHandler *handler = GetProxyHandler(obj);
if (handler->family() != GetListBaseHandlerFamily())
return false;
if (obj->numFixedSlots() <= GetListBaseExpandoSlot())
return false;
return true;
}
static void
GeneratePrototypeGuards(JSContext *cx, MacroAssembler &masm, JSObject *obj, JSObject *holder,
Register objectReg, Register scratchReg, Label *failures)
@ -94,7 +111,10 @@ GeneratePrototypeGuards(JSContext *cx, MacroAssembler &masm, JSObject *obj, JSOb
masm.branchPtr(Assembler::NotEqual, proto, ImmGCPtr(obj->getProto()), failures);
}
JSObject *pobj = obj->getProto();
JSObject *pobj = IsCacheableListBase(obj)
? obj->getTaggedProto().toObjectOrNull()
: obj->getProto();
JS_ASSERT(pobj);
while (pobj != holder) {
if (pobj->hasUncacheableProto()) {
if (pobj->hasSingletonType()) {
@ -112,14 +132,71 @@ GeneratePrototypeGuards(JSContext *cx, MacroAssembler &masm, JSObject *obj, JSOb
}
}
static bool
IsCacheableProtoChain(JSObject *obj, JSObject *holder)
{
while (obj != holder) {
/*
* We cannot assume that we find the holder object on the prototype
* chain and must check for null proto. The prototype chain can be
* altered during the lookupProperty call.
*/
JSObject *proto = IsCacheableListBase(obj)
? obj->getTaggedProto().toObjectOrNull()
: obj->getProto();
if (!proto || !proto->isNative())
return false;
obj = proto;
}
return true;
}
static bool
IsCacheableGetPropReadSlot(JSObject *obj, JSObject *holder, const Shape *shape)
{
if (!shape || !IsCacheableProtoChain(obj, holder))
return false;
if (!shape->hasSlot() || !shape->hasDefaultGetter())
return false;
return true;
}
static bool
IsCacheableGetPropCallNative(JSObject *obj, JSObject *holder, const Shape *shape)
{
if (!shape || !IsCacheableProtoChain(obj, holder))
return false;
if (!shape->hasGetterValue() || !shape->getterValue().isObject())
return false;
return shape->getterValue().toObject().isFunction() &&
shape->getterValue().toObject().toFunction()->isNative();
}
static bool
IsCacheableGetPropCallPropertyOp(JSObject *obj, JSObject *holder, const Shape *shape)
{
if (!shape || !IsCacheableProtoChain(obj, holder))
return false;
if (shape->hasSlot() || shape->hasGetterValue() || shape->hasDefaultGetter())
return false;
return true;
}
struct GetNativePropertyStub
{
CodeOffsetJump exitOffset;
CodeOffsetJump rejoinOffset;
CodeOffsetLabel stubCodePatchOffset;
void generate(JSContext *cx, MacroAssembler &masm, JSObject *obj, JSObject *holder,
const Shape *shape, Register object, TypedOrValueRegister output,
RepatchLabel *failures, Label *nonRepatchFailures = NULL)
void generateReadSlot(JSContext *cx, MacroAssembler &masm, JSObject *obj, PropertyName *propName,
JSObject *holder, const Shape *shape, Register object, TypedOrValueRegister output,
RepatchLabel *failures, Label *nonRepatchFailures = NULL)
{
// If there's a single jump to |failures|, we can patch the shape guard
// jump directly. Otherwise, jump to the end of the stub, so there's a
@ -149,6 +226,7 @@ struct GetNativePropertyStub
}
}
// Generate prototype guards.
Register holderReg;
if (obj != holder) {
// Note: this may clobber the object register if it's used as scratch.
@ -165,6 +243,7 @@ struct GetNativePropertyStub
holderReg = object;
}
// Slot access.
if (holder->isFixedSlot(shape->slot())) {
Address addr(holderReg, JSObject::getFixedSlotOffset(shape->slot()));
masm.loadTypedOrValue(addr, output);
@ -196,17 +275,284 @@ struct GetNativePropertyStub
masm.bind(failures);
}
}
bool generateCallGetter(JSContext *cx, MacroAssembler &masm, JSObject *obj,
PropertyName *propName, JSObject *holder, const Shape *shape,
RegisterSet &liveRegs, Register object, TypedOrValueRegister output,
types::TypeSet *outputTypes, void *returnAddr, jsbytecode *pc,
RepatchLabel *failures, Label *nonRepatchFailures = NULL)
{
// Initial shape check.
Label stubFailure;
masm.branchPtr(Assembler::NotEqual, Address(object, JSObject::offsetOfShape()),
ImmGCPtr(obj->lastProperty()), &stubFailure);
// If this is a stub for a ListBase object, guard the following:
// 1. The object is a ListBase.
// 2. The object does not have expando properties, or has an expando
// which is known to not have the desired property.
if (IsCacheableListBase(obj)) {
Address handlerAddr(object, JSObject::getFixedSlotOffset(JSSLOT_PROXY_HANDLER));
Address expandoAddr(object, JSObject::getFixedSlotOffset(GetListBaseExpandoSlot()));
// Check that object is a ListBase.
masm.branchPrivatePtr(Assembler::NotEqual, handlerAddr, ImmWord(GetProxyHandler(obj)), &stubFailure);
// For the remaining code, we need to reserve some registers to load a value.
// This is ugly, but unavoidable.
RegisterSet listBaseRegSet(RegisterSet::All());
listBaseRegSet.take(AnyRegister(object));
ValueOperand tempVal = listBaseRegSet.takeValueOperand();
masm.pushValue(tempVal);
Label failListBaseCheck;
Label listBaseOk;
Value expandoVal = obj->getFixedSlot(GetListBaseExpandoSlot());
JSObject *expando = expandoVal.isObject() ? &(expandoVal.toObject()) : NULL;
JS_ASSERT_IF(expando, expando->isNative() && expando->getProto() == NULL);
masm.loadValue(expandoAddr, tempVal);
if (expando && expando->nativeLookupNoAllocation(propName)) {
// Reference object has an expando that doesn't define the name.
// Check incoming object's expando and make sure it's an object.
// If checkExpando is true, we'll temporarily use register(s) for a ValueOperand.
// If we do that, we save the register(s) on stack before use and pop them
// on both exit paths.
masm.branchTestObject(Assembler::NotEqual, tempVal, &failListBaseCheck);
masm.extractObject(tempVal, tempVal.scratchReg());
masm.branchPtr(Assembler::Equal,
Address(tempVal.scratchReg(), JSObject::offsetOfShape()),
ImmGCPtr(expando->lastProperty()),
&listBaseOk);
} else {
// Reference object has no expando. Check incoming object and ensure
// it has no expando.
masm.branchTestUndefined(Assembler::Equal, tempVal, &listBaseOk);
}
// Failure case: restore the tempVal registers and jump to failures.
masm.bind(&failListBaseCheck);
masm.popValue(tempVal);
masm.jump(&stubFailure);
// Success case: restore the tempval and proceed.
masm.bind(&listBaseOk);
masm.popValue(tempVal);
}
// Reserve scratch register for prototype guards.
bool restoreScratch = false;
Register scratchReg = Register::FromCode(0); // Quell compiler warning.
// If we need a scratch register, use either an output register or the object
// register (and restore it afterwards). After this point, we cannot jump
// directly to |stubFailure| since we may still have to pop the object register.
Label prototypeFailures;
JS_ASSERT(output.hasValue());
scratchReg = output.valueReg().scratchReg();
// Note: this may clobber the object register if it's used as scratch.
if (obj != holder)
GeneratePrototypeGuards(cx, masm, obj, holder, object, scratchReg, &prototypeFailures);
// Guard on the holder's shape.
Register holderReg = scratchReg;
masm.movePtr(ImmGCPtr(holder), holderReg);
masm.branchPtr(Assembler::NotEqual,
Address(holderReg, JSObject::offsetOfShape()),
ImmGCPtr(holder->lastProperty()),
&prototypeFailures);
if (restoreScratch)
masm.pop(scratchReg);
// Now we're good to go to invoke the native call.
// saveLive()
masm.PushRegsInMask(liveRegs);
// Remaining registers should basically be free, but we need to use |object| still
// so leave it alone.
RegisterSet regSet(RegisterSet::All());
regSet.take(AnyRegister(object));
// This is a slower stub path, and we're going to be doing a call anyway. Don't need
// to try so hard to not use the stack. Scratch regs are just taken from the register
// set not including the input, current value saved on the stack, and restored when
// we're done with it.
scratchReg = regSet.takeGeneral();
Register argJSContextReg = regSet.takeGeneral();
Register argUintNReg = regSet.takeGeneral();
Register argVpReg = regSet.takeGeneral();
// Shape has a getter function.
bool callNative = IsCacheableGetPropCallNative(obj, holder, shape);
JS_ASSERT_IF(!callNative, IsCacheableGetPropCallPropertyOp(obj, holder, shape));
// TODO: ensure stack is aligned?
DebugOnly<uint32> initialStack = masm.framePushed();
masm.checkStackAlignment();
Label success, exception;
// Push the IonCode pointer for the stub we're generating.
// WARNING:
// WARNING: If IonCode ever becomes relocatable, the following code is incorrect.
// WARNING: Note that we're not marking the pointer being pushed as an ImmGCPtr.
// WARNING: This is not a marking issue since the stub IonCode won't be collected
// WARNING: between the time it's called and when we get here, but it would fail
// WARNING: if the IonCode object ever moved, since we'd be rooting a nonsense
// WARNING: value here.
// WARNING:
stubCodePatchOffset = masm.pushWithPatch(ImmWord(uintptr_t(-1)));
// Manually adjust framePushed to account for this push which is not otherwise
// accounted for.
masm.setFramePushed(masm.framePushed() + sizeof(uintptr_t));
if (callNative) {
JS_ASSERT(shape->hasGetterValue() && shape->getterValue().isObject() &&
shape->getterValue().toObject().isFunction());
JSFunction *target = shape->getterValue().toObject().toFunction();
JS_ASSERT(target);
JS_ASSERT(target->isNative());
// Native functions have the signature:
// bool (*)(JSContext *, unsigned, Value *vp)
// Where vp[0] is space for an outparam, vp[1] is |this|, and vp[2] onward
// are the function arguments.
// Construct vp array:
// Push object value for |this|
masm.Push(TypedOrValueRegister(MIRType_Object, AnyRegister(object)));
// Push callee/outparam.
masm.Push(ObjectValue(*target));
// Preload arguments into registers.
masm.loadJSContext(argJSContextReg);
masm.move32(Imm32(0), argUintNReg);
masm.movePtr(StackPointer, argVpReg);
if (!masm.buildOOLFakeExitFrame(returnAddr))
return false;
masm.enterFakeExitFrame(ION_FRAME_OOL_NATIVE_GETTER);
// Construct and execute call.
masm.setupUnalignedABICall(3, scratchReg);
masm.passABIArg(argJSContextReg);
masm.passABIArg(argUintNReg);
masm.passABIArg(argVpReg);
masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, target->native()));
// Test for failure.
masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, &exception);
// Load the outparam vp[0] into output register(s).
masm.loadValue(
Address(StackPointer, IonOOLNativeGetterExitFrameLayout::offsetOfResult()),
JSReturnOperand);
} else {
Register argObjReg = argUintNReg;
Register argIdReg = regSet.takeGeneral();
PropertyOp target = shape->getterOp();
JS_ASSERT(target);
// JSPropertyOp: JSBool fn(JSContext *cx, JSHandleObject obj, JSHandleId id, JSMutableHandleValue vp)
// Push args on stack first so we can take pointers to make handles.
masm.Push(UndefinedValue());
masm.movePtr(StackPointer, argVpReg);
// push canonical jsid from shape instead of propertyname.
jsid propId;
if (!shape->getUserId(cx, &propId))
return false;
masm.Push(propId, scratchReg);
masm.movePtr(StackPointer, argIdReg);
masm.Push(object);
masm.movePtr(StackPointer, argObjReg);
masm.loadJSContext(argJSContextReg);
if (!masm.buildOOLFakeExitFrame(returnAddr))
return false;
masm.enterFakeExitFrame(ION_FRAME_OOL_PROPERTY_OP);
// Make the call.
masm.setupUnalignedABICall(4, scratchReg);
masm.passABIArg(argJSContextReg);
masm.passABIArg(argObjReg);
masm.passABIArg(argIdReg);
masm.passABIArg(argVpReg);
masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, target));
// Test for failure.
masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, &exception);
// Load the outparam vp[0] into output register(s).
masm.loadValue(
Address(StackPointer, IonOOLPropertyOpExitFrameLayout::offsetOfResult()),
JSReturnOperand);
}
// If generating getter call stubs, then return type MUST have been generalized
// to MIRType_Value.
masm.jump(&success);
// Handle exception case.
masm.bind(&exception);
masm.handleException();
// Handle success case.
masm.bind(&success);
masm.storeCallResultValue(output);
// The next instruction is removing the footer of the exit frame, so there
// is no need for leaveFakeExitFrame.
// Move the StackPointer back to its original location, unwinding the native exit frame.
if (callNative)
masm.adjustStack(IonOOLNativeGetterExitFrameLayout::Size());
else
masm.adjustStack(IonOOLPropertyOpExitFrameLayout::Size());
JS_ASSERT(masm.framePushed() == initialStack);
// restoreLive()
masm.PopRegsInMask(liveRegs);
// Rejoin jump.
RepatchLabel rejoin_;
rejoinOffset = masm.jumpWithPatch(&rejoin_);
masm.bind(&rejoin_);
// Exit jump.
masm.bind(&prototypeFailures);
if (restoreScratch)
masm.pop(scratchReg);
masm.bind(&stubFailure);
if (nonRepatchFailures)
masm.bind(nonRepatchFailures);
RepatchLabel exit_;
exitOffset = masm.jumpWithPatch(&exit_);
masm.bind(&exit_);
return true;
}
};
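To follow the JSNative branch of generateCallGetter() above: the generated stub reproduces the native calling convention by hand — argc of 0, vp[0] holding the callee (and later the result), vp[1] holding |this| — calls the C function through the ABI, and boxes vp[0] into the cache's output register. A rough C++ equivalent, ignoring the fake exit frame, the live-register save/restore, and the rooting that the jitcode version has to deal with (the helper name is invented for illustration):

// Illustrative equivalent of the stub's JSNative path; not part of the patch.
static bool
CallNativeGetterSketch(JSContext *cx, JSObject *obj, JSFunction *getter, Value *result)
{
    JS_ASSERT(getter->isNative());

    Value vp[2];
    vp[0] = ObjectValue(*getter);   // callee slot, reused as the outparam
    vp[1] = ObjectValue(*obj);      // |this|

    if (!getter->native()(cx, 0, vp))
        return false;               // stub equivalent: jump to the exception path

    *result = vp[0];                // stub equivalent: loadValue() of the exit frame's result slot
    return true;
}

The JSPropertyOp branch is the same idea with a (cx, obj, id, vp) signature; the object, the canonical jsid, and an UndefinedValue() result slot are pushed onto the stack first precisely so their stack addresses can be passed to the op as handles.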
bool
IonCacheGetProperty::attachNative(JSContext *cx, IonScript *ion, JSObject *obj, JSObject *holder,
const Shape *shape)
IonCacheGetProperty::attachReadSlot(JSContext *cx, IonScript *ion, JSObject *obj, JSObject *holder,
const Shape *shape)
{
MacroAssembler masm;
RepatchLabel failures;
GetNativePropertyStub getprop;
getprop.generate(cx, masm, obj, holder, shape, object(), output(), &failures);
getprop.generateReadSlot(cx, masm, obj, name(), holder, shape, object(), output(), &failures);
Linker linker(masm);
IonCode *code = linker.newCode(cx);
@ -233,54 +579,106 @@ IonCacheGetProperty::attachNative(JSContext *cx, IonScript *ion, JSObject *obj,
return true;
}
static bool
IsCacheableProtoChain(JSObject *obj, JSObject *holder)
bool
IonCacheGetProperty::attachCallGetter(JSContext *cx, IonScript *ion, JSObject *obj,
JSObject *holder, const Shape *shape,
const SafepointIndex *safepointIndex, void *returnAddr)
{
while (obj != holder) {
/*
* We cannot assume that we find the holder object on the prototype
* chain and must check for null proto. The prototype chain can be
* altered during the lookupProperty call.
*/
JSObject *proto = obj->getProto();
if (!proto || !proto->isNative())
return false;
obj = proto;
}
return true;
}
MacroAssembler masm;
RepatchLabel failures;
static bool
IsCacheableGetProp(JSObject *obj, JSObject *holder, const Shape *shape)
{
return (shape &&
IsCacheableProtoChain(obj, holder) &&
shape->hasSlot() &&
shape->hasDefaultGetter());
JS_ASSERT(!idempotent());
JS_ASSERT(allowGetters());
// Need to set correct framePushed on the masm so that exit frame descriptors are
// properly constructed.
masm.setFramePushed(script->ionScript()->frameSize());
// Generating a call getter may need the pushed typeset.
types::StackTypeSet *outputTypes = script->analysis()->pushedTypes(pc, 0);
GetNativePropertyStub getprop;
if (!getprop.generateCallGetter(cx, masm, obj, name(), holder, shape, liveRegs,
object(), output(), outputTypes, returnAddr, pc,
&failures))
{
return false;
}
Linker linker(masm);
IonCode *code = linker.newCode(cx);
if (!code)
return false;
getprop.rejoinOffset.fixup(&masm);
getprop.exitOffset.fixup(&masm);
getprop.stubCodePatchOffset.fixup(&masm);
if (ion->invalidated())
return true;
Assembler::patchDataWithValueCheck(CodeLocationLabel(code, getprop.stubCodePatchOffset),
ImmWord(uintptr_t(code)), ImmWord(uintptr_t(-1)));
CodeLocationJump rejoinJump(code, getprop.rejoinOffset);
CodeLocationJump exitJump(code, getprop.exitOffset);
CodeLocationJump lastJump_ = lastJump();
PatchJump(lastJump_, CodeLocationLabel(code));
PatchJump(rejoinJump, rejoinLabel());
PatchJump(exitJump, cacheLabel());
updateLastJump(exitJump);
IonSpew(IonSpew_InlineCaches, "Generated native GETPROP stub at %p %s", code->raw(),
idempotent() ? "(idempotent)" : "(not idempotent)");
return true;
}
static bool
TryAttachNativeStub(JSContext *cx, IonScript *ion,
IonCacheGetProperty &cache, HandleObject obj,
HandlePropertyName name, bool *isCacheableNative)
HandlePropertyName name, const SafepointIndex *safepointIndex,
void *returnAddr, bool *isCacheable)
{
JS_ASSERT(!*isCacheableNative);
JS_ASSERT(!*isCacheable);
if (!obj->isNative())
RootedObject checkObj(cx, obj);
bool isListBase = IsCacheableListBase(obj);
if (isListBase)
checkObj = obj->getTaggedProto().toObjectOrNull();
if (!checkObj || !checkObj->isNative())
return true;
// If the cache is idempotent, watch out for resolve hooks or non-native
// objects on the proto chain. We check this before calling lookupProperty,
// to make sure no effectful lookup hooks or resolve hooks are called.
if (cache.idempotent() && !obj->hasIdempotentProtoChain())
if (cache.idempotent() && !checkObj->hasIdempotentProtoChain())
return true;
RootedShape shape(cx);
RootedObject holder(cx);
if (!JSObject::lookupProperty(cx, obj, name, &holder, &shape))
if (!JSObject::lookupProperty(cx, checkObj, name, &holder, &shape))
return false;
if (!IsCacheableGetProp(obj, holder, shape))
// Check what kind of cache stub we can emit: either a slot read,
// or a getter call.
bool readSlot = false;
bool callGetter = false;
if (IsCacheableGetPropReadSlot(checkObj, holder, shape)) {
readSlot = true;
} else if (IsCacheableGetPropCallNative(checkObj, holder, shape) ||
IsCacheableGetPropCallPropertyOp(checkObj, holder, shape))
{
// Don't enable getter calls if the cache is idempotent, since
// getters can be effectful.
if (!cache.idempotent() && cache.allowGetters())
callGetter = true;
}
// Only continue if one of the cache methods is viable.
if (!readSlot && !callGetter)
return true;
// TI infers the possible types of native object properties. There's one
@ -295,13 +693,19 @@ TryAttachNativeStub(JSContext *cx, IonScript *ion,
return true;
}
*isCacheableNative = true;
*isCacheable = true;
// readSlot and callGetter are mutually exclusive
JS_ASSERT_IF(readSlot, !callGetter);
JS_ASSERT_IF(callGetter, !readSlot);
if (cache.stubCount() < MAX_STUBS) {
cache.incrementStubCount();
if (!cache.attachNative(cx, ion, obj, holder, shape))
return false;
if (readSlot)
return cache.attachReadSlot(cx, ion, obj, holder, shape);
else
return cache.attachCallGetter(cx, ion, obj, holder, shape, safepointIndex, returnAddr);
}
return true;
@ -311,7 +715,9 @@ bool
js::ion::GetPropertyCache(JSContext *cx, size_t cacheIndex, HandleObject obj, MutableHandleValue vp)
{
AutoFlushCache afc ("GetPropertyCache");
JSScript *topScript = GetTopIonJSScript(cx);
const SafepointIndex *safepointIndex;
void *returnAddr;
JSScript *topScript = GetTopIonJSScript(cx, &safepointIndex, &returnAddr);
IonScript *ion = topScript->ionScript();
IonCacheGetProperty &cache = ion->getCache(cacheIndex).toGetProperty();
@ -331,11 +737,11 @@ js::ion::GetPropertyCache(JSContext *cx, size_t cacheIndex, HandleObject obj, Mu
// For now, just stop generating new stubs once we hit the stub count
// limit. Once we can make calls from within generated stubs, a new call
// stub will be generated instead and the previous stubs unlinked.
bool isCacheableNative = false;
if (!TryAttachNativeStub(cx, ion, cache, obj, name, &isCacheableNative))
bool isCacheable = false;
if (!TryAttachNativeStub(cx, ion, cache, obj, name, safepointIndex, returnAddr, &isCacheable))
return false;
if (cache.idempotent() && !isCacheableNative) {
if (cache.idempotent() && !isCacheable) {
// Invalidate the cache if the property was not found, or was found on
// a non-native object. This ensures:
// 1) The property read has no observable side-effects.
@ -683,7 +1089,7 @@ IonCacheGetElement::attachGetProp(JSContext *cx, IonScript *ion, HandleObject ob
if (!JSObject::lookupProperty(cx, obj, name, &holder, &shape))
return false;
if (!IsCacheableGetProp(obj, holder, shape)) {
if (!IsCacheableGetPropReadSlot(obj, holder, shape)) {
IonSpew(IonSpew_InlineCaches, "GETELEM uncacheable property");
return true;
}
@ -699,7 +1105,7 @@ IonCacheGetElement::attachGetProp(JSContext *cx, IonScript *ion, HandleObject ob
masm.branchTestValue(Assembler::NotEqual, val, idval, &nonRepatchFailures);
GetNativePropertyStub getprop;
getprop.generate(cx, masm, obj, holder, shape, object(), output(), &failures, &nonRepatchFailures);
getprop.generateReadSlot(cx, masm, obj, name, holder, shape, object(), output(), &failures, &nonRepatchFailures);
Linker linker(masm);
IonCode *code = linker.newCode(cx);
@ -1148,7 +1554,7 @@ IsCacheableName(JSContext *cx, HandleObject scopeChain, HandleObject obj, Handle
if (obj->isGlobal()) {
// Support only simple property lookups.
if (!IsCacheableGetProp(obj, holder, shape))
if (!IsCacheableGetPropReadSlot(obj, holder, shape))
return false;
} else if (obj->isCall()) {
if (!shape->hasDefaultGetter())

View File

@ -111,6 +111,7 @@ class IonCache
Register object;
PropertyName *name;
TypedOrValueRegisterSpace output;
bool allowGetters;
} getprop;
struct {
Register object;
@ -256,20 +257,26 @@ class IonCacheGetProperty : public IonCache
CodeOffsetLabel cacheLabel,
RegisterSet liveRegs,
Register object, PropertyName *name,
TypedOrValueRegister output)
TypedOrValueRegister output,
bool allowGetters)
{
init(GetProperty, liveRegs, initialJump, rejoinLabel, cacheLabel);
u.getprop.object = object;
u.getprop.name = name;
u.getprop.output.data() = output;
u.getprop.allowGetters = allowGetters;
}
Register object() const { return u.getprop.object; }
PropertyName *name() const { return u.getprop.name; }
TypedOrValueRegister output() const { return u.getprop.output.data(); }
bool allowGetters() const { return u.getprop.allowGetters; }
bool attachNative(JSContext *cx, IonScript *ion, JSObject *obj, JSObject *holder,
const Shape *shape);
bool attachReadSlot(JSContext *cx, IonScript *ion, JSObject *obj, JSObject *holder,
const Shape *shape);
bool attachCallGetter(JSContext *cx, IonScript *ion, JSObject *obj, JSObject *holder,
const Shape *shape,
const SafepointIndex *safepointIndex, void *returnAddr);
};
class IonCacheSetProperty : public IonCache

View File

@ -271,6 +271,33 @@ class MacroAssembler : public MacroAssemblerSpecific
using MacroAssemblerSpecific::Push;
void Push(jsid id, Register scratchReg) {
if (JSID_IS_GCTHING(id)) {
// If we're pushing a gcthing, then we can't just push the tagged jsid
// value since the GC won't have any idea that the push instruction
// carries a reference to a gcthing. Need to unpack the pointer,
// push it using ImmGCPtr, and then rematerialize the id at runtime.
// double-checking this here to ensure we don't lose sync
// with implementation of JSID_IS_GCTHING.
if (JSID_IS_OBJECT(id)) {
JSObject *obj = JSID_TO_OBJECT(id);
movePtr(ImmGCPtr(obj), scratchReg);
JS_ASSERT(((size_t)obj & JSID_TYPE_MASK) == 0);
orPtr(Imm32(JSID_TYPE_OBJECT), scratchReg);
Push(scratchReg);
} else {
JSString *str = JSID_TO_STRING(id);
JS_ASSERT(((size_t)str & JSID_TYPE_MASK) == 0);
JS_ASSERT(JSID_TYPE_STRING == 0x0);
Push(ImmGCPtr(str));
}
} else {
size_t idbits = JSID_BITS(id);
Push(ImmWord(idbits));
}
}
void Push(TypedOrValueRegister v) {
if (v.hasValue())
Push(v.valueReg());
@ -447,13 +474,7 @@ class MacroAssembler : public MacroAssemblerSpecific
// Push VMFunction pointer, to mark arguments.
Push(ImmWord(f));
}
void enterFakeExitFrame() {
linkExitFrame();
Push(ImmWord(uintptr_t(NULL)));
Push(ImmWord(uintptr_t(NULL)));
}
void enterFakeDOMFrame(void *codeVal) {
void enterFakeExitFrame(void *codeVal = NULL) {
linkExitFrame();
Push(ImmWord(uintptr_t(codeVal)));
Push(ImmWord(uintptr_t(NULL)));
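The new Push(jsid, Register) overload above depends on a jsid being a pointer-sized word with a small type tag in its low bits: string ids are tagged 0, so pushing the JSString pointer as an ImmGCPtr already produces the right id bits, while object ids are rebuilt by moving the object pointer into the scratch register with ImmGCPtr and OR'ing JSID_TYPE_OBJECT back in. A toy model of that layout, with stand-in constants rather than the real jsapi.h ones:

// Toy illustration of the jsid tagging that Push(jsid, Register) relies on.
#include <cassert>
#include <cstdint>

static const uintptr_t ID_TYPE_MASK   = 0x7;  // stand-in for JSID_TYPE_MASK
static const uintptr_t ID_TYPE_STRING = 0x0;  // stand-in for JSID_TYPE_STRING
static const uintptr_t ID_TYPE_OBJECT = 0x4;  // stand-in for JSID_TYPE_OBJECT

// Object ids: the pointer is tag-aligned, so the tag can be OR'd in afterwards,
// mirroring the movePtr/orPtr/Push sequence in the masm code.
static uintptr_t MakeObjectIdBits(void *obj)
{
    assert((uintptr_t(obj) & ID_TYPE_MASK) == 0);
    return uintptr_t(obj) | ID_TYPE_OBJECT;
}

// String ids: the tag is zero, so the id bits are simply the pointer bits.
static uintptr_t MakeStringIdBits(void *str)
{
    assert((uintptr_t(str) & ID_TYPE_MASK) == 0);
    return uintptr_t(str) | ID_TYPE_STRING;
}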

View File

@ -4078,6 +4078,7 @@ class MGetPropertyCache
{
CompilerRootPropertyName name_;
bool idempotent_;
bool allowGetters_;
InlinePropertyTable *inlinePropertyTable_;
@ -4085,6 +4086,7 @@ class MGetPropertyCache
: MUnaryInstruction(obj),
name_(name),
idempotent_(false),
allowGetters_(false),
inlinePropertyTable_(NULL)
{
setResultType(MIRType_Value);
@ -4129,6 +4131,12 @@ class MGetPropertyCache
idempotent_ = true;
setMovable();
}
bool allowGetters() const {
return allowGetters_;
}
void setAllowGetters() {
allowGetters_ = true;
}
TypePolicy *typePolicy() { return this; }
bool congruentTo(MDefinition * const &ins) const {

View File

@ -449,6 +449,15 @@ class RegisterSet {
Register takeGeneral() {
return gpr_.takeAny();
}
ValueOperand takeValueOperand() {
#if defined(JS_NUNBOX32)
return ValueOperand(takeGeneral(), takeGeneral());
#elif defined(JS_PUNBOX64)
return ValueOperand(takeGeneral());
#else
#error "Bad architecture"
#endif
}
void take(const AnyRegister &reg) {
if (reg.isFloat())
fpu_.take(reg.fpu());
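takeValueOperand() is #ifdef'd because a boxed Value occupies two general-purpose registers on 32-bit nunbox builds (type tag and payload are separate 32-bit words) but only one on 64-bit punbox builds. A trivial illustration of the same split, assuming the usual JS_NUNBOX32 / JS_PUNBOX64 configuration macros:

// Illustrative only: the register footprint that takeValueOperand() encodes.
static inline unsigned
GPRsPerBoxedValue()
{
#if defined(JS_NUNBOX32)
    return 2;   // one register for the type tag, one for the payload
#elif defined(JS_PUNBOX64)
    return 1;   // tag and payload share a single 64-bit register
#else
#error "Bad architecture"
#endif
}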

View File

@ -280,6 +280,12 @@ TypeInferenceOracle::propertyReadIdempotent(JSScript *script, jsbytecode *pc, Ha
return true;
}
bool
TypeInferenceOracle::propertyReadAccessGetter(JSScript *script, jsbytecode *pc)
{
return script->analysis()->getCode(pc).accessGetter;
}
bool
TypeInferenceOracle::elementReadIsDenseArray(JSScript *script, jsbytecode *pc)
{

View File

@ -84,6 +84,9 @@ class TypeOracle
virtual bool propertyReadIdempotent(JSScript *script, jsbytecode *pc, HandleId id) {
return false;
}
virtual bool propertyReadAccessGetter(JSScript *script, jsbytecode *pc) {
return false;
}
virtual types::HeapTypeSet *globalPropertyWrite(JSScript *script, jsbytecode *pc,
jsid id, bool *canSpecialize) {
*canSpecialize = true;
@ -234,6 +237,7 @@ class TypeInferenceOracle : public TypeOracle
types::StackTypeSet *propertyRead(JSScript *script, jsbytecode *pc);
types::StackTypeSet *propertyReadBarrier(JSScript *script, jsbytecode *pc);
bool propertyReadIdempotent(JSScript *script, jsbytecode *pc, HandleId id);
bool propertyReadAccessGetter(JSScript *script, jsbytecode *pc);
types::HeapTypeSet *globalPropertyWrite(JSScript *script, jsbytecode *pc, jsid id, bool *canSpecialize);
types::StackTypeSet *returnTypeSet(JSScript *script, jsbytecode *pc, types::StackTypeSet **barrier);
types::StackTypeSet *getCallTarget(JSScript *caller, uint32 argc, jsbytecode *pc);

View File

@ -1343,6 +1343,21 @@ MacroAssemblerARMCompat::buildFakeExitFrame(const Register &scratch, uint32 *off
return true;
}
bool
MacroAssemblerARMCompat::buildOOLFakeExitFrame(void *fakeReturnAddr)
{
DebugOnly<uint32> initialDepth = framePushed();
uint32 descriptor = MakeFrameDescriptor(framePushed(), IonFrame_JS);
Push(Imm32(descriptor)); // descriptor_
enterNoPool();
push(Imm32((uint32) fakeReturnAddr));
leaveNoPool();
return true;
}
void
MacroAssemblerARMCompat::callWithExitFrame(IonCode *target)
{

View File

@ -602,6 +602,15 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
void branchPtr(Condition cond, const Address &lhs, Register rhs, Label *label) {
branch32(cond, lhs, rhs, label);
}
void branchPrivatePtr(Condition cond, const Address &lhs, ImmWord ptr, Label *label) {
branchPtr(cond, lhs, ptr, label);
}
void branchPrivatePtr(Condition cond, Register lhs, ImmWord ptr, Label *label) {
branchPtr(cond, lhs, ptr, label);
}
template<typename T>
void branchTestDouble(Condition cond, const T & t, Label *label) {
Condition c = testDouble(cond, t);
@ -870,6 +879,7 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
// Builds an exit frame on the stack, with a return address to an internal
// non-function. Returns offset to be passed to markSafepointAt().
bool buildFakeExitFrame(const Register &scratch, uint32 *offset);
bool buildOOLFakeExitFrame(void *fakeReturnAddr);
void callWithExitFrame(IonCode *target);
void callWithExitFrame(IonCode *target, Register dynStack);

View File

@ -375,6 +375,13 @@ class MacroAssemblerX86Shared : public Assembler
return true;
}
bool buildOOLFakeExitFrame(void *fakeReturnAddr) {
uint32 descriptor = MakeFrameDescriptor(framePushed(), IonFrame_JS);
Push(Imm32(descriptor));
Push(ImmWord(fakeReturnAddr));
return true;
}
void callWithExitFrame(IonCode *target) {
uint32 descriptor = MakeFrameDescriptor(framePushed(), IonFrame_JS);
Push(Imm32(descriptor));

View File

@ -416,6 +416,11 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared
branchPtr(cond, Operand(ScratchReg, 0x0), ptr, label);
}
template <typename T>
void branchPrivatePtr(Condition cond, T lhs, ImmWord ptr, Label *label) {
branchPtr(cond, lhs, ImmWord(ptr.value >> 1), label);
}
template <typename T, typename S>
void branchPtr(Condition cond, T lhs, S ptr, Label *label) {
cmpPtr(Operand(lhs), ptr);
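The ARM and x86 variants of branchPrivatePtr simply forward to branchPtr, but the x64 variant above compares against ptr.value >> 1. The difference reflects how private pointers are boxed in a Value: on 32-bit nunbox targets the payload word holds the raw pointer, while on 64-bit punbox targets a private pointer is stored shifted right by one bit so the boxed word carries no tag bits. A toy sketch of that 64-bit encoding (not the real jsval internals), assuming at least 2-byte pointer alignment:

// Toy model of the punbox64 private-pointer storage that x64's branchPrivatePtr accounts for.
#include <cassert>
#include <cstdint>

static uint64_t BoxPrivateBits(void *ptr)
{
    uint64_t bits = reinterpret_cast<uintptr_t>(ptr);
    assert((bits & 1) == 0);   // requires 2-byte alignment
    return bits >> 1;          // stored form: the boxed word has no tag bits set
}

static void *UnboxPrivateBits(uint64_t bits)
{
    return reinterpret_cast<void *>(bits << 1);
}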

View File

@ -396,6 +396,12 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared
cmpl(Operand(lhs), ptr);
j(cond, label);
}
template <typename T>
void branchPrivatePtr(Condition cond, T lhs, ImmWord ptr, Label *label) {
branchPtr(cond, lhs, ptr, label);
}
template <typename T, typename S>
void branchPtr(Condition cond, T lhs, S ptr, RepatchLabel *label) {
cmpl(Operand(lhs), ptr);