Eagerly generate a single copy of Ion stubs and wrappers, bug 786146. r=dvander

Brian Hackett 2012-11-14 06:46:31 -08:00
parent 76912e334e
commit 760213adf7
24 changed files with 527 additions and 517 deletions
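In short, the stubs and wrappers that each IonCompartment used to generate lazily (enterJIT, the invalidator, the arguments rectifier, bailout tables and handler, the pre-barrier thunk, and the VM function wrappers) now live on a single per-runtime IonRuntime and are generated eagerly in IonRuntime::initialize(); each IonCompartment keeps a pointer to that object, so its getters become infallible and no longer take a JSContext. A runnable toy sketch of the new ownership shape (class and member names are borrowed from the patch; everything else is simplified and hypothetical):

#include <cstdio>

struct IonCode { const char *name; };          // stand-in for an executable stub

struct IonRuntime {
    IonCode enterJIT_;
    IonCode invalidator_;
    bool initialize() {                        // the patch generates every stub here,
        enterJIT_.name = "enterJIT";           // once, in the atoms compartment
        invalidator_.name = "invalidator";
        return true;
    }
};

struct IonCompartment {
    IonRuntime *rt;                            // shared, runtime-wide Ion state
    explicit IonCompartment(IonRuntime *rt) : rt(rt) {}

    IonCode *enterJIT() { return &rt->enterJIT_; }                // no cx, no NULL check
    IonCode *getInvalidationThunk() { return &rt->invalidator_; }
};

int main() {
    IonRuntime rt;
    if (!rt.initialize())
        return 1;
    IonCompartment c1(&rt), c2(&rt);
    std::printf("%d\n", c1.enterJIT() == c2.enterJIT() ? 1 : 0);  // 1: one copy, shared
    return 0;
}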

File diff suppressed because it is too large.

View File

@ -123,20 +123,32 @@ ion::InitializeIon()
return true;
}
IonCompartment::IonCompartment()
IonRuntime::IonRuntime()
: execAlloc_(NULL),
enterJIT_(NULL),
bailoutHandler_(NULL),
argumentsRectifier_(NULL),
invalidator_(NULL),
functionWrappers_(NULL),
flusher_(NULL)
functionWrappers_(NULL)
{
}
bool
IonCompartment::initialize(JSContext *cx)
IonRuntime::~IonRuntime()
{
js_delete(functionWrappers_);
}
bool
IonRuntime::initialize(JSContext *cx)
{
AutoEnterAtomsCompartment ac(cx);
if (!cx->compartment->ensureIonCompartmentExists(cx))
return false;
IonContext ictx(cx, cx->compartment, NULL);
AutoFlushCache afc("IonRuntime::initialize");
execAlloc_ = cx->runtime->getExecAlloc(cx);
if (!execAlloc_)
return false;
@ -145,9 +157,53 @@ IonCompartment::initialize(JSContext *cx)
if (!functionWrappers_ || !functionWrappers_->init())
return false;
if (!bailoutTables_.reserve(FrameSizeClass::ClassLimit().classId()))
return false;
for (uint32 id = 0;; id++) {
FrameSizeClass class_ = FrameSizeClass::FromClass(id);
if (class_ == FrameSizeClass::ClassLimit())
break;
bailoutTables_.infallibleAppend(NULL);
bailoutTables_[id] = generateBailoutTable(cx, id);
if (!bailoutTables_[id])
return false;
}
bailoutHandler_ = generateBailoutHandler(cx);
if (!bailoutHandler_)
return false;
argumentsRectifier_ = generateArgumentsRectifier(cx);
if (!argumentsRectifier_)
return false;
invalidator_ = generateInvalidator(cx);
if (!invalidator_)
return false;
enterJIT_ = generateEnterJIT(cx);
if (!enterJIT_)
return false;
preBarrier_ = generatePreBarrier(cx);
if (!preBarrier_)
return false;
for (VMFunction *fun = VMFunction::functions; fun; fun = fun->next) {
if (!generateVMWrapper(cx, *fun))
return false;
}
return true;
}
IonCompartment::IonCompartment(IonRuntime *rt)
: rt(rt),
flusher_(NULL)
{
}
void
ion::FinishOffThreadBuilder(IonBuilder *builder)
{
@ -171,34 +227,18 @@ FinishAllOffThreadCompilations(IonCompartment *ion)
compilations.clear();
}
/* static */ void
IonRuntime::Mark(JSTracer *trc)
{
for (gc::CellIterUnderGC i(trc->runtime->atomsCompartment, gc::FINALIZE_IONCODE); !i.done(); i.next()) {
IonCode *code = i.get<IonCode>();
MarkIonCodeRoot(trc, &code, "wrapper");
}
}
void
IonCompartment::mark(JSTracer *trc, JSCompartment *compartment)
{
// This function marks Ion code objects that must be kept alive if there is
// any Ion code currently running. These pointers are marked at the start
// of incremental GC. Entering Ion code in the middle of an incremental GC
// triggers a read barrier on both these pointers, so they will still be
// marked in that case.
bool runningIonCode = false;
for (IonActivationIterator iter(trc->runtime); iter.more(); ++iter) {
IonActivation *activation = iter.activation();
if (activation->compartment() != compartment)
continue;
runningIonCode = true;
break;
}
// Don't destroy enterJIT if we are running Ion code. Note that enterJIT is
// not used for JM -> Ion calls, so it may be NULL in that case.
if (runningIonCode && enterJIT_)
MarkIonCodeRoot(trc, enterJIT_.unsafeGet(), "enterJIT");
// functionWrappers_ are not marked because this is a WeakCache of VM
// function implementations.
// Cancel any active or pending off thread compilations.
CancelOffThreadIonCompile(compartment, NULL);
FinishAllOffThreadCompilations(this);
@ -207,55 +247,26 @@ IonCompartment::mark(JSTracer *trc, JSCompartment *compartment)
void
IonCompartment::sweep(FreeOp *fop)
{
if (enterJIT_ && !IsIonCodeMarked(enterJIT_.unsafeGet()))
enterJIT_ = NULL;
if (bailoutHandler_ && !IsIonCodeMarked(bailoutHandler_.unsafeGet()))
bailoutHandler_ = NULL;
if (argumentsRectifier_ && !IsIonCodeMarked(argumentsRectifier_.unsafeGet()))
argumentsRectifier_ = NULL;
if (invalidator_ && !IsIonCodeMarked(invalidator_.unsafeGet()))
invalidator_ = NULL;
if (preBarrier_ && !IsIonCodeMarked(preBarrier_.unsafeGet()))
preBarrier_ = NULL;
for (size_t i = 0; i < bailoutTables_.length(); i++) {
if (bailoutTables_[i] && !IsIonCodeMarked(bailoutTables_[i].unsafeGet()))
bailoutTables_[i] = NULL;
}
// Sweep cache of VM function implementations.
functionWrappers_->sweep(fop);
}
IonCode *
IonCompartment::getBailoutTable(const FrameSizeClass &frameClass)
{
JS_ASSERT(frameClass != FrameSizeClass::None());
return bailoutTables_[frameClass.classId()];
return rt->bailoutTables_[frameClass.classId()];
}
IonCode *
IonCompartment::getBailoutTable(JSContext *cx, const FrameSizeClass &frameClass)
IonCompartment::getVMWrapper(const VMFunction &f)
{
uint32 id = frameClass.classId();
typedef MoveResolver::MoveOperand MoveOperand;
if (id >= bailoutTables_.length()) {
size_t numToPush = id - bailoutTables_.length() + 1;
if (!bailoutTables_.reserve(bailoutTables_.length() + numToPush))
return NULL;
for (size_t i = 0; i < numToPush; i++)
bailoutTables_.infallibleAppend(NULL);
}
JS_ASSERT(rt->functionWrappers_);
JS_ASSERT(rt->functionWrappers_->initialized());
IonRuntime::VMWrapperMap::Ptr p = rt->functionWrappers_->lookup(&f);
JS_ASSERT(p);
if (!bailoutTables_[id])
bailoutTables_[id] = generateBailoutTable(cx, id);
return bailoutTables_[id];
}
IonCompartment::~IonCompartment()
{
js_delete(functionWrappers_);
return p->value;
}
IonActivation::IonActivation(JSContext *cx, StackFrame *fp)
@ -327,15 +338,8 @@ IonCode::trace(JSTracer *trc)
{
// Note that we cannot mark invalidated scripts, since we've basically
// corrupted the code stream by injecting bailouts.
if (invalidated()) {
// Note that since we're invalidated, we won't mark the precious
// invalidator thunk referenced in the epilogue. We don't move
// executable code so the actual reference is okay; we just need to
// make sure it stays alive before we return.
IonCompartment *ion = compartment()->ionCompartment();
MarkIonCodeUnbarriered(trc, ion->getInvalidationThunkAddr(), "invalidator");
if (invalidated())
return;
}
if (jumpRelocTableBytes_) {
uint8 *start = code_ + jumpRelocTableOffset();
@ -999,6 +1003,8 @@ AttachFinishedCompilations(JSContext *cx)
CodeGenerator codegen(builder, *builder->backgroundCompiledLir);
types::AutoEnterTypeInference enterTypes(cx);
ExecutionMode executionMode = builder->info().executionMode();
types::AutoEnterCompilation enterCompiler(cx, CompilerOutputKind(executionMode));
enterCompiler.initExisting(builder->recompileInfo);
@ -1311,13 +1317,6 @@ ion::CanEnterAtBranch(JSContext *cx, HandleScript script, StackFrame *fp, jsbyte
if (script->ion->osrPc() != pc)
return Method_Skipped;
// This can GC, so afterward, script->ion is not guaranteed to be valid.
if (!cx->compartment->ionCompartment()->enterJIT(cx))
return Method_Error;
if (!script->ion)
return Method_Skipped;
return Method_Compiled;
}
@ -1364,13 +1363,6 @@ ion::CanEnter(JSContext *cx, HandleScript script, StackFrame *fp, bool newType)
return status;
}
// This can GC, so afterward, script->ion is not guaranteed to be valid.
if (!cx->compartment->ionCompartment()->enterJIT(cx))
return Method_Error;
if (!script->ion)
return Method_Skipped;
return Method_Compiled;
}
@ -1393,7 +1385,7 @@ ion::CanEnterUsingFastInvoke(JSContext *cx, HandleScript script, uint32_t numAct
// This can GC, so afterward, script->ion is not guaranteed to be valid.
AssertCanGC();
if (!cx->compartment->ionCompartment()->enterJIT(cx))
if (!cx->compartment->ionCompartment()->enterJIT())
return Method_Error;
if (!script->ion)
@ -1411,7 +1403,7 @@ EnterIon(JSContext *cx, StackFrame *fp, void *jitcode)
JS_ASSERT(CheckFrame(fp));
JS_ASSERT(!fp->script()->ion->bailoutExpected());
EnterIonCode enter = cx->compartment->ionCompartment()->enterJITInfallible();
EnterIonCode enter = cx->compartment->ionCompartment()->enterJIT();
// maxArgc is the maximum of arguments between the number of actual
// arguments and the number of formal arguments. It accounts for |this|.
@ -1585,7 +1577,7 @@ ion::FastInvoke(JSContext *cx, HandleFunction fun, CallArgsList &args)
activation.setPrevPc(cx->regs().pc);
EnterIonCode enter = cx->compartment->ionCompartment()->enterJITInfallible();
EnterIonCode enter = cx->compartment->ionCompartment()->enterJIT();
void *calleeToken = CalleeToToken(fun);
Value result = Int32Value(args.length());
@ -1878,6 +1870,8 @@ ion::ForbidCompilation(JSContext *cx, JSScript *script)
IonSpew(IonSpew_Abort, "Disabling Ion compilation of script %s:%d",
script->filename, script->lineno);
CancelOffThreadIonCompile(cx->compartment, script);
if (script->hasIonScript()) {
// It is only safe to modify script->ion if the script is not currently
// running, because IonFrameIterator needs to tell what ionScript to

View File

@ -27,37 +27,60 @@ class IonBuilder;
typedef Vector<IonBuilder*, 0, SystemAllocPolicy> OffThreadCompilationVector;
class IonCompartment
class IonRuntime
{
typedef WeakCache<const VMFunction *, ReadBarriered<IonCode> > VMWrapperMap;
friend class IonCompartment;
friend class IonActivation;
// Executable allocator (owned by the runtime).
// Executable allocator.
JSC::ExecutableAllocator *execAlloc_;
// Trampoline for entering JIT code. Contains OSR prologue.
ReadBarriered<IonCode> enterJIT_;
IonCode *enterJIT_;
// Vector mapping frame class sizes to bailout tables.
Vector<ReadBarriered<IonCode>, 4, SystemAllocPolicy> bailoutTables_;
Vector<IonCode*, 4, SystemAllocPolicy> bailoutTables_;
// Generic bailout table; used if the bailout table overflows.
ReadBarriered<IonCode> bailoutHandler_;
IonCode *bailoutHandler_;
// Argument-rectifying thunk, in the case of insufficient arguments passed
// to a function call site. Pads with |undefined|.
ReadBarriered<IonCode> argumentsRectifier_;
IonCode *argumentsRectifier_;
// Thunk that invalidates an (Ion-compiled) caller on the Ion stack.
ReadBarriered<IonCode> invalidator_;
IonCode *invalidator_;
// Thunk that calls the GC pre-barrier.
ReadBarriered<IonCode> preBarrier_;
IonCode *preBarrier_;
// Map VMFunction addresses to the IonCode of the wrapper.
typedef WeakCache<const VMFunction *, IonCode *> VMWrapperMap;
VMWrapperMap *functionWrappers_;
private:
IonCode *generateEnterJIT(JSContext *cx);
IonCode *generateArgumentsRectifier(JSContext *cx);
IonCode *generateBailoutTable(JSContext *cx, uint32 frameClass);
IonCode *generateBailoutHandler(JSContext *cx);
IonCode *generateInvalidator(JSContext *cx);
IonCode *generatePreBarrier(JSContext *cx);
IonCode *generateVMWrapper(JSContext *cx, const VMFunction &f);
public:
IonRuntime();
~IonRuntime();
bool initialize(JSContext *cx);
static void Mark(JSTracer *trc);
};
class IonCompartment
{
friend class IonActivation;
// Ion state for the compartment's runtime.
IonRuntime *rt;
// Any scripts for which off thread compilation has successfully finished,
// failed, or been cancelled. All off thread compilations which are started
// will eventually appear in this list asynchronously. Protected by the
@ -67,94 +90,45 @@ class IonCompartment
// Keep track of memory regions that are going to be flushed.
AutoFlushCache *flusher_;
private:
IonCode *generateEnterJIT(JSContext *cx);
IonCode *generateReturnError(JSContext *cx);
IonCode *generateArgumentsRectifier(JSContext *cx);
IonCode *generateBailoutTable(JSContext *cx, uint32 frameClass);
IonCode *generateBailoutHandler(JSContext *cx);
IonCode *generateInvalidator(JSContext *cx);
IonCode *generatePreBarrier(JSContext *cx);
public:
IonCode *generateVMWrapper(JSContext *cx, const VMFunction &f);
IonCode *getVMWrapper(const VMFunction &f);
OffThreadCompilationVector &finishedOffThreadCompilations() {
return finishedOffThreadCompilations_;
}
public:
bool initialize(JSContext *cx);
IonCompartment();
~IonCompartment();
IonCompartment(IonRuntime *rt);
void mark(JSTracer *trc, JSCompartment *compartment);
void sweep(FreeOp *fop);
JSC::ExecutableAllocator *execAlloc() {
return execAlloc_;
return rt->execAlloc_;
}
IonCode *getBailoutTable(JSContext *cx, const FrameSizeClass &frameClass);
IonCode *getGenericBailoutHandler(JSContext *cx) {
if (!bailoutHandler_) {
bailoutHandler_ = generateBailoutHandler(cx);
if (!bailoutHandler_)
return NULL;
}
return bailoutHandler_;
IonCode *getGenericBailoutHandler() {
return rt->bailoutHandler_;
}
// Infallible; does not generate a table.
IonCode *getBailoutTable(const FrameSizeClass &frameClass);
// Fallible; generates a thunk and returns the target.
IonCode *getArgumentsRectifier(JSContext *cx) {
if (!argumentsRectifier_) {
argumentsRectifier_ = generateArgumentsRectifier(cx);
if (!argumentsRectifier_)
return NULL;
}
return argumentsRectifier_;
}
IonCode **getArgumentsRectifierAddr() {
return argumentsRectifier_.unsafeGet();
IonCode *getArgumentsRectifier() {
return rt->argumentsRectifier_;
}
IonCode *getOrCreateInvalidationThunk(JSContext *cx) {
if (!invalidator_) {
invalidator_ = generateInvalidator(cx);
if (!invalidator_)
return NULL;
}
return invalidator_;
}
IonCode **getInvalidationThunkAddr() {
return invalidator_.unsafeGet();
IonCode *getInvalidationThunk() {
return rt->invalidator_;
}
EnterIonCode enterJITInfallible() {
JS_ASSERT(enterJIT_);
return enterJIT_.get()->as<EnterIonCode>();
EnterIonCode enterJIT() {
return rt->enterJIT_->as<EnterIonCode>();
}
EnterIonCode enterJIT(JSContext *cx) {
if (!enterJIT_) {
enterJIT_ = generateEnterJIT(cx);
if (!enterJIT_)
return NULL;
}
return enterJIT_.get()->as<EnterIonCode>();
IonCode *preBarrier() {
return rt->preBarrier_;
}
IonCode *preBarrier(JSContext *cx) {
if (!preBarrier_) {
preBarrier_ = generatePreBarrier(cx);
if (!preBarrier_)
return NULL;
}
return preBarrier_;
}
AutoFlushCache *flusher() {
return flusher_;
}

View File

@ -638,11 +638,8 @@ MarkIonActivation(JSTracer *trc, const IonActivationIterator &activations)
JS_NOT_REACHED("invalid");
break;
case IonFrame_Rectifier:
case IonFrame_Bailed_Rectifier: {
IonCompartment *ionCompartment = activations.activation()->compartment()->ionCompartment();
MarkIonCodeRoot(trc, ionCompartment->getArgumentsRectifierAddr(), "Arguments Rectifier");
case IonFrame_Bailed_Rectifier:
break;
}
case IonFrame_Osr:
// The callee token will be marked by the callee JS frame;
// otherwise, it does not need to be marked, since the frame is

View File

@ -220,8 +220,9 @@ class FrameSizeClass
return FrameSizeClass(class_);
}
// These two functions are implemented in specific CodeGenerator-* files.
// These functions are implemented in specific CodeGenerator-* files.
static FrameSizeClass FromDepth(uint32 frameDepth);
static FrameSizeClass ClassLimit();
uint32 frameSize() const;
uint32 classId() const {

View File

@ -381,18 +381,15 @@ class MacroAssembler : public MacroAssemblerSpecific
JS_ASSERT(type == MIRType_Value || type == MIRType_String || type == MIRType_Object);
Label done;
JSContext *cx = GetIonContext()->cx;
IonCode *preBarrier = cx->compartment->ionCompartment()->preBarrier(cx);
if (!preBarrier) {
enoughMemory_ = false;
return;
}
if (type == MIRType_Value)
branchTestGCThing(Assembler::NotEqual, address, &done);
Push(PreBarrierReg);
computeEffectiveAddress(address, PreBarrierReg);
JSCompartment *compartment = GetIonContext()->compartment;
IonCode *preBarrier = compartment->ionCompartment()->preBarrier();
call(preBarrier);
Pop(PreBarrierReg);

View File

@ -21,6 +21,22 @@ using namespace js::ion;
namespace js {
namespace ion {
// Don't explicitly initialize; it's not guaranteed that this initializer will
// run before the constructors for static VMFunctions.
/* static */ VMFunction *VMFunction::functions;
void
VMFunction::addToFunctions()
{
static bool initialized = false;
if (!initialized) {
initialized = true;
functions = NULL;
}
this->next = functions;
functions = this;
}
static inline bool
ShouldMonitorReturnType(JSFunction *fun)
{

View File

@ -34,6 +34,10 @@ enum DataType {
// argument, and are treated as re-entrant into the VM and therefore fallible.
struct VMFunction
{
// Global linked list of all VMFunctions.
static VMFunction *functions;
VMFunction *next;
// Address of the C function.
void *wrapped;
@ -168,6 +172,16 @@ struct VMFunction
JS_ASSERT_IF(outParam != Type_Void, returnType == Type_Bool);
JS_ASSERT(returnType == Type_Bool || returnType == Type_Object);
}
VMFunction(const VMFunction &o)
{
*this = o;
addToFunctions();
}
private:
// Add this to the global list of VMFunctions.
void addToFunctions();
};
template <class> struct TypeToDataType { /* Unexpected return type for a VMFunction. */ };
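The bookkeeping above is a small intrusive-registration pattern: every statically constructed VMFunction prepends itself to a global singly linked list via addToFunctions(), and the list head deliberately has no explicit initializer because static VMFunctions in other translation units may be constructed first. A minimal standalone sketch of the same pattern (Registered, head, and addToList are hypothetical stand-ins, not SpiderMonkey API):

#include <cstdio>

// Each instance registers itself so a later pass (IonRuntime::initialize in the
// patch) can walk every one of them and generate a wrapper eagerly.
struct Registered {
    static Registered *head;      // no explicit initializer: see comment below
    Registered *next;
    const char *name;

    explicit Registered(const char *name) : name(name) { addToList(); }
    Registered(const Registered &o) : name(o.name) { addToList(); }

  private:
    void addToList() {
        // Statics are zero-initialized before any constructor runs, so prepending
        // here is safe even if a Registered in another translation unit is
        // constructed first; a dynamic initializer for `head` could run after some
        // of those constructors and wipe out their registrations.
        next = head;
        head = this;
    }
};

Registered *Registered::head;          // zero-initialized, never reassigned here

static Registered a("first"), b("second");

int main() {
    for (Registered *r = Registered::head; r; r = r->next)
        std::printf("%s\n", r->name);  // visits every statically registered instance
    return 0;
}

The patch is extra-cautious and also clears the head from the first addToFunctions() call via a function-local flag. The same registration timing is presumably why the InterruptCheckInfo VMFunction definitions in the code generators move from function-local statics to file scope: they must be constructed, and therefore registered, during static initialization, before IonRuntime::initialize() walks the list.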

View File

@ -176,11 +176,9 @@ CodeGeneratorARM::generateOutOfLineCode()
// Push the frame size, so the handler can recover the IonScript.
masm.ma_mov(Imm32(frameSize()), lr);
JSContext *cx = GetIonContext()->cx;
IonCompartment *ion = cx->compartment->ionCompartment();
IonCode *handler = ion->getGenericBailoutHandler(cx);
if (!handler)
return false;
IonCompartment *ion = GetIonContext()->compartment->ionCompartment();
IonCode *handler = ion->getGenericBailoutHandler();
masm.branch(handler);
}
@ -982,11 +980,7 @@ CodeGeneratorARM::visitTruncateDToInt32(LTruncateDToInt32 *ins)
return emitTruncateDouble(ToFloatRegister(ins->input()), ToRegister(ins->output()));
}
// The first two size classes are 128 and 256 bytes respectively. After that we
// increment by 512.
static const uint32 LAST_FRAME_SIZE = 512;
static const uint32 LAST_FRAME_INCREMENT = 512;
static const uint32 FrameSizes[] = { 128, 256, LAST_FRAME_SIZE };
static const uint32 FrameSizes[] = { 128, 256, 512, 1024 };
FrameSizeClass
FrameSizeClass::FromDepth(uint32 frameDepth)
@ -996,21 +990,22 @@ FrameSizeClass::FromDepth(uint32 frameDepth)
return FrameSizeClass(i);
}
uint32 newFrameSize = frameDepth - LAST_FRAME_SIZE;
uint32 sizeClass = (newFrameSize / LAST_FRAME_INCREMENT) + 1;
return FrameSizeClass(JS_ARRAY_LENGTH(FrameSizes) + sizeClass);
return FrameSizeClass::None();
}
FrameSizeClass
FrameSizeClass::ClassLimit()
{
return FrameSizeClass(JS_ARRAY_LENGTH(FrameSizes));
}
uint32
FrameSizeClass::frameSize() const
{
JS_ASSERT(class_ != NO_FRAME_SIZE_CLASS_ID);
JS_ASSERT(class_ < JS_ARRAY_LENGTH(FrameSizes));
if (class_ < JS_ARRAY_LENGTH(FrameSizes))
return FrameSizes[class_];
uint32 step = class_ - JS_ARRAY_LENGTH(FrameSizes);
return LAST_FRAME_SIZE + step * LAST_FRAME_INCREMENT;
return FrameSizes[class_];
}
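The old open-ended scheme (classes growing by 512 bytes without bound) goes away because IonRuntime::initialize() now pre-generates one bailout table per class up to ClassLimit(), which only works if the set of classes is fixed; frames deeper than the largest table entry fall back to FrameSizeClass::None() and the generic bailout handler. A standalone sketch of the mapping (ClassFromDepth and NoClass are hypothetical stand-ins; the exact boundary comparison is not shown in this hunk, so treat it as an assumption):

#include <cstdint>
#include <cstdio>

// Mirrors the new fixed table: FrameSizes[] = { 128, 256, 512, 1024 }.
static const uint32_t FrameSizes[] = { 128, 256, 512, 1024 };
static const uint32_t NumClasses = sizeof(FrameSizes) / sizeof(FrameSizes[0]);
static const uint32_t NoClass = UINT32_MAX;   // stands in for FrameSizeClass::None()

static uint32_t ClassFromDepth(uint32_t frameDepth) {
    for (uint32_t i = 0; i < NumClasses; i++) {
        if (frameDepth <= FrameSizes[i])      // assumption: smallest class the frame fits
            return i;
    }
    return NoClass;                           // too deep: generic bailout handler instead
}

int main() {
    std::printf("%u\n", ClassFromDepth(96));    // 0 -> 128-byte frame class
    std::printf("%u\n", ClassFromDepth(300));   // 2 -> 512-byte frame class
    std::printf("%u\n", ClassFromDepth(2000));  // NoClass -> no table, generic bailout
    return 0;
}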
ValueOperand
@ -1471,13 +1466,13 @@ CodeGeneratorARM::visitRecompileCheck(LRecompileCheck *lir)
return true;
}
typedef bool (*InterruptCheckFn)(JSContext *);
static const VMFunction InterruptCheckInfo = FunctionInfo<InterruptCheckFn>(InterruptCheck);
bool
CodeGeneratorARM::visitInterruptCheck(LInterruptCheck *lir)
{
typedef bool (*pf)(JSContext *);
static const VMFunction interruptCheckInfo = FunctionInfo<pf>(InterruptCheck);
OutOfLineCode *ool = oolCallVM(interruptCheckInfo, lir, (ArgList()), StoreNothing());
OutOfLineCode *ool = oolCallVM(InterruptCheckInfo, lir, (ArgList()), StoreNothing());
if (!ool)
return false;
@ -1503,13 +1498,9 @@ CodeGeneratorARM::generateInvalidateEpilogue()
// Push the return address of the point that we bailed out at onto the stack
masm.Push(lr);
JSContext *cx = GetIonContext()->cx;
// Push the Ion script onto the stack (when we determine what that pointer is).
invalidateEpilogueData_ = masm.pushWithPatch(ImmWord(uintptr_t(-1)));
IonCode *thunk = cx->compartment->ionCompartment()->getOrCreateInvalidationThunk(cx);
if (!thunk)
return false;
IonCode *thunk = GetIonContext()->compartment->ionCompartment()->getInvalidationThunk();
masm.branch(thunk);

View File

@ -70,7 +70,7 @@ struct EnterJITStack
* ...using standard EABI calling convention
*/
IonCode *
IonCompartment::generateEnterJIT(JSContext *cx)
IonRuntime::generateEnterJIT(JSContext *cx)
{
const Register reg_code = r0;
@ -199,22 +199,9 @@ IonCompartment::generateEnterJIT(JSContext *cx)
}
IonCode *
IonCompartment::generateReturnError(JSContext *cx)
IonRuntime::generateInvalidator(JSContext *cx)
{
MacroAssembler masm(cx);
// This is where the stack size is stored on x86. Where is it stored here?
masm.ma_pop(r0);
masm.ma_add(r0, sp, sp);
GenerateReturn(masm, JS_FALSE);
Linker linker(masm);
return linker.newCode(cx);
}
IonCode *
IonCompartment::generateInvalidator(JSContext *cx)
{
// See large comment in x86's IonCompartment::generateInvalidator.
// See large comment in x86's IonRuntime::generateInvalidator.
AutoIonContextAlloc aica(cx);
MacroAssembler masm(cx);
//masm.as_bkpt();
@ -261,7 +248,7 @@ IonCompartment::generateInvalidator(JSContext *cx)
}
IonCode *
IonCompartment::generateArgumentsRectifier(JSContext *cx)
IonRuntime::generateArgumentsRectifier(JSContext *cx)
{
MacroAssembler masm(cx);
// ArgumentsRectifierReg contains the |nargs| pushed onto the current frame.
@ -448,7 +435,7 @@ GenerateBailoutThunk(MacroAssembler &masm, uint32 frameClass)
}
IonCode *
IonCompartment::generateBailoutTable(JSContext *cx, uint32 frameClass)
IonRuntime::generateBailoutTable(JSContext *cx, uint32 frameClass)
{
MacroAssembler masm;
@ -464,7 +451,7 @@ IonCompartment::generateBailoutTable(JSContext *cx, uint32 frameClass)
}
IonCode *
IonCompartment::generateBailoutHandler(JSContext *cx)
IonRuntime::generateBailoutHandler(JSContext *cx)
{
MacroAssembler masm;
GenerateBailoutThunk(masm, NO_FRAME_SIZE_CLASS_ID);
@ -474,7 +461,7 @@ IonCompartment::generateBailoutHandler(JSContext *cx)
}
IonCode *
IonCompartment::generateVMWrapper(JSContext *cx, const VMFunction &f)
IonRuntime::generateVMWrapper(JSContext *cx, const VMFunction &f)
{
typedef MoveResolver::MoveOperand MoveOperand;
@ -622,7 +609,7 @@ IonCompartment::generateVMWrapper(JSContext *cx, const VMFunction &f)
}
IonCode *
IonCompartment::generatePreBarrier(JSContext *cx)
IonRuntime::generatePreBarrier(JSContext *cx)
{
MacroAssembler masm;

View File

@ -219,7 +219,8 @@ CodeGeneratorShared::encode(LSnapshot *snapshot)
DebugOnly<jsbytecode *> bailPC = pc;
if (mir->mode() == MResumePoint::ResumeAfter)
bailPC = GetNextPc(pc);
JS_ASSERT(exprStack == js_ReconstructStackDepth(GetIonContext()->cx, script, bailPC));
JS_ASSERT_IF(GetIonContext()->cx,
exprStack == js_ReconstructStackDepth(GetIonContext()->cx, script, bailPC));
#ifdef TRACK_SNAPSHOTS
LInstruction *ins = instruction();
@ -374,10 +375,9 @@ CodeGeneratorShared::callVM(const VMFunction &fun, LInstruction *ins, const Regi
pushedArgs_ = 0;
#endif
// Generate the wrapper of the VM function.
JSContext *cx = GetIonContext()->cx;
IonCompartment *ion = cx->compartment->ionCompartment();
IonCode *wrapper = ion->generateVMWrapper(cx, fun);
// Get the wrapper of the VM function.
IonCompartment *ion = GetIonContext()->compartment->ionCompartment();
IonCode *wrapper = ion->getVMWrapper(fun);
if (!wrapper)
return false;

View File

@ -293,11 +293,8 @@ CodeGeneratorX86Shared::generateOutOfLineCode()
// Push the frame size, so the handler can recover the IonScript.
masm.push(Imm32(frameSize()));
JSContext *cx = GetIonContext()->cx;
IonCompartment *ion = cx->compartment->ionCompartment();
IonCode *handler = ion->getGenericBailoutHandler(cx);
if (!handler)
return false;
IonCompartment *ion = GetIonContext()->compartment->ionCompartment();
IonCode *handler = ion->getGenericBailoutHandler();
masm.jmp(handler->raw(), Relocation::IONCODE);
}
@ -1329,13 +1326,9 @@ CodeGeneratorX86Shared::generateInvalidateEpilogue()
masm.bind(&invalidate_);
JSContext *cx = GetIonContext()->cx;
// Push the Ion script onto the stack (when we determine what that pointer is).
invalidateEpilogueData_ = masm.pushWithPatch(ImmWord(uintptr_t(-1)));
IonCode *thunk = cx->compartment->ionCompartment()->getOrCreateInvalidationThunk(cx);
if (!thunk)
return false;
IonCode *thunk = GetIonContext()->compartment->ionCompartment()->getInvalidationThunk();
masm.call(thunk);

View File

@ -53,6 +53,12 @@ FrameSizeClass::FromDepth(uint32 frameDepth)
return FrameSizeClass::None();
}
FrameSizeClass
FrameSizeClass::ClassLimit()
{
return FrameSizeClass(0);
}
uint32
FrameSizeClass::frameSize() const
{
@ -293,13 +299,13 @@ CodeGeneratorX64::visitRecompileCheck(LRecompileCheck *lir)
return true;
}
typedef bool (*InterruptCheckFn)(JSContext *);
static const VMFunction InterruptCheckInfo = FunctionInfo<InterruptCheckFn>(InterruptCheck);
bool
CodeGeneratorX64::visitInterruptCheck(LInterruptCheck *lir)
{
typedef bool (*pf)(JSContext *);
static const VMFunction interruptCheckInfo = FunctionInfo<pf>(InterruptCheck);
OutOfLineCode *ool = oolCallVM(interruptCheckInfo, lir, (ArgList()), StoreNothing());
OutOfLineCode *ool = oolCallVM(InterruptCheckInfo, lir, (ArgList()), StoreNothing());
if (!ool)
return false;

View File

@ -868,7 +868,7 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared
// Save an exit frame (which must be aligned to the stack pointer) to
// ThreadData::ionTop.
void linkExitFrame() {
mov(ImmWord(GetIonContext()->cx->runtime), ScratchReg);
mov(ImmWord(GetIonContext()->compartment->rt), ScratchReg);
mov(StackPointer, Operand(ScratchReg, offsetof(JSRuntime, ionTop)));
}

View File

@ -25,7 +25,7 @@ using namespace js::ion;
* ...using standard x64 fastcall calling convention
*/
IonCode *
IonCompartment::generateEnterJIT(JSContext *cx)
IonRuntime::generateEnterJIT(JSContext *cx)
{
MacroAssembler masm(cx);
@ -180,26 +180,12 @@ IonCompartment::generateEnterJIT(JSContext *cx)
}
IonCode *
IonCompartment::generateReturnError(JSContext *cx)
{
MacroAssembler masm(cx);
masm.pop(r14); // sizeDescriptor.
masm.xorl(Imm32(0x1), r14); // Unmark EntryFrame.
masm.addq(r14, rsp); // Remove arguments.
masm.pop(r11); // Discard |vp|: returning from error.
Linker linker(masm);
return linker.newCode(cx);
}
IonCode *
IonCompartment::generateInvalidator(JSContext *cx)
IonRuntime::generateInvalidator(JSContext *cx)
{
AutoIonContextAlloc aica(cx);
MacroAssembler masm(cx);
// See explanatory comment in x86's IonCompartment::generateInvalidator.
// See explanatory comment in x86's IonRuntime::generateInvalidator.
masm.addq(Imm32(sizeof(uintptr_t)), rsp);
@ -236,7 +222,7 @@ IonCompartment::generateInvalidator(JSContext *cx)
}
IonCode *
IonCompartment::generateArgumentsRectifier(JSContext *cx)
IonRuntime::generateArgumentsRectifier(JSContext *cx)
{
// Do not erase the frame pointer in this function.
@ -359,14 +345,14 @@ GenerateBailoutThunk(JSContext *cx, MacroAssembler &masm, uint32 frameClass)
}
IonCode *
IonCompartment::generateBailoutTable(JSContext *cx, uint32 frameClass)
IonRuntime::generateBailoutTable(JSContext *cx, uint32 frameClass)
{
JS_NOT_REACHED("x64 does not use bailout tables");
return NULL;
}
IonCode *
IonCompartment::generateBailoutHandler(JSContext *cx)
IonRuntime::generateBailoutHandler(JSContext *cx)
{
MacroAssembler masm;
@ -377,7 +363,7 @@ IonCompartment::generateBailoutHandler(JSContext *cx)
}
IonCode *
IonCompartment::generateVMWrapper(JSContext *cx, const VMFunction &f)
IonRuntime::generateVMWrapper(JSContext *cx, const VMFunction &f)
{
typedef MoveResolver::MoveOperand MoveOperand;
@ -531,7 +517,7 @@ IonCompartment::generateVMWrapper(JSContext *cx, const VMFunction &f)
}
IonCode *
IonCompartment::generatePreBarrier(JSContext *cx)
IonRuntime::generatePreBarrier(JSContext *cx)
{
MacroAssembler masm;

View File

@ -23,11 +23,7 @@ CodeGeneratorX86::CodeGeneratorX86(MIRGenerator *gen, LIRGraph &graph)
{
}
// The first two size classes are 128 and 256 bytes respectively. After that we
// increment by 512.
static const uint32 LAST_FRAME_SIZE = 512;
static const uint32 LAST_FRAME_INCREMENT = 512;
static const uint32 FrameSizes[] = { 128, 256, LAST_FRAME_SIZE };
static const uint32 FrameSizes[] = { 128, 256, 512, 1024 };
FrameSizeClass
FrameSizeClass::FromDepth(uint32 frameDepth)
@ -37,21 +33,22 @@ FrameSizeClass::FromDepth(uint32 frameDepth)
return FrameSizeClass(i);
}
uint32 newFrameSize = frameDepth - LAST_FRAME_SIZE;
uint32 sizeClass = (newFrameSize / LAST_FRAME_INCREMENT) + 1;
return FrameSizeClass(JS_ARRAY_LENGTH(FrameSizes) + sizeClass);
return FrameSizeClass::None();
}
FrameSizeClass
FrameSizeClass::ClassLimit()
{
return FrameSizeClass(JS_ARRAY_LENGTH(FrameSizes));
}
uint32
FrameSizeClass::frameSize() const
{
JS_ASSERT(class_ != NO_FRAME_SIZE_CLASS_ID);
JS_ASSERT(class_ < JS_ARRAY_LENGTH(FrameSizes));
if (class_ < JS_ARRAY_LENGTH(FrameSizes))
return FrameSizes[class_];
uint32 step = class_ - JS_ARRAY_LENGTH(FrameSizes);
return LAST_FRAME_SIZE + step * LAST_FRAME_INCREMENT;
return FrameSizes[class_];
}
ValueOperand
@ -298,13 +295,13 @@ CodeGeneratorX86::visitRecompileCheck(LRecompileCheck *lir)
return true;
}
typedef bool (*InterruptCheckFn)(JSContext *);
static const VMFunction InterruptCheckInfo = FunctionInfo<InterruptCheckFn>(InterruptCheck);
bool
CodeGeneratorX86::visitInterruptCheck(LInterruptCheck *lir)
{
typedef bool (*pf)(JSContext *);
static const VMFunction interruptCheckInfo = FunctionInfo<pf>(InterruptCheck);
OutOfLineCode *ool = oolCallVM(interruptCheckInfo, lir, (ArgList()), StoreNothing());
OutOfLineCode *ool = oolCallVM(InterruptCheckInfo, lir, (ArgList()), StoreNothing());
if (!ool)
return false;

View File

@ -33,7 +33,7 @@ enum EnterJitEbpArgumentOffset {
* using the standard cdecl calling convention.
*/
IonCode *
IonCompartment::generateEnterJIT(JSContext *cx)
IonRuntime::generateEnterJIT(JSContext *cx)
{
MacroAssembler masm(cx);
@ -161,7 +161,7 @@ IonCompartment::generateEnterJIT(JSContext *cx)
}
IonCode *
IonCompartment::generateInvalidator(JSContext *cx)
IonRuntime::generateInvalidator(JSContext *cx)
{
AutoIonContextAlloc aica(cx);
MacroAssembler masm(cx);
@ -211,7 +211,7 @@ IonCompartment::generateInvalidator(JSContext *cx)
}
IonCode *
IonCompartment::generateArgumentsRectifier(JSContext *cx)
IonRuntime::generateArgumentsRectifier(JSContext *cx)
{
MacroAssembler masm(cx);
@ -357,7 +357,7 @@ GenerateBailoutThunk(JSContext *cx, MacroAssembler &masm, uint32 frameClass)
}
IonCode *
IonCompartment::generateBailoutTable(JSContext *cx, uint32 frameClass)
IonRuntime::generateBailoutTable(JSContext *cx, uint32 frameClass)
{
MacroAssembler masm;
@ -373,7 +373,7 @@ IonCompartment::generateBailoutTable(JSContext *cx, uint32 frameClass)
}
IonCode *
IonCompartment::generateBailoutHandler(JSContext *cx)
IonRuntime::generateBailoutHandler(JSContext *cx)
{
MacroAssembler masm;
@ -384,7 +384,7 @@ IonCompartment::generateBailoutHandler(JSContext *cx)
}
IonCode *
IonCompartment::generateVMWrapper(JSContext *cx, const VMFunction &f)
IonRuntime::generateVMWrapper(JSContext *cx, const VMFunction &f)
{
AssertCanGC();
typedef MoveResolver::MoveOperand MoveOperand;
@ -544,7 +544,7 @@ IonCompartment::generateVMWrapper(JSContext *cx, const VMFunction &f)
}
IonCode *
IonCompartment::generatePreBarrier(JSContext *cx)
IonRuntime::generatePreBarrier(JSContext *cx)
{
MacroAssembler masm;

View File

@ -743,6 +743,7 @@ JSRuntime::JSRuntime(JSUseHelperThreads useHelperThreads)
#ifdef JS_METHODJIT
jaegerRuntime_(NULL),
#endif
ionRuntime_(NULL),
selfHostedGlobal_(NULL),
nativeStackBase(0),
nativeStackQuota(0),
@ -1014,6 +1015,9 @@ JSRuntime::~JSRuntime()
js_delete(mathCache_);
#ifdef JS_METHODJIT
js_delete(jaegerRuntime_);
#endif
#ifdef JS_ION
js_delete(ionRuntime_);
#endif
js_delete(execAlloc_); /* Delete after jaegerRuntime_. */

View File

@ -91,6 +91,7 @@ class JaegerRuntime;
class MathCache;
namespace ion {
class IonRuntime;
class IonActivation;
}
@ -499,12 +500,14 @@ struct JSRuntime : js::RuntimeFriendFields
#ifdef JS_METHODJIT
js::mjit::JaegerRuntime *jaegerRuntime_;
#endif
js::ion::IonRuntime *ionRuntime_;
JSObject *selfHostedGlobal_;
JSC::ExecutableAllocator *createExecutableAllocator(JSContext *cx);
WTF::BumpPointerAllocator *createBumpPointerAllocator(JSContext *cx);
js::mjit::JaegerRuntime *createJaegerRuntime(JSContext *cx);
js::ion::IonRuntime *createIonRuntime(JSContext *cx);
public:
JSC::ExecutableAllocator *getExecAlloc(JSContext *cx) {
@ -532,6 +535,9 @@ struct JSRuntime : js::RuntimeFriendFields
return *jaegerRuntime_;
}
#endif
js::ion::IonRuntime *getIonRuntime(JSContext *cx) {
return ionRuntime_ ? ionRuntime_ : createIonRuntime(cx);
}
bool initSelfHosting(JSContext *cx);
void markSelfHostedGlobal(JSTracer *trc);

View File

@ -157,6 +157,29 @@ JSCompartment::setNeedsBarrier(bool needs, ShouldUpdateIon updateIon)
}
#ifdef JS_ION
ion::IonRuntime *
JSRuntime::createIonRuntime(JSContext *cx)
{
ionRuntime_ = cx->new_<ion::IonRuntime>();
if (!ionRuntime_)
return NULL;
if (!ionRuntime_->initialize(cx)) {
js_delete(ionRuntime_);
ionRuntime_ = NULL;
if (cx->runtime->atomsCompartment->ionCompartment_) {
js_delete(cx->runtime->atomsCompartment->ionCompartment_);
cx->runtime->atomsCompartment->ionCompartment_ = NULL;
}
return NULL;
}
return ionRuntime_;
}
bool
JSCompartment::ensureIonCompartmentExists(JSContext *cx)
{
@ -164,15 +187,15 @@ JSCompartment::ensureIonCompartmentExists(JSContext *cx)
if (ionCompartment_)
return true;
/* Set the compartment early, so linking works. */
ionCompartment_ = cx->new_<IonCompartment>();
if (!ionCompartment_ || !ionCompartment_->initialize(cx)) {
if (ionCompartment_)
delete ionCompartment_;
ionCompartment_ = NULL;
IonRuntime *ionRuntime = cx->runtime->getIonRuntime(cx);
if (!ionRuntime)
return false;
/* Set the compartment early, so linking works. */
ionCompartment_ = cx->new_<IonCompartment>(ionRuntime);
if (!ionCompartment_)
return false;
}
return true;
}

View File

@ -122,6 +122,7 @@ struct JSCompartment
JSPrincipals *principals;
private:
friend struct JSRuntime;
friend struct JSContext;
js::GlobalObject *global_;
public:

View File

@ -2514,8 +2514,15 @@ MarkRuntime(JSTracer *trc, bool useSavedRoots = false)
MarkScriptRoot(trc, &vec[i].script, "scriptAndCountsVector");
}
if (!IS_GC_MARKING_TRACER(trc) || rt->atomsCompartment->isCollecting())
if (!IS_GC_MARKING_TRACER(trc) || rt->atomsCompartment->isCollecting()) {
MarkAtoms(trc);
#ifdef JS_ION
/* Any Ion wrappers survive until the runtime is being torn down. */
if (rt->hasContexts())
ion::IonRuntime::Mark(trc);
#endif
}
rt->staticStrings.trace(trc);
for (ContextIter acx(rt); !acx.done(); acx.next())

View File

@ -465,7 +465,9 @@ NewGCThing(JSContext *cx, js::gc::AllocKind kind, size_t thingSize)
AssertCanGC();
JS_ASSERT(thingSize == js::gc::Arena::thingSize(kind));
JS_ASSERT_IF(cx->compartment == cx->runtime->atomsCompartment,
kind == js::gc::FINALIZE_STRING || kind == js::gc::FINALIZE_SHORT_STRING);
kind == FINALIZE_STRING ||
kind == FINALIZE_SHORT_STRING ||
kind == FINALIZE_IONCODE);
JS_ASSERT(!cx->runtime->isHeapBusy());
JS_ASSERT(!cx->runtime->noGCOrAllocationCheck);

View File

@ -20,8 +20,7 @@ using mozilla::DebugOnly;
bool
js::OffThreadCompilationAvailable(JSContext *cx)
{
WorkerThreadState &state = *cx->runtime->workerThreadState;
return state.numThreads > 0;
return cx->runtime->useHelperThreads();
}
bool
@ -77,6 +76,8 @@ CompiledScriptMatches(JSCompartment *compartment, JSScript *script, JSScript *ta
void
js::CancelOffThreadIonCompile(JSCompartment *compartment, JSScript *script)
{
AutoAssertNoGC nogc;
if (!compartment->rt->workerThreadState)
return;