Bug 1171945: IonMonkey - Part 2: Add platform in ionmonkey for sharedcaches, r=jandem

This commit is contained in:
Hannes Verschore 2015-08-19 15:15:46 +02:00
parent 2c7c90a745
commit dc9455106a
32 changed files with 494 additions and 98 deletions

View File

@ -441,10 +441,7 @@ BaselineScript::trace(JSTracer* trc)
// Mark all IC stub codes hanging off the IC stub entries.
for (size_t i = 0; i < numICEntries(); i++) {
ICEntry& ent = icEntry(i);
if (!ent.hasStub())
continue;
for (ICStub* stub = ent.firstStub(); stub; stub = stub->next())
stub->trace(trc);
ent.trace(trc);
}
}

View File

@ -1682,6 +1682,51 @@ CodeGenerator::visitStringReplace(LStringReplace* lir)
callVM(StringReplaceInfo, lir);
}
// Emit an inline call to a shared (Baseline-style) IC from Ion code.
// Pushes an Ion frame descriptor, calls through a patchable IC load, and
// records a SharedStub entry so that link() can later resolve the load to
// the final IonICEntry address. |kind| selects which fallback stub
// linkSharedStubs() will attach to the entry.
void
CodeGenerator::emitSharedStub(ICStub::Kind kind, LInstruction* lir)
{
    JSScript* script = lir->mirRaw()->block()->info().script();
    jsbytecode* pc = lir->mirRaw()->toInstruction()->resumePoint()->pc();

    // Create descriptor signifying end of Ion frame.
    uint32_t descriptor = MakeFrameDescriptor(masm.framePushed(), JitFrame_IonJS);
    masm.Push(Imm32(descriptor));

    // Call into the stubcode.
    CodeOffsetLabel patchOffset;
    IonICEntry entry(script->pcToOffset(pc), ICEntry::Kind_Op, script);
    EmitCallIC(&patchOffset, masm);
    // Record where execution resumes after the stub returns.
    entry.setReturnOffset(CodeOffsetLabel(masm.currentOffset()));

    // Remember the entry and the patchable load's offset; both are resolved
    // in link() once the final code buffer exists.
    SharedStub sharedStub(kind, entry, patchOffset);
    masm.propagateOOM(sharedStubs_.append(sharedStub));

    // Fix up upon return.
    uint32_t callOffset = masm.currentOffset();
    // Pop the frame descriptor pushed above.
    masm.freeStack(sizeof(intptr_t));
    markSafepointAt(callOffset, lir);
}
// Generate code for LBinarySharedStub by dispatching on the bytecode op at
// the instruction's resume point. No ops are handled yet, so every op hits
// the crash; this is scaffolding for later parts of the shared-stub work.
void
CodeGenerator::visitBinarySharedStub(LBinarySharedStub* lir)
{
    JSOp jsop = JSOp(*lir->mir()->resumePoint()->pc());
    switch (jsop) {
      default:
        MOZ_CRASH("Unsupported jsop in shared stubs.");
    }
}
// Generate code for LUnarySharedStub by dispatching on the bytecode op at
// the instruction's resume point. No ops are handled yet, so every op hits
// the crash; this is scaffolding for later parts of the shared-stub work.
void
CodeGenerator::visitUnarySharedStub(LUnarySharedStub* lir)
{
    JSOp jsop = JSOp(*lir->mir()->resumePoint()->pc());
    switch (jsop) {
      default:
        MOZ_CRASH("Unsupported jsop in shared stubs.");
    }
}
typedef JSObject* (*LambdaFn)(JSContext*, HandleFunction, HandleObject);
static const VMFunction LambdaInfo = FunctionInfo<LambdaFn>(js::Lambda);
@ -7805,6 +7850,25 @@ struct AutoDiscardIonCode
}
};
// Allocate the fallback stub for each shared-stub IC recorded during code
// generation and attach it as the first stub of the corresponding IC entry.
// Returns false if stub allocation fails. No stub kinds are supported yet,
// so reaching the switch with a recorded stub crashes.
bool
CodeGenerator::linkSharedStubs(JSContext* cx)
{
    for (uint32_t i = 0; i < sharedStubs_.length(); i++) {
        // Note: pointer declared in the file's |Type* name| style.
        ICStub* stub = nullptr;
        switch (sharedStubs_[i].kind) {
          default:
            MOZ_CRASH("Unsupported shared stub.");
        }
        if (!stub)
            return false;

        sharedStubs_[i].entry.setFirstStub(stub);
    }
    return true;
}
bool
CodeGenerator::link(JSContext* cx, CompilerConstraintList* constraints)
{
@ -7830,6 +7894,9 @@ CodeGenerator::link(JSContext* cx, CompilerConstraintList* constraints)
if (scriptCounts_ && !script->hasScriptCounts() && !script->initScriptCounts(cx))
return false;
if (!linkSharedStubs(cx))
return false;
// Check to make sure we didn't have a mid-build invalidation. If so, we
// will trickle to jit::Compile() and return Method_Skipped.
uint32_t warmUpCount = script->getWarmUpCount();
@ -7861,7 +7928,8 @@ CodeGenerator::link(JSContext* cx, CompilerConstraintList* constraints)
recovers_.size(), bailouts_.length(), graph.numConstants(),
safepointIndices_.length(), osiIndices_.length(),
cacheList_.length(), runtimeData_.length(),
safepoints_.size(), patchableBackedges_.length(), optimizationLevel);
safepoints_.size(), patchableBackedges_.length(),
sharedStubs_.length(), optimizationLevel);
if (!ionScript)
return false;
discardIonCode.ionScript = ionScript;
@ -7955,6 +8023,9 @@ CodeGenerator::link(JSContext* cx, CompilerConstraintList* constraints)
script->setIonScript(cx, ionScript);
// Adopt fallback shared stubs from the compiler into the ion script.
ionScript->adoptFallbackStubs(&stubSpace_);
{
AutoWritableJitCode awjc(code);
invalidateEpilogueData_.fixup(&masm);
@ -7991,6 +8062,23 @@ CodeGenerator::link(JSContext* cx, CompilerConstraintList* constraints)
}
}
#endif
// Patch shared stub IC loads using IC entries
for (size_t i = 0; i < sharedStubs_.length(); i++) {
CodeOffsetLabel label = sharedStubs_[i].label;
label.fixup(&masm);
IonICEntry& entry = ionScript->sharedStubList()[i];
entry = sharedStubs_[i].entry;
entry.fixupReturnOffset(masm);
Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, label),
ImmPtr(&entry),
ImmPtr((void*)-1));
MOZ_ASSERT(entry.hasStub());
MOZ_ASSERT(entry.firstStub()->isFallback());
entry.firstStub()->toFallbackStub()->fixupICEntry(&entry);
}
}
JitSpew(JitSpew_Codegen, "Created IonScript %p (raw %p)",

View File

@ -59,6 +59,7 @@ class CodeGenerator : public CodeGeneratorSpecific
bool generate();
bool generateAsmJS(AsmJSFunctionLabels* labels);
bool link(JSContext* cx, CompilerConstraintList* constraints);
bool linkSharedStubs(JSContext* cx);
void visitOsiPoint(LOsiPoint* lir);
void visitGoto(LGoto* lir);
@ -106,6 +107,9 @@ class CodeGenerator : public CodeGeneratorSpecific
void visitOutOfLineRegExpTest(OutOfLineRegExpTest* ool);
void visitRegExpReplace(LRegExpReplace* lir);
void visitStringReplace(LStringReplace* lir);
void emitSharedStub(ICStub::Kind kind, LInstruction* lir);
void visitBinarySharedStub(LBinarySharedStub* lir);
void visitUnarySharedStub(LUnarySharedStub* lir);
void visitLambda(LLambda* lir);
void visitOutOfLineLambdaArrow(OutOfLineLambdaArrow* ool);
void visitLambdaArrow(LLambdaArrow* lir);
@ -479,6 +483,18 @@ class CodeGenerator : public CodeGeneratorSpecific
Vector<CodeOffsetLabel, 0, JitAllocPolicy> ionScriptLabels_;
    // Bookkeeping for one shared-stub IC site emitted during codegen: the
    // stub kind to create at link time, the (pre-fixup) IC entry describing
    // the site, and the offset of the patchable IC load resolved in link().
    struct SharedStub {
        ICStub::Kind kind;      // Fallback stub kind allocated in linkSharedStubs().
        IonICEntry entry;       // IC entry; return offset is fixed up in link().
        CodeOffsetLabel label;  // Offset of the patchable stub-pointer load.

        SharedStub(ICStub::Kind kind, IonICEntry entry, CodeOffsetLabel label)
          : kind(kind), entry(entry), label(label)
        {}
    };

    // All shared-stub sites recorded while generating code for this script.
    Vector<SharedStub, 0, SystemAllocPolicy> sharedStubs_;
void branchIfInvalidated(Register temp, Label* invalidated);
#ifdef DEBUG

78
js/src/jit/ICStubSpace.h Normal file
View File

@ -0,0 +1,78 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sts=4 et sw=4 tw=99:
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef jit_ICStubSpace_h
#define jit_ICStubSpace_h
#include "ds/LifoAlloc.h"
namespace js {
namespace jit {
// ICStubSpace is an abstraction for allocation policy and storage for stub data.
// There are two kinds of stubs: optimized stubs and fallback stubs (the latter
// also includes stubs that can make non-tail calls that can GC).
//
// Optimized stubs are allocated per-compartment and are always purged when
// JIT-code is discarded. Fallback stubs are allocated per BaselineScript and
// are only destroyed when the BaselineScript is destroyed.
// Base allocation policy/storage for IC stub data. Concrete subclasses pick
// the chunk size and the reclamation strategy for the bump allocator.
class ICStubSpace
{
  protected:
    // Backing bump allocator; memory is reclaimed in bulk, never per-stub.
    LifoAlloc allocator_;

    explicit ICStubSpace(size_t chunkSize)
      : allocator_(chunkSize)
    {}

  public:
    // Allocate raw storage for a stub. Returns nullptr on OOM.
    inline void* alloc(size_t size) {
        return allocator_.alloc(size);
    }

    // Declares allocate<T>(...) helpers that construct into alloc()'d memory.
    JS_DECLARE_NEW_METHODS(allocate, alloc, inline)

    // Memory-reporting hook: bytes owned by the underlying allocator.
    size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
        return allocator_.sizeOfExcludingThis(mallocSizeOf);
    }
};
// Space for optimized stubs. Every JitCompartment has a single
// OptimizedICStubSpace.
// Space for optimized stubs; supports freeing all stubs at once when
// JIT code is discarded.
struct OptimizedICStubSpace : public ICStubSpace
{
    // 4 KiB chunks for this space.
    static const size_t STUB_DEFAULT_CHUNK_SIZE = 4 * 1024;

  public:
    OptimizedICStubSpace()
      : ICStubSpace(STUB_DEFAULT_CHUNK_SIZE)
    {}

    // Release every stub in this space in one shot.
    void free() {
        allocator_.freeAll();
    }
};
// Space for fallback stubs. Every BaselineScript has a
// FallbackICStubSpace.
// Space for fallback stubs; ownership of its memory can be transferred
// (e.g. from a compiler's temporary space into the finished script).
struct FallbackICStubSpace : public ICStubSpace
{
    // Small 256-byte chunks: fallback stubs are few per script.
    static const size_t STUB_DEFAULT_CHUNK_SIZE = 256;

  public:
    FallbackICStubSpace()
      : ICStubSpace(STUB_DEFAULT_CHUNK_SIZE)
    {}

    // Take ownership of |other|'s allocations by stealing its chunks.
    inline void adoptFrom(FallbackICStubSpace* other) {
        allocator_.steal(&(other->allocator_));
    }
};
} // namespace jit
} // namespace js
#endif /* jit_ICStubSpace_h */

View File

@ -897,7 +897,8 @@ IonScript::IonScript()
backedgeEntries_(0),
invalidationCount_(0),
recompileInfo_(),
osrPcMismatchCounter_(0)
osrPcMismatchCounter_(0),
fallbackStubSpace_()
{
}
@ -909,7 +910,8 @@ IonScript::New(JSContext* cx, RecompileInfo recompileInfo,
size_t constants, size_t safepointIndices,
size_t osiIndices, size_t cacheEntries,
size_t runtimeSize, size_t safepointsSize,
size_t backedgeEntries, OptimizationLevel optimizationLevel)
size_t backedgeEntries, size_t sharedStubEntries,
OptimizationLevel optimizationLevel)
{
static const int DataAlignment = sizeof(void*);
@ -933,6 +935,8 @@ IonScript::New(JSContext* cx, RecompileInfo recompileInfo,
size_t paddedRuntimeSize = AlignBytes(runtimeSize, DataAlignment);
size_t paddedSafepointSize = AlignBytes(safepointsSize, DataAlignment);
size_t paddedBackedgeSize = AlignBytes(backedgeEntries * sizeof(PatchableBackedge), DataAlignment);
size_t paddedSharedStubSize = AlignBytes(sharedStubEntries * sizeof(IonICEntry), DataAlignment);
size_t bytes = paddedSnapshotsSize +
paddedRecoversSize +
paddedBailoutSize +
@ -942,7 +946,8 @@ IonScript::New(JSContext* cx, RecompileInfo recompileInfo,
paddedCacheEntriesSize +
paddedRuntimeSize +
paddedSafepointSize +
paddedBackedgeSize;
paddedBackedgeSize +
paddedSharedStubSize;
IonScript* script = cx->zone()->pod_malloc_with_extra<IonScript, uint8_t>(bytes);
if (!script)
return nullptr;
@ -991,6 +996,10 @@ IonScript::New(JSContext* cx, RecompileInfo recompileInfo,
script->backedgeEntries_ = backedgeEntries;
offsetCursor += paddedBackedgeSize;
script->sharedStubList_ = offsetCursor;
script->sharedStubEntries_ = sharedStubEntries;
offsetCursor += paddedSharedStubSize;
script->frameSlots_ = frameSlots;
script->argumentSlots_ = argumentSlots;
@ -1002,6 +1011,13 @@ IonScript::New(JSContext* cx, RecompileInfo recompileInfo,
return script;
}
// Move ownership of the fallback stubs allocated during compilation (held in
// |stubSpace|) into this IonScript's own space, so they live and die with
// the script. |stubSpace| is drained by the steal (see adoptFrom).
void
IonScript::adoptFallbackStubs(FallbackICStubSpace* stubSpace)
{
    fallbackStubSpace()->adoptFrom(stubSpace);
}
void
IonScript::trace(JSTracer* trc)
{
@ -1013,6 +1029,12 @@ IonScript::trace(JSTracer* trc)
for (size_t i = 0; i < numConstants(); i++)
TraceEdge(trc, &getConstant(i), "constant");
// Mark all IC stub codes hanging off the IC stub entries.
for (size_t i = 0; i < numSharedStubs(); i++) {
ICEntry& ent = sharedStubList()[i];
ent.trace(trc);
}
}
/* static */ void
@ -1227,6 +1249,64 @@ IonScript::toggleBarriers(bool enabled)
method()->togglePreBarriers(enabled);
}
// Unlink every stub allocated in the per-Zone optimized stub space from this
// script's shared-stub chains. Afterwards each chain holds only stubs in the
// (per-IonScript) fallback space, so discarding the optimized space cannot
// leave dangling stub pointers; the DEBUG pass below verifies this.
void
IonScript::purgeOptimizedStubs(Zone* zone)
{
    for (size_t i = 0; i < numSharedStubs(); i++) {
        ICEntry& entry = sharedStubList()[i];
        if (!entry.hasStub())
            continue;

        // Walk to the last stub on the chain; its kind decides how the
        // chain is purged.
        ICStub* lastStub = entry.firstStub();
        while (lastStub->next())
            lastStub = lastStub->next();

        if (lastStub->isFallback()) {
            // Unlink all stubs allocated in the optimized space.
            // |prev| trails the last stub known to survive, so unlinkStub
            // can splice the current stub out of the chain.
            ICStub* stub = entry.firstStub();
            ICStub* prev = nullptr;
            while (stub->next()) {
                if (!stub->allocatedInFallbackSpace()) {
                    lastStub->toFallbackStub()->unlinkStub(zone, prev, stub);
                    stub = stub->next();
                    continue;
                }
                prev = stub;
                stub = stub->next();
            }

            if (lastStub->isMonitoredFallback()) {
                // Monitor stubs can't make calls, so are always in the
                // optimized stub space.
                ICTypeMonitor_Fallback* lastMonStub =
                    lastStub->toMonitoredFallbackStub()->fallbackMonitorStub();
                lastMonStub->resetMonitorStubChain(zone);
            }
        } else if (lastStub->isTypeMonitor_Fallback()) {
            // A bare type-monitor chain: reset it wholesale.
            lastStub->toTypeMonitor_Fallback()->resetMonitorStubChain(zone);
        } else {
            // The only other expected chain shape ends in a table-switch stub.
            MOZ_ASSERT(lastStub->isTableSwitch());
        }
    }

#ifdef DEBUG
    // All remaining stubs must be allocated in the fallback space.
    for (size_t i = 0; i < numSharedStubs(); i++) {
        ICEntry& entry = sharedStubList()[i];
        if (!entry.hasStub())
            continue;

        ICStub* stub = entry.firstStub();
        while (stub->next()) {
            MOZ_ASSERT(stub->allocatedInFallbackSpace());
            stub = stub->next();
        }
    }
#endif
}
void
IonScript::purgeCaches()
{
@ -2675,6 +2755,7 @@ InvalidateActivation(FreeOp* fop, const JitActivationIterator& activations, bool
// prevent lastJump_ from appearing to be a bogus pointer, just
// in case anyone tries to read it.
ionScript->purgeCaches();
ionScript->purgeOptimizedStubs(script->zone());
// Clean up any pointers from elsewhere in the runtime to this IonScript
// which is about to become disconnected from its JSScript.

View File

@ -15,6 +15,7 @@
#include "gc/Heap.h"
#include "jit/ExecutableAllocator.h"
#include "jit/ICStubSpace.h"
#include "jit/IonOptimizationLevels.h"
#include "jit/IonTypes.h"
#include "js/UbiNode.h"
@ -27,6 +28,7 @@ namespace jit {
class MacroAssembler;
class PatchableBackedge;
class IonBuilder;
class IonICEntry;
typedef Vector<JSObject*, 4, JitAllocPolicy> ObjectVector;
@ -259,6 +261,10 @@ struct IonScript
uint32_t backedgeList_;
uint32_t backedgeEntries_;
// List of entries to the shared stub.
uint32_t sharedStubList_;
uint32_t sharedStubEntries_;
// Number of references from invalidation records.
uint32_t invalidationCount_;
@ -272,6 +278,9 @@ struct IonScript
// a LOOPENTRY pc other than osrPc_.
uint32_t osrPcMismatchCounter_;
// Allocated space for fallback stubs.
FallbackICStubSpace fallbackStubSpace_;
// The tracelogger event used to log the start/stop of this IonScript.
TraceLoggerEvent traceLoggerScriptEvent_;
@ -327,7 +336,8 @@ struct IonScript
size_t constants, size_t safepointIndexEntries,
size_t osiIndexEntries, size_t cacheEntries,
size_t runtimeSize, size_t safepointsSize,
size_t backedgeEntries, OptimizationLevel optimizationLevel);
size_t backedgeEntries, size_t sharedStubEntries,
OptimizationLevel optimizationLevel);
static void Trace(JSTracer* trc, IonScript* script);
static void Destroy(FreeOp* fop, IonScript* script);
@ -486,6 +496,12 @@ struct IonScript
size_t numCaches() const {
return cacheEntries_;
}
    // Array of shared-stub IC entries stored in the script's trailing data
    // buffer (sharedStubList_ is the byte offset into that buffer).
    IonICEntry* sharedStubList() {
        return (IonICEntry*) &bottomBuffer()[sharedStubList_];
    }
    // Number of entries in sharedStubList().
    size_t numSharedStubs() const {
        return sharedStubEntries_;
    }
size_t runtimeSize() const {
return runtimeSize_;
}
@ -556,6 +572,12 @@ struct IonScript
recompiling_ = false;
}
    // Per-IonScript space that owns this script's fallback stubs.
    FallbackICStubSpace* fallbackStubSpace() {
        return &fallbackStubSpace_;
    }
    // Take ownership of fallback stubs allocated during compilation.
    void adoptFallbackStubs(FallbackICStubSpace* stubSpace);
    // Unlink all stubs allocated in the per-Zone optimized stub space,
    // leaving only fallback-space stubs on the shared-stub chains.
    void purgeOptimizedStubs(Zone* zone);
enum ShouldIncreaseAge {
IncreaseAge = true,
KeepAge = false

View File

@ -14,6 +14,7 @@
#include "builtin/TypedObject.h"
#include "jit/CompileInfo.h"
#include "jit/ICStubSpace.h"
#include "jit/IonCode.h"
#include "jit/JitFrames.h"
#include "jit/shared/Assembler-shared.h"
@ -59,66 +60,6 @@ typedef void (*EnterJitCode)(void* code, unsigned argc, Value* argv, Interpreter
class JitcodeGlobalTable;
// ICStubSpace is an abstraction for allocation policy and storage for stub data.
// There are two kinds of stubs: optimized stubs and fallback stubs (the latter
// also includes stubs that can make non-tail calls that can GC).
//
// Optimized stubs are allocated per-compartment and are always purged when
// JIT-code is discarded. Fallback stubs are allocated per BaselineScript and
// are only destroyed when the BaselineScript is destroyed.
class ICStubSpace
{
protected:
LifoAlloc allocator_;
explicit ICStubSpace(size_t chunkSize)
: allocator_(chunkSize)
{}
public:
inline void* alloc(size_t size) {
return allocator_.alloc(size);
}
JS_DECLARE_NEW_METHODS(allocate, alloc, inline)
size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
return allocator_.sizeOfExcludingThis(mallocSizeOf);
}
};
// Space for optimized stubs. Every JitCompartment has a single
// OptimizedICStubSpace.
struct OptimizedICStubSpace : public ICStubSpace
{
static const size_t STUB_DEFAULT_CHUNK_SIZE = 4 * 1024;
public:
OptimizedICStubSpace()
: ICStubSpace(STUB_DEFAULT_CHUNK_SIZE)
{}
void free() {
allocator_.freeAll();
}
};
// Space for fallback stubs. Every BaselineScript has a
// FallbackICStubSpace.
struct FallbackICStubSpace : public ICStubSpace
{
static const size_t STUB_DEFAULT_CHUNK_SIZE = 256;
public:
FallbackICStubSpace()
: ICStubSpace(STUB_DEFAULT_CHUNK_SIZE)
{}
inline void adoptFrom(FallbackICStubSpace* other) {
allocator_.steal(&(other->allocator_));
}
};
// Information about a loop backedge in the runtime, which can be set to
// point to either the loop header or to an OOL interrupt checking stub,
// if signal handlers are being used to implement interrupts.

View File

@ -252,12 +252,12 @@ class LUse : public LAllocation
explicit LUse(FloatRegister reg, bool usedAtStart = false) {
set(FIXED, reg.code(), usedAtStart);
}
LUse(Register reg, uint32_t virtualRegister) {
set(FIXED, reg.code(), false);
LUse(Register reg, uint32_t virtualRegister, bool usedAtStart = false) {
set(FIXED, reg.code(), usedAtStart);
setVirtualRegister(virtualRegister);
}
LUse(FloatRegister reg, uint32_t virtualRegister) {
set(FIXED, reg.code(), false);
LUse(FloatRegister reg, uint32_t virtualRegister, bool usedAtStart = false) {
set(FIXED, reg.code(), usedAtStart);
setVirtualRegister(virtualRegister);
}

View File

@ -30,6 +30,16 @@ LIRGenerator::useBoxAtStart(LInstruction* lir, size_t n, MDefinition* mir, LUse:
return useBox(lir, n, mir, policy, true);
}
// Variant of useBoxFixed that marks the boxed use as used-at-start, pinning
// the value to the registers of |op| from the instruction's start.
void
LIRGenerator::useBoxFixedAtStart(LInstruction* lir, size_t n, MDefinition* mir, ValueOperand op)
{
#if defined(JS_NUNBOX32)
    // 32-bit boxing: a Value spans two registers (type + payload).
    return useBoxFixed(lir, n, mir, op.typeReg(), op.payloadReg(), true);
#elif defined(JS_PUNBOX64)
    // 64-bit boxing: one register holds the whole Value; the second argument
    // is ignored by the 64-bit useBoxFixed implementations.
    return useBoxFixed(lir, n, mir, op.valueReg(), op.scratchReg(), true);
#endif
}
void
LIRGenerator::visitCloneLiteral(MCloneLiteral* ins)
{
@ -2112,6 +2122,38 @@ LIRGenerator::visitStringReplace(MStringReplace* ins)
assignSafepoint(lir, ins);
}
// Lower a binary shared-stub MIR node: both operands are boxed into the
// fixed R0/R1 value operands at start, and the boxed result is defined in
// the JS return register(s) by defineSharedStubReturn.
void
LIRGenerator::visitBinarySharedStub(MBinarySharedStub* ins)
{
    MDefinition* lhs = ins->getOperand(0);
    MDefinition* rhs = ins->getOperand(1);

    // Shared stubs always produce a boxed Value. (The original had this
    // assert copy-pasted twice; one is sufficient.)
    MOZ_ASSERT(ins->type() == MIRType_Value);

    LBinarySharedStub* lir = new(alloc()) LBinarySharedStub();
    useBoxFixedAtStart(lir, LBinarySharedStub::LhsInput, lhs, R0);
    useBoxFixedAtStart(lir, LBinarySharedStub::RhsInput, rhs, R1);

    defineSharedStubReturn(lir, ins);
    // Stubs can GC, so the instruction needs a safepoint.
    assignSafepoint(lir, ins);
}
// Lower a unary shared-stub MIR node: the operand is boxed into the fixed R0
// value operand at start, and the boxed result is defined in the JS return
// register(s) by defineSharedStubReturn.
void
LIRGenerator::visitUnarySharedStub(MUnarySharedStub* ins)
{
    MDefinition* input = ins->getOperand(0);

    // Shared stubs always produce a boxed Value.
    MOZ_ASSERT(ins->type() == MIRType_Value);

    LUnarySharedStub* lir = new(alloc()) LUnarySharedStub();
    useBoxFixedAtStart(lir, LUnarySharedStub::Input, input, R0);
    defineSharedStubReturn(lir, ins);
    // Stubs can GC, so the instruction needs a safepoint.
    assignSafepoint(lir, ins);
}
void
LIRGenerator::visitLambda(MLambda* ins)
{

View File

@ -50,6 +50,7 @@ class LIRGenerator : public LIRGeneratorSpecific
void useBoxAtStart(LInstruction* lir, size_t n, MDefinition* mir,
LUse::Policy policy = LUse::REGISTER);
void useBoxFixedAtStart(LInstruction* lir, size_t n, MDefinition* mir, ValueOperand op);
void lowerBitOp(JSOp op, MInstruction* ins);
void lowerShiftOp(JSOp op, MShiftInstruction* ins);
@ -160,6 +161,8 @@ class LIRGenerator : public LIRGeneratorSpecific
void visitRegExpTest(MRegExpTest* ins);
void visitRegExpReplace(MRegExpReplace* ins);
void visitStringReplace(MStringReplace* ins);
void visitBinarySharedStub(MBinarySharedStub* ins);
void visitUnarySharedStub(MUnarySharedStub* ins);
void visitLambda(MLambda* ins);
void visitLambdaArrow(MLambdaArrow* ins);
void visitKeepAliveObject(MKeepAliveObject* ins);

View File

@ -7066,6 +7066,45 @@ class MOsrReturnValue
}
};
// MIR node for a binary operation performed entirely by a shared IC stub
// rather than specialized Ion code. Both operands and the result are boxed
// Values (see the Box policies below).
class MBinarySharedStub
  : public MBinaryInstruction,
    public MixPolicy<BoxPolicy<0>, BoxPolicy<1> >::Data
{
    explicit MBinarySharedStub(MDefinition* left, MDefinition* right)
      : MBinaryInstruction(left, right)
    {
        // The stub always returns a boxed Value.
        setResultType(MIRType_Value);
    }

  public:
    INSTRUCTION_HEADER(BinarySharedStub)

    static MBinarySharedStub* New(TempAllocator& alloc, MDefinition* left, MDefinition* right)
    {
        return new(alloc) MBinarySharedStub(left, right);
    }
};
// MIR node for a unary operation performed entirely by a shared IC stub
// rather than specialized Ion code. The operand and result are boxed Values.
class MUnarySharedStub
  : public MUnaryInstruction,
    public BoxPolicy<0>::Data
{
    explicit MUnarySharedStub(MDefinition* input)
      : MUnaryInstruction(input)
    {
        // The stub always returns a boxed Value.
        setResultType(MIRType_Value);
    }

  public:
    INSTRUCTION_HEADER(UnarySharedStub)

    static MUnarySharedStub* New(TempAllocator& alloc, MDefinition* input)
    {
        return new(alloc) MUnarySharedStub(input);
    }
};
// Check the current frame for over-recursion past the global stack limit.
class MCheckOverRecursed
: public MNullaryInstruction

View File

@ -49,6 +49,8 @@ namespace jit {
_(OsrReturnValue) \
_(OsrArgumentsObject) \
_(ReturnFromCtor) \
_(BinarySharedStub) \
_(UnarySharedStub) \
_(CheckOverRecursed) \
_(DefVar) \
_(DefFun) \

View File

@ -81,6 +81,14 @@ ICEntry::fallbackStub() const
return firstStub()->getChainFallback();
}
// Trace every stub chained off this IC entry.
void
ICEntry::trace(JSTracer* trc)
{
    // Entries with no stub attached have nothing to trace.
    if (!hasStub())
        return;

    // Walk the stub chain, tracing each stub in turn.
    ICStub* cur = firstStub();
    while (cur) {
        cur->trace(trc);
        cur = cur->next();
    }
}
ICStubConstIterator&
ICStubConstIterator::operator++()
@ -142,7 +150,7 @@ ICStub::updateCode(JitCode* code)
/* static */ void
ICStub::trace(JSTracer* trc)
{
markCode(trc, "baseline-stub-jitcode");
markCode(trc, "shared-stub-jitcode");
// If the stub is a monitored fallback stub, then mark the monitor ICs hanging
// off of that stub. We don't need to worry about the regular monitored stubs,

View File

@ -206,12 +206,12 @@ void TypeFallbackICSpew(JSContext* cx, ICTypeMonitor_Fallback* stub, const char*
#endif
//
// An entry in the Baseline IC descriptor table.
// An entry in the JIT IC descriptor table.
//
class ICEntry
{
private:
// A pointer to the baseline IC stub for this instruction.
// A pointer to the shared IC stub for this instruction.
ICStub* firstStub_;
// Offset from the start of the JIT code where the IC
@ -335,8 +335,27 @@ class ICEntry
inline ICStub** addressOfFirstStub() {
return &firstStub_;
}
void trace(JSTracer* trc);
};
// An IC entry used by Ion shared stubs: extends the base ICEntry with the
// JSScript the entry belongs to.
class IonICEntry : public ICEntry
{
    // Owning script; not traced here (NOTE(review): presumably kept alive by
    // the IonScript that stores this entry — confirm against callers).
    JSScript* script_;

  public:
    IonICEntry(uint32_t pcOffset, Kind kind, JSScript* script)
      : ICEntry(pcOffset, kind),
        script_(script)
    { }

    JSScript* script() {
        return script_;
    }
};
class ICMonitoredStub;
class ICMonitoredFallbackStub;
class ICUpdatedStub;
@ -782,7 +801,7 @@ class ICFallbackStub : public ICStub
// The icEntry and lastStubPtrAddr_ fields can't be initialized when the stub is
// created since the stub is created at compile time, and we won't know the IC entry
// address until after compile when the BaselineScript is created. This method
// address until after compile when the JitScript is created. This method
// allows these fields to be fixed up at that point.
void fixupICEntry(ICEntry* icEntry) {
MOZ_ASSERT(icEntry_ == nullptr);

View File

@ -1195,6 +1195,7 @@ FilterTypeSetPolicy::adjustInputs(TempAllocator& alloc, MInstruction* ins)
_(MixPolicy<SimdSameAsReturnedTypePolicy<0>, SimdScalarPolicy<1> >) \
_(MixPolicy<StringPolicy<0>, IntPolicy<1> >) \
_(MixPolicy<StringPolicy<0>, StringPolicy<1> >) \
_(MixPolicy<BoxPolicy<0>, BoxPolicy<1> >) \
_(NoFloatPolicy<0>) \
_(NoFloatPolicyAfter<1>) \
_(NoFloatPolicyAfter<2>) \

View File

@ -19,14 +19,14 @@ using mozilla::FloorLog2;
void
LIRGeneratorARM::useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1,
Register reg2)
Register reg2, bool useAtStart)
{
MOZ_ASSERT(mir->type() == MIRType_Value);
MOZ_ASSERT(reg1 != reg2);
ensureDefined(mir);
lir->setOperand(n, LUse(reg1, mir->virtualRegister()));
lir->setOperand(n + 1, LUse(reg2, VirtualRegisterOfPayload(mir)));
lir->setOperand(n, LUse(reg1, mir->virtualRegister(), useAtStart));
lir->setOperand(n + 1, LUse(reg2, VirtualRegisterOfPayload(mir), useAtStart));
}
LAllocation

View File

@ -22,7 +22,8 @@ class LIRGeneratorARM : public LIRGeneratorShared
protected:
// Adds a box input to an instruction, setting operand |n| to the type and
// |n+1| to the payload.
void useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register reg2);
void useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register reg2,
bool useAtStart = false);
// x86 has constraints on what registers can be formatted for 1-byte
// stores and loads; on ARM all registers are okay.

View File

@ -18,7 +18,7 @@ using namespace js::jit;
using mozilla::FloorLog2;
void
LIRGeneratorARM64::useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register)
LIRGeneratorARM64::useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register, bool useAtStart)
{
MOZ_CRASH("useBoxFixed");
}

View File

@ -22,7 +22,8 @@ class LIRGeneratorARM64 : public LIRGeneratorShared
protected:
// Adds a box input to an instruction, setting operand |n| to the type and
// |n+1| to the payload.
void useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register reg2);
void useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register reg2,
bool useAtStart = false);
LAllocation useByteOpRegister(MDefinition* mir);
LAllocation useByteOpRegisterOrNonDoubleConstant(MDefinition* mir);

View File

@ -20,14 +20,14 @@ using mozilla::FloorLog2;
void
LIRGeneratorMIPS::useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1,
Register reg2)
Register reg2, bool useAtStart)
{
MOZ_ASSERT(mir->type() == MIRType_Value);
MOZ_ASSERT(reg1 != reg2);
ensureDefined(mir);
lir->setOperand(n, LUse(reg1, mir->virtualRegister()));
lir->setOperand(n + 1, LUse(reg2, VirtualRegisterOfPayload(mir)));
lir->setOperand(n, LUse(reg1, mir->virtualRegister(), useAtStart));
lir->setOperand(n + 1, LUse(reg2, VirtualRegisterOfPayload(mir), useAtStart));
}
LAllocation

View File

@ -22,7 +22,8 @@ class LIRGeneratorMIPS : public LIRGeneratorShared
protected:
// Adds a box input to an instruction, setting operand |n| to the type and
// |n+1| to the payload.
void useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register reg2);
void useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register reg2,
bool useAtStart = false);
// x86 has constraints on what registers can be formatted for 1-byte
// stores and loads; on MIPS all registers are okay.

View File

@ -21,7 +21,7 @@ class LIRGeneratorNone : public LIRGeneratorShared
MOZ_CRASH();
}
void useBoxFixed(LInstruction*, size_t, MDefinition*, Register, Register) { MOZ_CRASH(); }
void useBoxFixed(LInstruction*, size_t, MDefinition*, Register, Register, bool useAtStart = false) { MOZ_CRASH(); }
LAllocation useByteOpRegister(MDefinition*) { MOZ_CRASH(); }
LAllocation useByteOpRegisterOrNonDoubleConstant(MDefinition*) { MOZ_CRASH(); }

View File

@ -55,6 +55,7 @@ CodeGeneratorShared::CodeGeneratorShared(MIRGenerator* gen, LIRGraph* graph, Mac
lastOsiPointOffset_(0),
safepoints_(graph->totalSlotCount(), (gen->info().nargs() + 1) * sizeof(Value)),
returnLabel_(),
stubSpace_(),
nativeToBytecodeMap_(nullptr),
nativeToBytecodeMapSize_(0),
nativeToBytecodeTableOffset_(0),

View File

@ -85,6 +85,11 @@ class CodeGeneratorShared : public LElementVisitor
Label invalidate_;
CodeOffsetLabel invalidateEpilogueData_;
// Label for the common return path.
NonAssertingLabel returnLabel_;
FallbackICStubSpace stubSpace_;
js::Vector<SafepointIndex, 0, SystemAllocPolicy> safepointIndices_;
js::Vector<OsiIndex, 0, SystemAllocPolicy> osiIndices_;
@ -105,9 +110,6 @@ class CodeGeneratorShared : public LElementVisitor
js::Vector<CodeOffsetLabel, 0, SystemAllocPolicy> patchableTLScripts_;
#endif
// Label for the common return path.
NonAssertingLabel returnLabel_;
public:
struct NativeToBytecode {
CodeOffsetLabel nativeOffset;

View File

@ -3920,6 +3920,31 @@ class LStringReplace: public LStrReplace
}
};
// LIR for a binary shared-stub invocation: two boxed Value operands in and a
// boxed Value result out. Modeled as a call instruction (LCallInstructionHelper).
class LBinarySharedStub : public LCallInstructionHelper<BOX_PIECES, 2 * BOX_PIECES, 0>
{
  public:
    LIR_HEADER(BinarySharedStub)

    const MBinarySharedStub* mir() const {
        return mir_->toBinarySharedStub();
    }

    // Operand start indices: each boxed input occupies BOX_PIECES slots.
    static const size_t LhsInput = 0;
    static const size_t RhsInput = BOX_PIECES;
};
// LIR for a unary shared-stub invocation: one boxed Value operand in and a
// boxed Value result out. Modeled as a call instruction (LCallInstructionHelper).
class LUnarySharedStub : public LCallInstructionHelper<BOX_PIECES, BOX_PIECES, 0>
{
  public:
    LIR_HEADER(UnarySharedStub)

    const MUnarySharedStub* mir() const {
        return mir_->toUnarySharedStub();
    }

    // Operand start index of the single boxed input.
    static const size_t Input = 0;
};
class LLambdaForSingleton : public LCallInstructionHelper<1, 1, 0>
{
public:

View File

@ -193,6 +193,8 @@
_(RegExpReplace) \
_(StringReplace) \
_(Substr) \
_(BinarySharedStub) \
_(UnarySharedStub) \
_(Lambda) \
_(LambdaArrow) \
_(LambdaForSingleton) \

View File

@ -108,6 +108,30 @@ LIRGeneratorShared::defineBox(LInstructionHelper<BOX_PIECES, Ops, Temps>* lir, M
add(lir);
}
// Define the output of a shared-stub LIR instruction as the fixed JS return
// register(s): shared stubs always hand back a boxed Value there.
void
LIRGeneratorShared::defineSharedStubReturn(LInstruction* lir, MDefinition* mir)
{
    lir->setMir(mir);

    // Only the shared-stub instructions use this definition helper.
    MOZ_ASSERT(lir->isBinarySharedStub() || lir->isUnarySharedStub());
    MOZ_ASSERT(mir->type() == MIRType_Value);

    uint32_t vreg = getVirtualRegister();

#if defined(JS_NUNBOX32)
    // 32-bit: the Value is split across the type and data return registers,
    // consuming two consecutive virtual registers.
    lir->setDef(TYPE_INDEX, LDefinition(vreg + VREG_TYPE_OFFSET, LDefinition::TYPE,
                                        LGeneralReg(JSReturnReg_Type)));
    lir->setDef(PAYLOAD_INDEX, LDefinition(vreg + VREG_DATA_OFFSET, LDefinition::PAYLOAD,
                                           LGeneralReg(JSReturnReg_Data)));
    // Reserve the second virtual register of the pair.
    getVirtualRegister();
#elif defined(JS_PUNBOX64)
    // 64-bit: the whole boxed Value fits in the single JS return register.
    lir->setDef(0, LDefinition(vreg, LDefinition::BOX, LGeneralReg(JSReturnReg)));
#endif

    mir->setVirtualRegister(vreg);
    add(lir);
}
void
LIRGeneratorShared::defineReturn(LInstruction* lir, MDefinition* mir)
{

View File

@ -142,6 +142,7 @@ class LIRGeneratorShared : public MDefinitionVisitor
inline void defineBox(LInstructionHelper<BOX_PIECES, Ops, Temps>* lir, MDefinition* mir,
LDefinition::Policy policy = LDefinition::REGISTER);
inline void defineSharedStubReturn(LInstruction* lir, MDefinition* mir);
inline void defineReturn(LInstruction* lir, MDefinition* mir);
template <size_t X>

View File

@ -15,12 +15,12 @@ using namespace js;
using namespace js::jit;
void
LIRGeneratorX64::useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register)
LIRGeneratorX64::useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register, bool useAtStart)
{
MOZ_ASSERT(mir->type() == MIRType_Value);
ensureDefined(mir);
lir->setOperand(n, LUse(reg1, mir->virtualRegister()));
lir->setOperand(n, LUse(reg1, mir->virtualRegister(), useAtStart));
}
LAllocation

View File

@ -24,7 +24,7 @@ class LIRGeneratorX64 : public LIRGeneratorX86Shared
void defineUntypedPhi(MPhi* phi, size_t lirIndex);
// Adds a use at operand |n| of a value-typed insturction.
void useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register);
void useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register, bool useAtStart = false);
// x86 has constraints on what registers can be formatted for 1-byte
// stores and loads; on x64 all registers are okay.

View File

@ -16,14 +16,14 @@ using namespace js::jit;
void
LIRGeneratorX86::useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1,
Register reg2)
Register reg2, bool useAtStart)
{
MOZ_ASSERT(mir->type() == MIRType_Value);
MOZ_ASSERT(reg1 != reg2);
ensureDefined(mir);
lir->setOperand(n, LUse(reg1, mir->virtualRegister()));
lir->setOperand(n + 1, LUse(reg2, VirtualRegisterOfPayload(mir)));
lir->setOperand(n, LUse(reg1, mir->virtualRegister(), useAtStart));
lir->setOperand(n + 1, LUse(reg2, VirtualRegisterOfPayload(mir), useAtStart));
}
LAllocation

View File

@ -22,7 +22,8 @@ class LIRGeneratorX86 : public LIRGeneratorX86Shared
protected:
// Adds a box input to an instruction, setting operand |n| to the type and
// |n+1| to the payload.
void useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register reg2);
void useBoxFixed(LInstruction* lir, size_t n, MDefinition* mir, Register reg1, Register reg2,
bool useAtStart = false);
// It's a trap! On x86, the 1-byte store can only use one of
// {al,bl,cl,dl,ah,bh,ch,dh}. That means if the register allocator