Bug 998490 - OdinMonkey: make asm.js frames show up in FrameIter (r=dougc)

--HG--
extra : rebase_source : 8ed09da1be0afda32c1f669a6736b266019a10ae
Luke Wagner 2014-04-16 18:46:03 -05:00
parent aadd9c9a70
commit 1fcf7c85b2
19 changed files with 611 additions and 154 deletions
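
For context, the user-visible effect of this patch, as a minimal sketch using the same jit-test helpers (asmCompile, asmLink, USE_ASM) as the test below; the two-function module here is hypothetical. An FFI callback that captures new Error().stack now sees every asm.js frame by name, not just the exported entry function:

load(libdir + "asm.js");

var stack;
function dumpStack() { stack = new Error().stack; }

var code = asmCompile('global', 'ffis', USE_ASM +
    "var ffi = ffis.ffi; \
     function g() { return ffi()|0 } \
     function f() { return g()|0 } \
     return f");
var f = asmLink(code, null, { ffi: dumpStack });

f();
// Previously an AsmJSActivation exposed only its exported entry function;
// now the internal frame for 'g' is visible as well.
assertEq(stack.indexOf("g") === -1, false);
assertEq(stack.indexOf("f") === -1, false);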

View File

@ -1,38 +1,93 @@
load(libdir + "asm.js");
load(libdir + "asserts.js");
var callFFI1 = asmCompile('global', 'ffis', USE_ASM + "var ffi=ffis.ffi; function asmfun1() { return ffi(1)|0 } return asmfun1");
var callFFI2 = asmCompile('global', 'ffis', USE_ASM + "var ffi=ffis.ffi; function asmfun2() { return ffi(2)|0 } return asmfun2");
function matchStack(stackString, stackArray)
{
var match = 0;
for (name of stackArray) {
match = stackString.indexOf(name, match);
if (match === -1)
throw name + " not found in the stack " + stack;
}
}
var stack;
function dumpStack(i) { stack = new Error().stack; return i+11 }
function dumpStack()
{
stack = new Error().stack
}
var asmfun1 = asmLink(callFFI1, null, {ffi:dumpStack});
assertEq(asmfun1(), 12);
assertEq(stack.indexOf("asmfun1") === -1, false);
var callFFI = asmCompile('global', 'ffis', USE_ASM + "var ffi=ffis.ffi; function f() { return ffi()|0 } return f");
var asmfun2 = asmLink(callFFI2, null, {ffi:function ffi(i){return asmfun1()+20}});
assertEq(asmfun2(), 32);
assertEq(stack.indexOf("asmfun1") == -1, false);
assertEq(stack.indexOf("asmfun2") == -1, false);
assertEq(stack.indexOf("asmfun2") > stack.indexOf("asmfun1"), true);
var f = asmLink(callFFI, null, {ffi:dumpStack});
for (var i = 0; i < 5000; i++) {
stack = null;
f();
matchStack(stack, ['dumpStack', 'f']);
}
if (isAsmJSCompilationAvailable() && isCachingEnabled()) {
var callFFI = asmCompile('global', 'ffis', USE_ASM + "var ffi=ffis.ffi; function f() { return ffi()|0 } return f");
assertEq(isAsmJSModuleLoadedFromCache(callFFI), true);
stack = null;
f();
matchStack(stack, ['dumpStack', 'f']);
}
var f1 = asmLink(callFFI, null, {ffi:dumpStack});
var f2 = asmLink(callFFI, null, {ffi:function middle() { f1() }});
stack = null;
(function outer() { f2() })();
matchStack(stack, ["dumpStack", "f", "middle", "f"]);
function returnStackDumper() { return { valueOf:function() { stack = new Error().stack } } }
var f = asmLink(callFFI, null, {ffi:returnStackDumper});
for (var i = 0; i < 5000; i++) {
stack = null;
f();
matchStack(stack, ['valueOf', 'f']);
}
var caught = false;
try {
stack = null;
asmLink(asmCompile(USE_ASM + "function asmRec() { asmRec() } return asmRec"))();
} catch (e) {
caught = true;
matchStack(e.stack, ['asmRec', 'asmRec', 'asmRec', 'asmRec']);
}
assertEq(caught, true);
var caught = false;
try {
callFFI1(null, {ffi:Object.preventExtensions})();
callFFI(null, {ffi:Object.preventExtensions})();
} catch (e) {
caught = true;
}
assertEq(caught, true);
assertEq(asmLink(callFFI1, null, {ffi:eval})(), 1);
assertEq(asmLink(callFFI1, null, {ffi:Function})(), 0);
assertEq(asmLink(callFFI1, null, {ffi:Error})(), 0);
asmLink(callFFI, null, {ffi:eval})();
asmLink(callFFI, null, {ffi:Function})();
asmLink(callFFI, null, {ffi:Error})();
var manyCalls = asmCompile('global', 'ffis',
USE_ASM +
"var ffi=ffis.ffi;\
function f1(a,b,c,d,e,f,g,h,i,j,k) { \
a=a|0;b=b|0;c=c|0;d=d|0;e=e|0;f=f|0;g=g|0;h=h|0;i=i|0;j=j|0;k=k|0; \
ffi(); \
return (a+b+c+d+e+f+g+h+i+j+k)|0; \
} \
function f2() { \
return f1(1,2,3,4,5,6,7,8,f1(1,2,3,4,5,6,7,8,9,10,11)|0,10,11)|0; \
} \
function f3() { return 13 } \
function f4(i) { \
i=i|0; \
return TBL[i&3]()|0; \
} \
var TBL=[f3, f3, f2, f3]; \
return f4;");
stack = null;
assertEq(asmLink(manyCalls, null, {ffi:dumpStack})(2), 123);
matchStack(stack, ['dumpStack', 'f1', 'f2', 'f4']);

View File

@ -1089,9 +1089,9 @@ class MOZ_STACK_CLASS ModuleCompiler
~ModuleCompiler() {
if (errorString_) {
JS_ASSERT(errorOffset_ != UINT32_MAX);
parser_.tokenStream.reportAsmJSError(errorOffset_,
JSMSG_USE_ASM_TYPE_FAIL,
errorString_);
tokenStream().reportAsmJSError(errorOffset_,
JSMSG_USE_ASM_TYPE_FAIL,
errorString_);
js_free(errorString_);
}
if (errorOverRecursed_)
@ -1140,7 +1140,7 @@ class MOZ_STACK_CLASS ModuleCompiler
}
uint32_t funcStart = parser_.pc->maybeFunction->pn_body->pn_pos.begin;
uint32_t offsetToEndOfUseAsm = parser_.tokenStream.currentToken().pos.end;
uint32_t offsetToEndOfUseAsm = tokenStream().currentToken().pos.end;
// "use strict" should be added to the source if we are in an implicit
// strict context, see also comment above addUseStrict in
@ -1172,14 +1172,14 @@ class MOZ_STACK_CLASS ModuleCompiler
// Since pn is typically only null under OOM, this suppression simply forces any GC to be
// delayed until the compilation is off the stack and more memory can be freed.
gc::AutoSuppressGC nogc(cx_);
return failOffset(parser_.tokenStream.peekTokenPos().begin, str);
return failOffset(tokenStream().peekTokenPos().begin, str);
}
bool failfVA(ParseNode *pn, const char *fmt, va_list ap) {
JS_ASSERT(!errorString_);
JS_ASSERT(errorOffset_ == UINT32_MAX);
JS_ASSERT(fmt);
errorOffset_ = pn ? pn->pn_pos.begin : parser_.tokenStream.currentToken().pos.end;
errorOffset_ = pn ? pn->pn_pos.begin : tokenStream().currentToken().pos.end;
errorString_ = JS_vsmprintf(fmt, ap);
return false;
}
@ -1214,7 +1214,7 @@ class MOZ_STACK_CLASS ModuleCompiler
SlowFunction sf;
sf.name = func.name();
sf.ms = func.compileTime();
parser_.tokenStream.srcCoords.lineNumAndColumnIndex(func.srcOffset(), &sf.line, &sf.column);
tokenStream().srcCoords.lineNumAndColumnIndex(func.srcOffset(), &sf.line, &sf.column);
return slowFunctions_.append(sf);
}
@ -1222,6 +1222,7 @@ class MOZ_STACK_CLASS ModuleCompiler
ExclusiveContext *cx() const { return cx_; }
AsmJSParser &parser() const { return parser_; }
TokenStream &tokenStream() const { return parser_.tokenStream; }
MacroAssembler &masm() { return masm_; }
Label &stackOverflowLabel() { return stackOverflowLabel_; }
Label &interruptLabel() { return interruptLabel_; }
@ -1395,11 +1396,8 @@ class MOZ_STACK_CLASS ModuleCompiler
for (unsigned i = 0; i < args.length(); i++)
argCoercions[i] = args[i].toCoercion();
AsmJSModule::ReturnType retType = func->sig().retType().toModuleReturnType();
uint32_t line, column;
parser_.tokenStream.srcCoords.lineNumAndColumnIndex(func->srcOffset(), &line, &column);
return module_->addExportedFunction(func->name(), line, column,
func->srcOffset(), func->endOffset(), maybeFieldName,
Move(argCoercions), retType);
return module_->addExportedFunction(func->name(), func->srcOffset(), func->endOffset(),
maybeFieldName, Move(argCoercions), retType);
}
bool addExit(unsigned ffiIndex, PropertyName *name, Signature &&sig, unsigned *exitIndex) {
ExitDescriptor exitDescriptor(name, Move(sig));
@ -1412,6 +1410,9 @@ class MOZ_STACK_CLASS ModuleCompiler
return false;
return exits_.add(p, Move(exitDescriptor), *exitIndex);
}
bool addFunctionName(PropertyName *name, uint32_t *index) {
return module_->addFunctionName(name, index);
}
// Note a constraint on the minimum size of the heap. The heap size is
// constrained when linking to be at least the maximum of all such constraints.
@ -1428,7 +1429,7 @@ class MOZ_STACK_CLASS ModuleCompiler
#if defined(MOZ_VTUNE) || defined(JS_ION_PERF)
bool trackProfiledFunction(const Func &func, unsigned endCodeOffset) {
unsigned lineno = 0U, columnIndex = 0U;
parser().tokenStream.srcCoords.lineNumAndColumnIndex(func.srcOffset(), &lineno, &columnIndex);
tokenStream().srcCoords.lineNumAndColumnIndex(func.srcOffset(), &lineno, &columnIndex);
unsigned startCodeOffset = func.code()->offset();
return module_->trackProfiledFunction(func.name(), startCodeOffset, endCodeOffset,
lineno, columnIndex);
@ -1501,12 +1502,13 @@ class MOZ_STACK_CLASS ModuleCompiler
bool finish(ScopedJSDeletePtr<AsmJSModule> *module)
{
module_->initFuncEnd(parser_.tokenStream.currentToken().pos.end,
parser_.tokenStream.peekTokenPos().end);
module_->initFuncEnd(tokenStream().currentToken().pos.end,
tokenStream().peekTokenPos().end);
masm_.finish();
if (masm_.oom())
return false;
module_->assignCallSites(masm_.extractCallSites());
module_->assignHeapAccesses(masm_.extractAsmJSHeapAccesses());
#if defined(JS_CODEGEN_ARM)
@ -1516,6 +1518,10 @@ class MOZ_STACK_CLASS ModuleCompiler
AsmJSHeapAccess &a = module_->heapAccess(i);
a.setOffset(masm_.actualOffset(a.offset()));
}
for (unsigned i = 0; i < module_->numCallSites(); i++) {
CallSite &c = module_->callSite(i);
c.setReturnAddressOffset(masm_.actualOffset(c.returnAddressOffset()));
}
#endif
// The returned memory is owned by module_.
@ -1591,7 +1597,7 @@ class MOZ_STACK_CLASS ModuleCompiler
// address of the global. Globals are allocated sequentially after the
// code section so we can just use a RelativeLink.
for (unsigned i = 0; i < masm_.numAsmJSGlobalAccesses(); i++) {
AsmJSGlobalAccess a = masm_.asmJSGlobalAccesses(i);
AsmJSGlobalAccess a = masm_.asmJSGlobalAccess(i);
AsmJSModule::RelativeLink link;
link.patchAtOffset = masm_.labelOffsetToPatchOffset(a.patchAt.offset());
link.targetOffset = module_->offsetOfGlobalData() + a.globalDataOffset;
@ -1887,6 +1893,7 @@ class FunctionCompiler
ModuleCompiler & m_;
LifoAlloc & lifo_;
ParseNode * fn_;
uint32_t functionNameIndex_;
LocalMap locals_;
VarInitializerVector varInitializers_;
@ -1907,11 +1914,15 @@ class FunctionCompiler
LabeledBlockMap labeledBreaks_;
LabeledBlockMap labeledContinues_;
static const uint32_t NO_FUNCTION_NAME_INDEX = UINT32_MAX;
JS_STATIC_ASSERT(NO_FUNCTION_NAME_INDEX > CallSiteDesc::FUNCTION_NAME_INDEX_MAX);
public:
FunctionCompiler(ModuleCompiler &m, ParseNode *fn, LifoAlloc &lifo)
: m_(m),
lifo_(lifo),
fn_(fn),
functionNameIndex_(NO_FUNCTION_NAME_INDEX),
locals_(m.cx()),
varInitializers_(m.cx()),
alloc_(nullptr),
@ -2261,6 +2272,7 @@ class FunctionCompiler
class Call
{
ParseNode *node_;
ABIArgGenerator abi_;
uint32_t prevMaxStackBytes_;
uint32_t maxChildStackBytes_;
@ -2273,15 +2285,16 @@ class FunctionCompiler
friend class FunctionCompiler;
public:
Call(FunctionCompiler &f, RetType retType)
: prevMaxStackBytes_(0),
Call(FunctionCompiler &f, ParseNode *callNode, RetType retType)
: node_(callNode),
prevMaxStackBytes_(0),
maxChildStackBytes_(0),
spIncrement_(0),
sig_(f.m().lifo(), retType),
regArgs_(f.cx()),
stackArgs_(f.cx()),
childClobbers_(false)
{}
{ }
Signature &sig() { return sig_; }
const Signature &sig() const { return sig_; }
};
@ -2347,10 +2360,21 @@ class FunctionCompiler
*def = nullptr;
return true;
}
MAsmJSCall *ins = MAsmJSCall::New(alloc(), callee, call.regArgs_, returnType,
uint32_t line, column;
m_.tokenStream().srcCoords.lineNumAndColumnIndex(call.node_->pn_pos.begin, &line, &column);
if (functionNameIndex_ == NO_FUNCTION_NAME_INDEX) {
if (!m_.addFunctionName(FunctionName(fn_), &functionNameIndex_))
return false;
}
CallSiteDesc desc(line, column, functionNameIndex_);
MAsmJSCall *ins = MAsmJSCall::New(alloc(), desc, callee, call.regArgs_, returnType,
call.spIncrement_);
if (!ins)
return false;
curBlock_->add(ins);
*def = ins;
return true;
@ -2751,7 +2775,7 @@ class FunctionCompiler
#if defined(JS_ION_PERF)
if (pn) {
unsigned line = 0U, column = 0U;
m().parser().tokenStream.srcCoords.lineNumAndColumnIndex(pn->pn_pos.begin, &line, &column);
m().tokenStream().srcCoords.lineNumAndColumnIndex(pn->pn_pos.begin, &line, &column);
blk->setLineno(line);
blk->setColumnIndex(column);
}
@ -3866,7 +3890,7 @@ static bool
CheckInternalCall(FunctionCompiler &f, ParseNode *callNode, PropertyName *calleeName,
RetType retType, MDefinition **def, Type *type)
{
FunctionCompiler::Call call(f, retType);
FunctionCompiler::Call call(f, callNode, retType);
if (!CheckCallArgs(f, callNode, CheckIsVarType, &call))
return false;
@ -3942,7 +3966,7 @@ CheckFuncPtrCall(FunctionCompiler &f, ParseNode *callNode, RetType retType, MDef
if (!indexType.isIntish())
return f.failf(indexNode, "%s is not a subtype of intish", indexType.toChars());
FunctionCompiler::Call call(f, retType);
FunctionCompiler::Call call(f, callNode, retType);
if (!CheckCallArgs(f, callNode, CheckIsVarType, &call))
return false;
@ -3975,7 +3999,7 @@ CheckFFICall(FunctionCompiler &f, ParseNode *callNode, unsigned ffiIndex, RetTyp
if (retType == RetType::Float)
return f.fail(callNode, "FFI calls can't return float");
FunctionCompiler::Call call(f, retType);
FunctionCompiler::Call call(f, callNode, retType);
if (!CheckCallArgs(f, callNode, CheckIsExternType, &call))
return false;
@ -4100,7 +4124,7 @@ CheckMathBuiltinCall(FunctionCompiler &f, ParseNode *callNode, AsmJSMathBuiltinF
if (retType != RetType::Double && retType != RetType::Float)
return f.failf(callNode, "return type of math function is double or float, used as %s", retType.toType().toChars());
FunctionCompiler::Call call(f, retType);
FunctionCompiler::Call call(f, callNode, retType);
if (retType == RetType::Float && !CheckCallArgs(f, callNode, CheckIsMaybeFloat, &call))
return false;
if (retType == RetType::Double && !CheckCallArgs(f, callNode, CheckIsMaybeDouble, &call))
@ -5302,7 +5326,7 @@ CheckStatement(FunctionCompiler &f, ParseNode *stmt, LabelVector *maybeLabels)
static bool
ParseFunction(ModuleCompiler &m, ParseNode **fnOut)
{
TokenStream &tokenStream = m.parser().tokenStream;
TokenStream &tokenStream = m.tokenStream();
DebugOnly<TokenKind> tk = tokenStream.getToken();
JS_ASSERT(tk == TOK_FUNCTION);
@ -5342,7 +5366,7 @@ ParseFunction(ModuleCompiler &m, ParseNode **fnOut)
AsmJSParseContext funpc(&m.parser(), outerpc, fn, funbox, &newDirectives,
outerpc->staticLevel + 1, outerpc->blockidGen,
/* blockScopeDepth = */ 0);
if (!funpc.init(m.parser().tokenStream))
if (!funpc.init(tokenStream))
return false;
if (!m.parser().functionArgsAndBodyGeneric(fn, fun, Normal, Statement, &newDirectives))
@ -6022,6 +6046,16 @@ StackDecrementForCall(MacroAssembler &masm, const VectorT &argTypes, unsigned ex
static const unsigned FramePushedAfterSave = NonVolatileRegs.gprs().size() * sizeof(intptr_t) +
NonVolatileRegs.fpus().size() * sizeof(double);
// On ARM, we need to include an extra word of space at the top of the stack so
// we can explicitly store the return address before making the call to C++ or
// Ion. On x86/x64, this isn't necessary since the call instruction pushes the
// return address.
#ifdef JS_CODEGEN_ARM
static const unsigned MaybeRetAddr = sizeof(void*);
#else
static const unsigned MaybeRetAddr = 0;
#endif
static bool
GenerateEntry(ModuleCompiler &m, const AsmJSModule::ExportedFunction &exportedFunc)
{
@ -6101,7 +6135,7 @@ GenerateEntry(ModuleCompiler &m, const AsmJSModule::ExportedFunction &exportedFu
// Call into the real function.
AssertStackAlignment(masm);
masm.call(func.code());
masm.call(CallSiteDesc::Entry(), func.code());
// Pop the stack and recover the original 'argv' argument passed to the
// trampoline (which was pushed on the stack).
@ -6295,15 +6329,15 @@ GenerateFFIInterpreterExit(ModuleCompiler &m, const ModuleCompiler::ExitDescript
MIRTypeVector invokeArgTypes(m.cx());
invokeArgTypes.infallibleAppend(typeArray, ArrayLength(typeArray));
// Reserve space for a call to InvokeFromAsmJS_* and an array of values
// passed to this FFI call.
// The stack layout looks like:
// | return address | stack arguments | array of values |
unsigned arraySize = Max<size_t>(1, exit.sig().args().length()) * sizeof(Value);
unsigned stackDec = StackDecrementForCall(masm, invokeArgTypes, arraySize);
unsigned stackDec = StackDecrementForCall(masm, invokeArgTypes, arraySize + MaybeRetAddr);
masm.reserveStack(stackDec);
// Fill the argument array.
unsigned offsetToCallerStackArgs = AlignmentAtPrologue + masm.framePushed();
unsigned offsetToArgv = StackArgBytes(invokeArgTypes);
unsigned offsetToArgv = StackArgBytes(invokeArgTypes) + MaybeRetAddr;
Register scratch = ABIArgGenerator::NonArgReturnVolatileReg0;
FillArgumentArray(m, exit.sig().args(), offsetToArgv, offsetToCallerStackArgs, scratch);
@ -6312,6 +6346,9 @@ GenerateFFIInterpreterExit(ModuleCompiler &m, const ModuleCompiler::ExitDescript
Register activation = ABIArgGenerator::NonArgReturnVolatileReg1;
LoadAsmJSActivationIntoRegister(masm, activation);
// Record sp in the AsmJSActivation for stack-walking.
masm.storePtr(StackPointer, Address(activation, AsmJSActivation::offsetOfExitSP()));
// argument 0: cx
if (i->kind() == ABIArg::GPR) {
LoadJSContextFromActivation(masm, activation, i->gpr());
@ -6351,16 +6388,16 @@ GenerateFFIInterpreterExit(ModuleCompiler &m, const ModuleCompiler::ExitDescript
AssertStackAlignment(masm);
switch (exit.sig().retType().which()) {
case RetType::Void:
masm.call(AsmJSImm_InvokeFromAsmJS_Ignore);
masm.callExit(AsmJSImm_InvokeFromAsmJS_Ignore, i.stackBytesConsumedSoFar());
masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
break;
case RetType::Signed:
masm.call(AsmJSImm_InvokeFromAsmJS_ToInt32);
masm.callExit(AsmJSImm_InvokeFromAsmJS_ToInt32, i.stackBytesConsumedSoFar());
masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
masm.unboxInt32(argv, ReturnReg);
break;
case RetType::Double:
masm.call(AsmJSImm_InvokeFromAsmJS_ToNumber);
masm.callExit(AsmJSImm_InvokeFromAsmJS_ToNumber, i.stackBytesConsumedSoFar());
masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
masm.loadDouble(argv, ReturnFloatReg);
break;
@ -6389,16 +6426,20 @@ GenerateOOLConvert(ModuleCompiler &m, RetType retType, Label *throwLabel)
// the stack usage here needs to be kept in sync with GenerateFFIIonExit.
// Store value
unsigned offsetToArgv = StackArgBytes(callArgTypes);
unsigned offsetToArgv = StackArgBytes(callArgTypes) + MaybeRetAddr;
masm.storeValue(JSReturnOperand, Address(StackPointer, offsetToArgv));
Register scratch = ABIArgGenerator::NonArgReturnVolatileReg0;
Register activation = ABIArgGenerator::NonArgReturnVolatileReg1;
LoadAsmJSActivationIntoRegister(masm, activation);
// Record sp in the AsmJSActivation for stack-walking.
masm.storePtr(StackPointer, Address(activation, AsmJSActivation::offsetOfExitSP()));
// Store real arguments
ABIArgMIRTypeIter i(callArgTypes);
Register scratch = ABIArgGenerator::NonArgReturnVolatileReg0;
// argument 0: cx
Register activation = ABIArgGenerator::NonArgReturnVolatileReg1;
LoadAsmJSActivationIntoRegister(masm, activation);
if (i->kind() == ABIArg::GPR) {
LoadJSContextFromActivation(masm, activation, i->gpr());
} else {
@ -6422,17 +6463,17 @@ GenerateOOLConvert(ModuleCompiler &m, RetType retType, Label *throwLabel)
AssertStackAlignment(masm);
switch (retType.which()) {
case RetType::Signed:
masm.call(AsmJSImm_CoerceInPlace_ToInt32);
masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
masm.unboxInt32(Address(StackPointer, offsetToArgv), ReturnReg);
break;
masm.callExit(AsmJSImm_CoerceInPlace_ToInt32, i.stackBytesConsumedSoFar());
masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
masm.unboxInt32(Address(StackPointer, offsetToArgv), ReturnReg);
break;
case RetType::Double:
masm.call(AsmJSImm_CoerceInPlace_ToNumber);
masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
masm.loadDouble(Address(StackPointer, offsetToArgv), ReturnFloatReg);
break;
masm.callExit(AsmJSImm_CoerceInPlace_ToNumber, i.stackBytesConsumedSoFar());
masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, throwLabel);
masm.loadDouble(Address(StackPointer, offsetToArgv), ReturnFloatReg);
break;
default:
MOZ_ASSUME_UNREACHABLE("Unsupported convert type");
MOZ_ASSUME_UNREACHABLE("Unsupported convert type");
}
}
@ -6462,19 +6503,9 @@ GenerateFFIIonExit(ModuleCompiler &m, const ModuleCompiler::ExitDescriptor &exit
// conversion of the result. A frame large enough for both is allocated.
//
// Arguments to the Ion function are in the following order on the stack:
// descriptor | callee | argc | this | arg1 | arg2 | ...
// | return address | descriptor | callee | argc | this | arg1 | arg2 | ...
unsigned argBytes = 3 * sizeof(size_t) + (1 + exit.sig().args().length()) * sizeof(Value);
// On ARM, we call with ma_callIonNoPush which, following the Ion calling convention,
// stores the return address into *sp. This means we need to include an extra word of
// space before the arguments in the stack allocation. (On x86/x64, the call
// instruction does the push itself and the ABI just requires us to be aligned before
// the call instruction.)
unsigned offsetToArgs = 0;
#if defined(JS_CODEGEN_ARM)
offsetToArgs += sizeof(size_t);
#endif
unsigned offsetToArgs = MaybeRetAddr;
unsigned stackDecForIonCall = StackDecrementForCall(masm, argBytes + offsetToArgs);
// Reserve space for a call to AsmJSImm_CoerceInPlace_* and an array of values used by
@ -6483,7 +6514,8 @@ GenerateFFIIonExit(ModuleCompiler &m, const ModuleCompiler::ExitDescriptor &exit
MIRType typeArray[] = { MIRType_Pointer, MIRType_Pointer }; // cx, argv
MIRTypeVector callArgTypes(m.cx());
callArgTypes.infallibleAppend(typeArray, ArrayLength(typeArray));
unsigned stackDecForOOLCall = StackDecrementForCall(masm, callArgTypes, sizeof(Value));
unsigned oolExtraBytes = sizeof(Value) + MaybeRetAddr;
unsigned stackDecForOOLCall = StackDecrementForCall(masm, callArgTypes, oolExtraBytes);
// Allocate a frame large enough for both of the above calls.
unsigned stackDec = Max(stackDecForIonCall, stackDecForOOLCall);
@ -6566,6 +6598,9 @@ GenerateFFIIonExit(ModuleCompiler &m, const ModuleCompiler::ExitDescriptor &exit
LoadAsmJSActivationIntoRegister(masm, reg0);
// Record sp in the AsmJSActivation for stack-walking.
masm.storePtr(StackPointer, Address(reg0, AsmJSActivation::offsetOfExitSP()));
// The following is inlined:
// JSContext *cx = activation->cx();
// Activation *act = cx->mainThread().activation();
@ -6592,14 +6627,7 @@ GenerateFFIIonExit(ModuleCompiler &m, const ModuleCompiler::ExitDescriptor &exit
// 2. Call
AssertStackAlignment(masm);
#if defined(JS_CODEGEN_ARM)
masm.ma_callIonNoPush(callee);
// The return address has been popped from the stack, so adjust the stack
// without changing the frame-pushed counter to keep the stack aligned.
masm.subPtr(Imm32(4), sp);
#else
masm.callIon(callee);
#endif
masm.callIonFromAsmJS(callee);
AssertStackAlignment(masm);
{
@ -6720,27 +6748,30 @@ GenerateStackOverflowExit(ModuleCompiler &m, Label *throwLabel)
MIRTypeVector argTypes(m.cx());
argTypes.infallibleAppend(MIRType_Pointer); // cx
unsigned stackDec = StackDecrementForCall(masm, argTypes);
unsigned stackDec = StackDecrementForCall(masm, argTypes, MaybeRetAddr);
masm.reserveStack(stackDec);
Register activation = ABIArgGenerator::NonArgReturnVolatileReg0;
LoadAsmJSActivationIntoRegister(masm, activation);
// Record sp in the AsmJSActivation for stack-walking.
masm.storePtr(StackPointer, Address(activation, AsmJSActivation::offsetOfExitSP()));
ABIArgMIRTypeIter i(argTypes);
Register scratch = ABIArgGenerator::NonArgReturnVolatileReg0;
LoadAsmJSActivationIntoRegister(masm, scratch);
// argument 0: cx
if (i->kind() == ABIArg::GPR) {
LoadJSContextFromActivation(masm, scratch, i->gpr());
LoadJSContextFromActivation(masm, activation, i->gpr());
} else {
LoadJSContextFromActivation(masm, scratch, scratch);
masm.storePtr(scratch, Address(StackPointer, i->offsetFromArgBase()));
LoadJSContextFromActivation(masm, activation, activation);
masm.storePtr(activation, Address(StackPointer, i->offsetFromArgBase()));
}
i++;
JS_ASSERT(i.done());
AssertStackAlignment(masm);
masm.call(AsmJSImm_ReportOverRecursed);
masm.callExit(AsmJSImm_ReportOverRecursed, i.stackBytesConsumedSoFar());
// Don't worry about restoring the stack; throwLabel will pop everything.
masm.jump(throwLabel);
@ -6895,6 +6926,8 @@ GenerateStubs(ModuleCompiler &m)
m.setEntryOffset(i);
if (!GenerateEntry(m, m.module().exportedFunction(i)))
return false;
if (m.masm().oom())
return false;
}
Label throwLabel;
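
A plain-JS model (illustrative only, not engine code) of the bookkeeping the compiler side adds above: every emitted call is tagged with a CallSiteDesc carrying the source line/column and an index into the module's function-name table, and the assembler then appends the return-address offset and the current stack depth.

// Hypothetical model of addFunctionName() plus appendCallSite(); the
// field names mirror the patch, the numbers are made up.
var functionNames = [];
var callSites = [];

function addFunctionName(name) {
    var index = functionNames.length;
    functionNames.push(name);
    return index;          // cached per function as functionNameIndex_
}

function appendCallSite(desc, returnAddressOffset, stackDepth) {
    callSites.push({ line: desc.line, column: desc.column,
                     functionNameIndex: desc.functionNameIndex,
                     returnAddressOffset: returnAddressOffset,
                     stackDepth: stackDepth });
}

// At a call inside function 'f1':
var nameIndex = addFunctionName("f1");
appendCallSite({ line: 3, column: 18, functionNameIndex: nameIndex },
               /* masm.currentOffset() */ 0x40,
               /* framePushed (+ ret-addr word on x86/x64) */ 24);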

View File

@ -6,6 +6,9 @@
#include "jit/AsmJSLink.h"
#include "mozilla/BinarySearch.h"
#include "mozilla/PodOperations.h"
#ifdef MOZ_VTUNE
# include "vtune/VTuneWrapper.h"
#endif
@ -29,7 +32,102 @@
using namespace js;
using namespace js::jit;
using mozilla::BinarySearch;
using mozilla::IsNaN;
using mozilla::PodZero;
AsmJSFrameIterator::AsmJSFrameIterator(const AsmJSActivation *activation)
{
if (!activation || activation->isInterruptedSP()) {
PodZero(this);
JS_ASSERT(done());
return;
}
module_ = &activation->module();
sp_ = activation->exitSP();
#if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
// For calls to Ion/C++ on x86/x64, the exitSP is the SP right before the call
// to C++. Since the call instruction pushes the return address, we know
// that the return address is 1 word below exitSP.
returnAddress_ = *(uint8_t**)(sp_ - sizeof(void*));
#else
// For calls to Ion/C++ on ARM, the *caller* pushes the return address on
// the stack. For Ion, this is just part of the ABI. For C++, the return
// address is explicitly pushed before the call since we cannot expect the
// callee to immediately push lr. This means that exitSP points to the
// return address.
returnAddress_ = *(uint8_t**)sp_;
#endif
settle();
}
struct GetCallSite
{
const AsmJSModule &module;
GetCallSite(const AsmJSModule &module) : module(module) {}
uint32_t operator[](size_t index) const {
return module.callSite(index).returnAddressOffset();
}
};
void
AsmJSFrameIterator::popFrame()
{
// After adding stackDepth, sp points to the word before the return address,
// on both ARM and x86/x64.
sp_ += callsite_->stackDepth();
returnAddress_ = *(uint8_t**)(sp_ - sizeof(void*));
}
void
AsmJSFrameIterator::settle()
{
while (true) {
uint32_t target = returnAddress_ - module_->codeBase();
size_t lowerBound = 0;
size_t upperBound = module_->numCallSites();
size_t match;
if (!BinarySearch(GetCallSite(*module_), lowerBound, upperBound, target, &match)) {
callsite_ = nullptr;
return;
}
callsite_ = &module_->callSite(match);
if (callsite_->isExit()) {
popFrame();
continue;
}
if (callsite_->isEntry()) {
callsite_ = nullptr;
return;
}
JS_ASSERT(callsite_->isNormal());
return;
}
}
JSAtom *
AsmJSFrameIterator::functionDisplayAtom() const
{
JS_ASSERT(!done());
return module_->functionName(callsite_->functionNameIndex());
}
unsigned
AsmJSFrameIterator::computeLine(uint32_t *column) const
{
JS_ASSERT(!done());
if (column)
*column = callsite_->column();
return callsite_->line();
}
static bool
CloneModule(JSContext *cx, MutableHandle<AsmJSModuleObject*> moduleObj)
@ -408,8 +506,7 @@ CallAsmJS(JSContext *cx, unsigned argc, Value *vp)
// that the optimized asm.js-to-Ion FFI call path (which we want to be
// very fast) can avoid doing so. The JitActivation is marked as
// inactive so stack iteration will skip over it.
unsigned exportIndex = FunctionToExportedFunctionIndex(callee);
AsmJSActivation activation(cx, module, exportIndex);
AsmJSActivation activation(cx, module);
JitActivation jitActivation(cx, /* firstFrameIsConstructing = */ false, /* active */ false);
// Call the per-exported-function trampoline created by GenerateEntry.
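
A plain-JS sketch of the lookup that settle() performs above (illustrative; it mirrors the mozilla::BinarySearch over returnAddressOffset, which assumes call sites are recorded in increasing code order): map a return address back to its CallSite, or give up when the pc is not at a known call site, as happens after an interrupt at an arbitrary pc.

function findCallSite(module, returnAddress) {
    var target = returnAddress - module.codeBase;
    var lo = 0, hi = module.callSites.length;
    while (lo < hi) {                      // lower-bound binary search
        var mid = (lo + hi) >> 1;
        if (module.callSites[mid].returnAddressOffset < target)
            lo = mid + 1;
        else
            hi = mid;
    }
    var site = module.callSites[lo];
    return (site && site.returnAddressOffset === target) ? site : null;
}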

View File

@ -9,8 +9,33 @@
#include "NamespaceImports.h"
class JSAtom;
namespace js {
class AsmJSActivation;
class AsmJSModule;
namespace jit { class CallSite; }
// Iterates over the frames of a single AsmJSActivation.
class AsmJSFrameIterator
{
const AsmJSModule *module_;
const jit::CallSite *callsite_;
uint8_t *sp_;
uint8_t *returnAddress_;
void popFrame();
void settle();
public:
AsmJSFrameIterator(const AsmJSActivation *activation);
void operator++() { popFrame(); settle(); }
bool done() const { return !callsite_; }
JSAtom *functionDisplayAtom() const;
unsigned computeLine(uint32_t *column) const;
};
#ifdef JS_ION
// Create a new JSFunction to replace originalFun as the representation of the

View File

@ -382,6 +382,8 @@ AsmJSModule::addSizeOfMisc(mozilla::MallocSizeOf mallocSizeOf, size_t *asmJSModu
globals_.sizeOfExcludingThis(mallocSizeOf) +
exits_.sizeOfExcludingThis(mallocSizeOf) +
exports_.sizeOfExcludingThis(mallocSizeOf) +
callSites_.sizeOfExcludingThis(mallocSizeOf) +
functionNames_.sizeOfExcludingThis(mallocSizeOf) +
heapAccesses_.sizeOfExcludingThis(mallocSizeOf) +
#if defined(MOZ_VTUNE) || defined(JS_ION_PERF)
profiledFunctions_.sizeOfExcludingThis(mallocSizeOf) +
@ -477,6 +479,12 @@ SerializedNameSize(PropertyName *name)
(name ? name->length() * sizeof(jschar) : 0);
}
size_t
AsmJSModule::Name::serializedSize() const
{
return SerializedNameSize(name_);
}
static uint8_t *
SerializeName(uint8_t *cursor, PropertyName *name)
{
@ -490,6 +498,12 @@ SerializeName(uint8_t *cursor, PropertyName *name)
return cursor;
}
uint8_t *
AsmJSModule::Name::serialize(uint8_t *cursor) const
{
return SerializeName(cursor, name_);
}
static const uint8_t *
DeserializeName(ExclusiveContext *cx, const uint8_t *cursor, PropertyName **name)
{
@ -521,6 +535,19 @@ DeserializeName(ExclusiveContext *cx, const uint8_t *cursor, PropertyName **name
return cursor + length * sizeof(jschar);
}
const uint8_t *
AsmJSModule::Name::deserialize(ExclusiveContext *cx, const uint8_t *cursor)
{
return DeserializeName(cx, cursor, &name_);
}
bool
AsmJSModule::Name::clone(ExclusiveContext *cx, Name *out) const
{
out->name_ = name_;
return true;
}
template <class T>
size_t
SerializedVectorSize(const js::Vector<T, 0, SystemAllocPolicy> &vec)
@ -788,6 +815,8 @@ AsmJSModule::serializedSize() const
SerializedVectorSize(globals_) +
SerializedVectorSize(exits_) +
SerializedVectorSize(exports_) +
SerializedPodVectorSize(callSites_) +
SerializedVectorSize(functionNames_) +
SerializedPodVectorSize(heapAccesses_) +
#if defined(MOZ_VTUNE) || defined(JS_ION_PERF)
SerializedVectorSize(profiledFunctions_) +
@ -806,6 +835,8 @@ AsmJSModule::serialize(uint8_t *cursor) const
cursor = SerializeVector(cursor, globals_);
cursor = SerializeVector(cursor, exits_);
cursor = SerializeVector(cursor, exports_);
cursor = SerializePodVector(cursor, callSites_);
cursor = SerializeVector(cursor, functionNames_);
cursor = SerializePodVector(cursor, heapAccesses_);
#if defined(MOZ_VTUNE) || defined(JS_ION_PERF)
cursor = SerializeVector(cursor, profiledFunctions_);
@ -830,6 +861,8 @@ AsmJSModule::deserialize(ExclusiveContext *cx, const uint8_t *cursor)
(cursor = DeserializeVector(cx, cursor, &globals_)) &&
(cursor = DeserializeVector(cx, cursor, &exits_)) &&
(cursor = DeserializeVector(cx, cursor, &exports_)) &&
(cursor = DeserializePodVector(cx, cursor, &callSites_)) &&
(cursor = DeserializeVector(cx, cursor, &functionNames_)) &&
(cursor = DeserializePodVector(cx, cursor, &heapAccesses_)) &&
#if defined(MOZ_VTUNE) || defined(JS_ION_PERF)
(cursor = DeserializeVector(cx, cursor, &profiledFunctions_)) &&
@ -897,6 +930,8 @@ AsmJSModule::clone(JSContext *cx, ScopedJSDeletePtr<AsmJSModule> *moduleOut) con
if (!CloneVector(cx, globals_, &out.globals_) ||
!CloneVector(cx, exits_, &out.exits_) ||
!CloneVector(cx, exports_, &out.exports_) ||
!ClonePodVector(cx, callSites_, &out.callSites_) ||
!CloneVector(cx, functionNames_, &out.functionNames_) ||
!ClonePodVector(cx, heapAccesses_, &out.heapAccesses_) ||
!staticLinkData_.clone(cx, &out.staticLinkData_))
{

View File

@ -221,8 +221,6 @@ class AsmJSModule
struct Pod {
ReturnType returnType_;
uint32_t codeOffset_;
uint32_t line_;
uint32_t column_;
// These two fields are offsets to the beginning of the ScriptSource
// of the module, and thus invariant under serialization (unlike
// absolute offsets into ScriptSource).
@ -233,7 +231,6 @@ class AsmJSModule
friend class AsmJSModule;
ExportedFunction(PropertyName *name,
uint32_t line, uint32_t column,
uint32_t startOffsetInModule, uint32_t endOffsetInModule,
PropertyName *maybeFieldName,
ArgCoercionVector &&argCoercions,
@ -244,8 +241,6 @@ class AsmJSModule
argCoercions_ = mozilla::Move(argCoercions);
pod.returnType_ = returnType;
pod.codeOffset_ = UINT32_MAX;
pod.line_ = line;
pod.column_ = column;
pod.startOffsetInModule_ = startOffsetInModule;
pod.endOffsetInModule_ = endOffsetInModule;
JS_ASSERT_IF(maybeFieldName_, name_->isTenured());
@ -274,12 +269,6 @@ class AsmJSModule
PropertyName *name() const {
return name_;
}
uint32_t line() const {
return pod.line_;
}
uint32_t column() const {
return pod.column_;
}
uint32_t startOffsetInModule() const {
return pod.startOffsetInModule_;
}
@ -305,6 +294,20 @@ class AsmJSModule
bool clone(ExclusiveContext *cx, ExportedFunction *out) const;
};
class Name
{
PropertyName *name_;
public:
Name() : name_(nullptr) {}
Name(PropertyName *name) : name_(name) {}
PropertyName *name() const { return name_; }
PropertyName *&name() { return name_; }
size_t serializedSize() const;
uint8_t *serialize(uint8_t *cursor) const;
const uint8_t *deserialize(ExclusiveContext *cx, const uint8_t *cursor);
bool clone(ExclusiveContext *cx, Name *out) const;
};
#if defined(MOZ_VTUNE) || defined(JS_ION_PERF)
// Function information to add to the VTune JIT profiler following linking.
struct ProfiledFunction
@ -400,9 +403,11 @@ class AsmJSModule
};
private:
typedef Vector<ExportedFunction, 0, SystemAllocPolicy> ExportedFunctionVector;
typedef Vector<Global, 0, SystemAllocPolicy> GlobalVector;
typedef Vector<Exit, 0, SystemAllocPolicy> ExitVector;
typedef Vector<ExportedFunction, 0, SystemAllocPolicy> ExportedFunctionVector;
typedef Vector<jit::CallSite, 0, SystemAllocPolicy> CallSiteVector;
typedef Vector<Name, 0, SystemAllocPolicy> FunctionNameVector;
typedef Vector<jit::AsmJSHeapAccess, 0, SystemAllocPolicy> HeapAccessVector;
typedef Vector<jit::IonScriptCounts *, 0, SystemAllocPolicy> FunctionCountsVector;
#if defined(MOZ_VTUNE) || defined(JS_ION_PERF)
@ -420,6 +425,8 @@ class AsmJSModule
GlobalVector globals_;
ExitVector exits_;
ExportedFunctionVector exports_;
CallSiteVector callSites_;
FunctionNameVector functionNames_;
HeapAccessVector heapAccesses_;
#if defined(MOZ_VTUNE) || defined(JS_ION_PERF)
ProfiledFunctionVector profiledFunctions_;
@ -476,6 +483,8 @@ class AsmJSModule
if (exitIndexToGlobalDatum(i).fun)
MarkObject(trc, &exitIndexToGlobalDatum(i).fun, "asm.js imported function");
}
for (unsigned i = 0; i < functionNames_.length(); i++)
MarkStringUnbarriered(trc, &functionNames_[i].name(), "asm.js module function name");
#if defined(MOZ_VTUNE) || defined(JS_ION_PERF)
for (unsigned i = 0; i < profiledFunctions_.length(); i++)
profiledFunctions_[i].trace(trc);
@ -595,13 +604,12 @@ class AsmJSModule
return exits_.append(Exit(ffiIndex, globalDataOffset));
}
bool addExportedFunction(PropertyName *name, uint32_t line, uint32_t column,
uint32_t srcStart, uint32_t srcEnd,
bool addExportedFunction(PropertyName *name, uint32_t srcStart, uint32_t srcEnd,
PropertyName *maybeFieldName,
ArgCoercionVector &&argCoercions,
ReturnType returnType)
{
ExportedFunction func(name, line, column, srcStart, srcEnd, maybeFieldName,
ExportedFunction func(name, srcStart, srcEnd, maybeFieldName,
mozilla::Move(argCoercions), returnType);
if (exports_.length() >= UINT32_MAX)
return false;
@ -621,6 +629,17 @@ class AsmJSModule
return JS_DATA_TO_FUNC_PTR(CodePtr, code_ + func.pod.codeOffset_);
}
bool addFunctionName(PropertyName *name, uint32_t *nameIndex) {
JS_ASSERT(name->isTenured());
if (functionNames_.length() > jit::CallSiteDesc::FUNCTION_NAME_INDEX_MAX)
return false;
*nameIndex = functionNames_.length();
return functionNames_.append(name);
}
PropertyName *functionName(uint32_t i) const {
return functionNames_[i].name();
}
#if defined(MOZ_VTUNE) || defined(JS_ION_PERF)
bool trackProfiledFunction(PropertyName *name, unsigned startCodeOffset, unsigned endCodeOffset,
unsigned line, unsigned column)
@ -766,6 +785,19 @@ class AsmJSModule
return heapAccesses_[i];
}
void assignCallSites(jit::CallSiteVector &&callsites) {
callSites_ = Move(callsites);
}
unsigned numCallSites() const {
return callSites_.length();
}
const jit::CallSite &callSite(unsigned i) const {
return callSites_[i];
}
jit::CallSite &callSite(unsigned i) {
return callSites_[i];
}
void initHeap(Handle<ArrayBufferObject*> heap, JSContext *cx);
void requireHeapLengthToBeAtLeast(uint32_t len) {

View File

@ -342,7 +342,7 @@ HandleSimulatorInterrupt(JSRuntime *rt, AsmJSActivation *activation, void *fault
if (module.containsPC((void *)rt->mainThread.simulator()->get_pc()) &&
module.containsPC(faultingAddress))
{
activation->setResumePC(nullptr);
activation->setInterrupted(nullptr);
int32_t nextpc = int32_t(module.interruptExit());
rt->mainThread.simulator()->set_resume_pc(nextpc);
return true;
@ -452,7 +452,7 @@ HandleException(PEXCEPTION_POINTERS exception)
// The trampoline will jump to activation->resumePC if execution isn't
// interrupted.
if (module.containsPC(faultingAddress)) {
activation->setResumePC(pc);
activation->setInterrupted(pc);
*ppc = module.interruptExit();
JSRuntime::AutoLockForInterrupt lock(rt);
@ -655,7 +655,7 @@ HandleMachException(JSRuntime *rt, const ExceptionRequest &request)
// The trampoline will jump to activation->resumePC if execution isn't
// interrupted.
if (module.containsPC(faultingAddress)) {
activation->setResumePC(pc);
activation->setInterrupted(pc);
*ppc = module.interruptExit();
JSRuntime::AutoLockForInterrupt lock(rt);
@ -905,7 +905,7 @@ HandleSignal(int signum, siginfo_t *info, void *ctx)
// The trampoline will jump to activation->resumePC if execution isn't
// interrupted.
if (module.containsPC(faultingAddress)) {
activation->setResumePC(pc);
activation->setInterrupted(pc);
*ppc = module.interruptExit();
JSRuntime::AutoLockForInterrupt lock(rt);
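
The handshake in these handlers, as a plain-JS sketch (illustrative): setInterrupted() both records the resume pc and poisons exitSP with a sentinel, so a stack walk of an activation that was interrupted at an arbitrary pc reports done() instead of trying to walk from a bogus sp.

var InterruptedSP = -1;   // models AsmJSActivation::InterruptedSP

function setInterrupted(activation, pc) {
    activation.resumePC = pc;
    activation.exitSP = InterruptedSP;
}

// Mirrors the early-out in the AsmJSFrameIterator constructor:
function hasWalkableFrames(activation) {
    return !!activation && activation.exitSP !== InterruptedSP;
}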

View File

@ -8218,6 +8218,7 @@ CodeGenerator::visitAsmJSCall(LAsmJSCall *ins)
}
}
#endif
if (mir->spIncrement())
masm.freeStack(mir->spIncrement());
@ -8234,13 +8235,13 @@ CodeGenerator::visitAsmJSCall(LAsmJSCall *ins)
MAsmJSCall::Callee callee = mir->callee();
switch (callee.which()) {
case MAsmJSCall::Callee::Internal:
masm.call(callee.internal());
masm.call(mir->desc(), callee.internal());
break;
case MAsmJSCall::Callee::Dynamic:
masm.call(ToRegister(ins->getOperand(mir->dynamicCalleeOperandIndex())));
masm.call(mir->desc(), ToRegister(ins->getOperand(mir->dynamicCalleeOperandIndex())));
break;
case MAsmJSCall::Callee::Builtin:
masm.call(callee.builtin());
masm.call(mir->desc(), callee.builtin());
break;
}

View File

@ -2968,12 +2968,10 @@ MAsmJSUnsignedToFloat32::foldsTo(TempAllocator &alloc, bool useValueNumbers)
}
MAsmJSCall *
MAsmJSCall::New(TempAllocator &alloc, Callee callee, const Args &args, MIRType resultType,
size_t spIncrement)
MAsmJSCall::New(TempAllocator &alloc, const CallSiteDesc &desc, Callee callee,
const Args &args, MIRType resultType, size_t spIncrement)
{
MAsmJSCall *call = new(alloc) MAsmJSCall;
call->spIncrement_ = spIncrement;
call->callee_ = callee;
MAsmJSCall *call = new(alloc) MAsmJSCall(desc, callee, spIncrement);
call->setResultType(resultType);
if (!call->argRegs_.init(alloc, args.length()))

View File

@ -9982,11 +9982,16 @@ class MAsmJSCall MOZ_FINAL : public MInstruction
MUse use;
};
CallSiteDesc desc_;
Callee callee_;
FixedList<MUse> operands_;
FixedList<AnyRegister> argRegs_;
size_t spIncrement_;
MAsmJSCall(const CallSiteDesc &desc, Callee callee, size_t spIncrement)
: desc_(desc), callee_(callee), spIncrement_(spIncrement)
{ }
protected:
void setOperand(size_t index, MDefinition *operand) {
operands_[index].set(operand, this, index);
@ -10006,8 +10011,8 @@ class MAsmJSCall MOZ_FINAL : public MInstruction
};
typedef Vector<Arg, 8> Args;
static MAsmJSCall *New(TempAllocator &alloc, Callee callee, const Args &args,
MIRType resultType, size_t spIncrement);
static MAsmJSCall *New(TempAllocator &alloc, const CallSiteDesc &desc, Callee callee,
const Args &args, MIRType resultType, size_t spIncrement);
size_t numOperands() const {
return operands_.length();
@ -10023,6 +10028,9 @@ class MAsmJSCall MOZ_FINAL : public MInstruction
JS_ASSERT(index < numArgs());
return argRegs_[index];
}
const CallSiteDesc &desc() const {
return desc_;
}
Callee callee() const {
return callee_;
}

View File

@ -3568,6 +3568,19 @@ MacroAssemblerARM::ma_call(ImmPtr dest)
as_blx(CallReg);
}
void
MacroAssemblerARM::ma_callAndStoreRet(const Register r, uint32_t stackArgBytes)
{
// Note: this function stores the return address to sp[0]. The caller must
// anticipate this by pushing additional space on the stack. The ABI does
// not provide space for a return address so this function may only be
// called if no arguments are passed.
JS_ASSERT(stackArgBytes == 0);
AutoForbidPools afp(this);
as_dtr(IsStore, 32, Offset, pc, DTRAddr(sp, DtrOffImm(0)));
as_blx(r);
}
void
MacroAssemblerARMCompat::breakpoint()
{

View File

@ -399,6 +399,9 @@ class MacroAssemblerARM : public Assembler
void ma_call(ImmPtr dest);
// calls reg, storing the return address into sp[0]
void ma_callAndStoreRet(const Register reg, uint32_t stackArgBytes);
// Float registers can only be loaded/stored in continuous runs
// when using vstm/vldm.
// This function breaks the set into continuous runs and loads/stores
@ -543,7 +546,6 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
void call(const Register reg) {
as_blx(reg);
}
void call(Label *label) {
// for now, assume that it'll be nearby?
as_bl(label, Always);
@ -572,6 +574,38 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
ma_movPatchable(ImmPtr(c->raw()), ScratchRegister, Always, rs);
ma_callIonHalfPush(ScratchRegister);
}
void appendCallSite(const CallSiteDesc &desc) {
enoughMemory_ &= append(CallSite(desc, currentOffset(), framePushed_));
}
void call(const CallSiteDesc &desc, const Register reg) {
call(reg);
appendCallSite(desc);
}
void call(const CallSiteDesc &desc, Label *label) {
call(label);
appendCallSite(desc);
}
void call(const CallSiteDesc &desc, AsmJSImmPtr imm) {
call(imm);
appendCallSite(desc);
}
void callExit(AsmJSImmPtr imm, uint32_t stackArgBytes) {
movePtr(imm, CallReg);
ma_callAndStoreRet(CallReg, stackArgBytes);
appendCallSite(CallSiteDesc::Exit());
}
void callIonFromAsmJS(const Register reg) {
ma_callIonNoPush(reg);
appendCallSite(CallSiteDesc::Exit());
// The Ion ABI has the callee pop the return address off the stack.
// The asm.js caller assumes that the call leaves sp unchanged, so bump
// the stack.
subPtr(Imm32(sizeof(void*)), sp);
}
void branch(JitCode *c) {
BufferOffset bo = m_buffer.nextOffset();
addPendingJump(bo, ImmPtr(c->raw()), Relocation::JITCODE);

View File

@ -4071,7 +4071,7 @@ Simulator::execute()
int32_t rpc = resume_pc_;
if (MOZ_UNLIKELY(rpc != 0)) {
// AsmJS signal handler ran and we have to adjust the pc.
activation->setResumePC((void *)get_pc());
activation->setInterrupted((void *)get_pc());
set_pc(rpc);
resume_pc_ = 0;
}

View File

@ -663,6 +663,77 @@ class CodeLocationLabel
}
};
// Describes the user-visible properties of a callsite.
//
// A few general notes about the stack-walking supported by CallSite(Desc):
// - This information facilitates stack-walking performed by FrameIter, which
// is used by Error.stack and other user-visible stack-walking functions.
// - Ion/asm.js calling conventions do not maintain a frame-pointer, so
// stack-walking must look up the stack depth based on the PC.
// - Stack-walking only occurs from C++ after a synchronous call (JS-to-JS and
// JS-to-C++). Thus, we do not need to map arbitrary PCs to stack-depths,
// just the return addresses at callsites.
// - An exception to the above rule is the interrupt callback, which can happen
// at arbitrary PCs. In such cases, we drop frames from the stack-walk. In
// the future, when a full PC->stack-depth map is maintained, we will be able
// to handle this case.
class CallSiteDesc
{
uint32_t line_;
uint32_t column_;
uint32_t functionNameIndex_;
static const uint32_t sEntryTrampoline = UINT32_MAX;
static const uint32_t sExit = UINT32_MAX - 1;
public:
static const uint32_t FUNCTION_NAME_INDEX_MAX = UINT32_MAX - 2;
CallSiteDesc() {}
CallSiteDesc(uint32_t line, uint32_t column, uint32_t functionNameIndex)
: line_(line), column_(column), functionNameIndex_(functionNameIndex)
{}
static CallSiteDesc Entry() { return CallSiteDesc(0, 0, sEntryTrampoline); }
static CallSiteDesc Exit() { return CallSiteDesc(0, 0, sExit); }
bool isEntry() const { return functionNameIndex_ == sEntryTrampoline; }
bool isExit() const { return functionNameIndex_ == sExit; }
bool isNormal() const { return !(isEntry() || isExit()); }
uint32_t line() const { JS_ASSERT(isNormal()); return line_; }
uint32_t column() const { JS_ASSERT(isNormal()); return column_; }
uint32_t functionNameIndex() const { JS_ASSERT(isNormal()); return functionNameIndex_; }
};
// Adds to CallSiteDesc the metadata necessary to walk the stack given an
// initial stack-pointer.
struct CallSite : public CallSiteDesc
{
uint32_t returnAddressOffset_;
uint32_t stackDepth_;
public:
CallSite() {}
CallSite(CallSiteDesc desc, uint32_t returnAddressOffset, uint32_t stackDepth)
: CallSiteDesc(desc),
returnAddressOffset_(returnAddressOffset),
stackDepth_(stackDepth)
{ }
void setReturnAddressOffset(uint32_t r) { returnAddressOffset_ = r; }
uint32_t returnAddressOffset() const { return returnAddressOffset_; }
// The stackDepth measures the amount of stack space pushed since the
// function was called. In particular, this includes the word pushed by the
// call instruction on x86/x64.
uint32_t stackDepth() const { JS_ASSERT(!isEntry()); return stackDepth_; }
};
typedef Vector<CallSite, 0, SystemAllocPolicy> CallSiteVector;
// Summarizes a heap access made by asm.js code that needs to be patched later
// and/or looked up by the asm.js signal handlers. Different architectures need
// to know different things (x64: offset and length, ARM: where to patch in
@ -814,11 +885,15 @@ struct AsmJSAbsoluteLink
// The base class of all Assemblers for all archs.
class AssemblerShared
{
Vector<CallSite, 0, SystemAllocPolicy> callsites_;
Vector<AsmJSHeapAccess, 0, SystemAllocPolicy> asmJSHeapAccesses_;
Vector<AsmJSGlobalAccess, 0, SystemAllocPolicy> asmJSGlobalAccesses_;
Vector<AsmJSAbsoluteLink, 0, SystemAllocPolicy> asmJSAbsoluteLinks_;
public:
bool append(CallSite callsite) { return callsites_.append(callsite); }
CallSiteVector &&extractCallSites() { return Move(callsites_); }
bool append(AsmJSHeapAccess access) { return asmJSHeapAccesses_.append(access); }
AsmJSHeapAccessVector &&extractAsmJSHeapAccesses() { return Move(asmJSHeapAccesses_); }
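
A worked sketch of the stackDepth bookkeeping (plain JS, hypothetical numbers): adding a call site's stackDepth to the callee-side sp leaves the caller's return address exactly one word below the new sp, which is what AsmJSFrameIterator::popFrame() in AsmJSLink.cpp relies on. On x86/x64 the word pushed by the call instruction is folded into stackDepth (framePushed_ + sizeof(void*)); on ARM, stackDepth is framePushed_ alone.

var wordSize = 8;   // assume x64 for the numbers below

function popFrame(frame, site, readWord) {
    var sp = frame.sp + site.stackDepth;
    return { sp: sp, returnAddress: readWord(sp - wordSize) };
}

// E.g. if framePushed_ was 16 at the call, x86/x64 records
// stackDepth = 16 + 8 = 24, so sp + 24 lands one word above the
// return address that the call instruction pushed.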

View File

@ -30,6 +30,8 @@ class MacroAssemblerX86Shared : public Assembler
uint32_t framePushed_;
public:
using Assembler::call;
MacroAssemblerX86Shared()
: framePushed_(0)
{ }
@ -662,6 +664,24 @@ class MacroAssemblerX86Shared : public Assembler
call(callee);
}
void appendCallSite(const CallSiteDesc &desc) {
// Add an extra sizeof(void*) to include the return address that was
// pushed by the call instruction (see CallSite::stackDepth).
enoughMemory_ &= append(CallSite(desc, currentOffset(), framePushed_ + sizeof(void*)));
}
void call(const CallSiteDesc &desc, Label *label) {
call(label);
appendCallSite(desc);
}
void call(const CallSiteDesc &desc, const Register &reg) {
call(reg);
appendCallSite(desc);
}
void callIonFromAsmJS(const Register &reg) {
call(CallSiteDesc::Exit(), reg);
}
void checkStackAlignment() {
// Exists for ARM compatibility.
}

View File

@ -110,6 +110,14 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared
call(rax);
}
void call(const CallSiteDesc &desc, AsmJSImmPtr target) {
call(target);
appendCallSite(desc);
}
void callExit(AsmJSImmPtr target, uint32_t stackArgBytes) {
call(CallSiteDesc::Exit(), target);
}
// Refers to the upper 32 bits of a 64-bit Value operand.
// On x86_64, the upper 32 bits do not necessarily only contain the type.
Operand ToUpper32(Operand base) {

View File

@ -71,6 +71,7 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared
using MacroAssemblerX86Shared::branch32;
using MacroAssemblerX86Shared::load32;
using MacroAssemblerX86Shared::store32;
using MacroAssemblerX86Shared::call;
MacroAssemblerX86()
: inCall_(false),
@ -1105,6 +1106,13 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared
Push(dynStack);
call(target);
}
void call(const CallSiteDesc &desc, AsmJSImmPtr target) {
call(target);
appendCallSite(desc);
}
void callExit(AsmJSImmPtr target, uint32_t stackArgBytes) {
call(CallSiteDesc::Exit(), target);
}
// Save an exit frame to the thread data of the current thread, given a
// register that holds a PerThreadData *.

View File

@ -586,18 +586,23 @@ FrameIter::settleOnActivation()
return;
}
if (activation->isAsmJS()) {
data_.asmJSFrames_ = AsmJSFrameIterator(data_.activations_->asAsmJS());
if (data_.asmJSFrames_.done()) {
++data_.activations_;
continue;
}
data_.state_ = ASMJS;
return;
}
// ForkJoin activations don't contain iterable frames, so skip them.
if (activation->isForkJoin()) {
++data_.activations_;
continue;
}
// Until asm.js has real stack-walking, we have each AsmJSActivation
// expose a single function (the entry function).
if (activation->isAsmJS()) {
data_.state_ = ASMJS;
return;
}
#endif
JS_ASSERT(activation->isInterpreter());
@ -634,6 +639,7 @@ FrameIter::Data::Data(JSContext *cx, SavedOption savedOption, ContextOption cont
#ifdef JS_ION
, jitFrames_((uint8_t *)nullptr, SequentialExecution)
, ionInlineFrameNo_(0)
, asmJSFrames_(nullptr)
#endif
{
}
@ -650,6 +656,7 @@ FrameIter::Data::Data(const FrameIter::Data &other)
#ifdef JS_ION
, jitFrames_(other.jitFrames_)
, ionInlineFrameNo_(other.ionInlineFrameNo_)
, asmJSFrames_(other.asmJSFrames_)
#endif
{
}
@ -731,6 +738,16 @@ FrameIter::popJitFrame()
popActivation();
}
void
FrameIter::popAsmJSFrame()
{
JS_ASSERT(data_.state_ == ASMJS);
++data_.asmJSFrames_;
if (data_.asmJSFrames_.done())
popActivation();
}
#endif
FrameIter &
@ -780,11 +797,7 @@ FrameIter::operator++()
MOZ_ASSUME_UNREACHABLE("Unexpected state");
#endif
case ASMJS:
// As described in settleOnActivation, an AsmJSActivation currently only
// represents a single asm.js function, so, if the FrameIter is
// currently stopped on an ASMJS frame, then we can pop the entire
// AsmJSActivation.
popActivation();
popAsmJSFrame();
break;
}
return *this;
@ -940,8 +953,7 @@ FrameIter::functionDisplayAtom() const
return callee()->displayAtom();
case ASMJS: {
#ifdef JS_ION
AsmJSActivation &act = *data_.activations_->asAsmJS();
return act.module().exportedFunction(act.exportIndex()).name();
return data_.asmJSFrames_.functionDisplayAtom();
#else
break;
#endif
@ -1000,17 +1012,12 @@ FrameIter::computeLine(uint32_t *column) const
case INTERP:
case JIT:
return PCToLineNumber(script(), pc(), column);
case ASMJS: {
case ASMJS:
#ifdef JS_ION
AsmJSActivation &act = *data_.activations_->asAsmJS();
AsmJSModule::ExportedFunction &func = act.module().exportedFunction(act.exportIndex());
if (column)
*column = func.column();
return func.line();
return data_.asmJSFrames_.computeLine(column);
#else
break;
#endif
}
}
MOZ_ASSUME_UNREACHABLE("Unexpected state");
@ -1654,13 +1661,13 @@ jit::JitActivation::markRematerializedFrames(JSTracer *trc)
#endif // JS_ION
AsmJSActivation::AsmJSActivation(JSContext *cx, AsmJSModule &module, unsigned exportIndex)
AsmJSActivation::AsmJSActivation(JSContext *cx, AsmJSModule &module)
: Activation(cx, AsmJS),
module_(module),
errorRejoinSP_(nullptr),
profiler_(nullptr),
resumePC_(nullptr),
exportIndex_(exportIndex)
exitSP_(nullptr)
{
if (cx->runtime()->spsProfiler.enabled()) {
// Use a profiler string that matches jsMatch regex in

View File

@ -12,6 +12,7 @@
#include "jsfun.h"
#include "jsscript.h"
#include "jit/AsmJSLink.h"
#include "jit/JitFrameIterator.h"
#ifdef CHECK_OSIPOINT_REGISTERS
#include "jit/Registers.h" // for RegisterDump
@ -1502,18 +1503,16 @@ class AsmJSActivation : public Activation
void *errorRejoinSP_;
SPSProfiler *profiler_;
void *resumePC_;
uint8_t *exitSP_;
// These bits are temporary and will be replaced when real asm.js
// stack-walking support lands:
unsigned exportIndex_;
static const intptr_t InterruptedSP = -1;
public:
AsmJSActivation(JSContext *cx, AsmJSModule &module, unsigned exportIndex);
AsmJSActivation(JSContext *cx, AsmJSModule &module);
~AsmJSActivation();
JSContext *cx() { return cx_; }
AsmJSModule &module() const { return module_; }
unsigned exportIndex() const { return exportIndex_; }
AsmJSActivation *prevAsmJS() const { return prevAsmJS_; }
// Read by JIT code:
@ -1522,9 +1521,16 @@ class AsmJSActivation : public Activation
// Initialized by JIT code:
static unsigned offsetOfErrorRejoinSP() { return offsetof(AsmJSActivation, errorRejoinSP_); }
static unsigned offsetOfExitSP() { return offsetof(AsmJSActivation, exitSP_); }
// Set from SIGSEGV handler:
void setResumePC(void *pc) { resumePC_ = pc; }
void setInterrupted(void *pc) { resumePC_ = pc; exitSP_ = (uint8_t*)InterruptedSP; }
bool isInterruptedSP() const { return exitSP_ == (uint8_t*)InterruptedSP; }
// Note: exitSP is the sp right before the call instruction. On x86, this
// means before the return address is pushed on the stack; on ARM, this
// means after.
uint8_t *exitSP() const { JS_ASSERT(!isInterruptedSP()); return exitSP_; }
};
// A FrameIter walks over the runtime's stack of JS script activations,
@ -1575,6 +1581,7 @@ class FrameIter
#ifdef JS_ION
jit::JitFrameIterator jitFrames_;
unsigned ionInlineFrameNo_;
AsmJSFrameIterator asmJSFrames_;
#endif
Data(JSContext *cx, SavedOption savedOption, ContextOption contextOption,
@ -1701,6 +1708,7 @@ class FrameIter
#ifdef JS_ION
void nextJitFrame();
void popJitFrame();
void popAsmJSFrame();
#endif
void settleOnActivation();