Mirror of https://gitlab.winehq.org/wine/wine-gecko.git, synced 2024-09-13 09:24:08 -07:00
Bug 1207827 - Delete Assembler::actualOffset() and transitive closure. r=nbp
The implementation of Assembler::actualOffset() is now a no-op in all targets, so it is no longer necessary to rewrite assembler buffer offsets to their final form after finishing the assembler buffer.

- Delete Assembler::actualOffset() in all targets.
- Delete AsmJSModule::CodeRange::updateOffsets().
- Delete AsmJSModule::updateCodeOffset().
- Delete AsmJSModule::updateOffsets().
- Delete CodeOffsetLabel::fixup().
- Delete ICEntry::fixupReturnOffset().
- Delete LSafepoint::fixupOffset().
- Delete OsiIndex::fixUpOffset().
- Delete PCMappingEntry::fixupNativeOffset().

Also delete all code calling these functions.
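For context, a minimal sketch (simplified stand-ins, not the actual SpiderMonkey classes) of the pattern this commit removes: once every backend's actualOffset() had become the identity function, the post-assembly fix-up helpers that routed offsets through it could no longer change anything, so both the helpers and their callers can be deleted.

    // Sketch only: hypothetical, simplified stand-ins for the real classes.
    #include <cstddef>
    #include <cstdint>

    struct Assembler {
        // On ARM this used to translate assembler-buffer offsets into final
        // code offsets (e.g. after constant-pool placement); it is now the
        // identity on every target.
        uint32_t actualOffset(uint32_t off) const { return off; }
    };

    struct CodeOffsetLabel {
        size_t offset_;
        explicit CodeOffsetLabel(size_t off) : offset_(off) {}
        size_t offset() const { return offset_; }
        // Deleted by this change: rewriting the stored offset through an
        // identity function is a no-op.
        void fixup(Assembler* masm) { offset_ = masm->actualOffset(offset_); }
    };

Since fixup() and the other helpers listed above only forwarded to actualOffset(), deleting them leaves callers reading offset() directly, which is exactly the rewrite the hunks below perform.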
This commit is contained in:
parent 2162088676
commit 80ecf39fd8
@@ -314,10 +314,9 @@ AsmJSModule::finish(ExclusiveContext* cx, TokenStream& tokenStream, MacroAssembl
     MOZ_ASSERT(masm.preBarrierTableBytes() == 0);
     MOZ_ASSERT(!masm.hasSelfReference());

-    // Copy over metadata, making sure to update all offsets on ARM.
-    staticLinkData_.interruptExitOffset = masm.actualOffset(interruptLabel.offset());
-    staticLinkData_.outOfBoundsExitOffset = masm.actualOffset(outOfBoundsLabel.offset());
+    // Copy over metadata.
+    staticLinkData_.interruptExitOffset = interruptLabel.offset();
+    staticLinkData_.outOfBoundsExitOffset = outOfBoundsLabel.offset();

     // Heap-access metadata used for link-time patching and fault-handling.
     heapAccesses_ = masm.extractAsmJSHeapAccesses();
@@ -325,30 +324,6 @@ AsmJSModule::finish(ExclusiveContext* cx, TokenStream& tokenStream, MacroAssembl
     // Call-site metadata used for stack unwinding.
     callSites_ = masm.extractCallSites();

-#if defined(JS_CODEGEN_ARM)
-    // ARM requires the offsets to be updated.
-    pod.functionBytes_ = masm.actualOffset(pod.functionBytes_);
-    for (size_t i = 0; i < heapAccesses_.length(); i++) {
-        AsmJSHeapAccess& a = heapAccesses_[i];
-        a.setInsnOffset(masm.actualOffset(a.insnOffset()));
-    }
-    for (unsigned i = 0; i < numExportedFunctions(); i++) {
-        if (!exportedFunction(i).isChangeHeap())
-            exportedFunction(i).updateCodeOffset(masm);
-    }
-    for (unsigned i = 0; i < numExits(); i++)
-        exit(i).updateOffsets(masm);
-    for (size_t i = 0; i < callSites_.length(); i++) {
-        CallSite& c = callSites_[i];
-        c.setReturnAddressOffset(masm.actualOffset(c.returnAddressOffset()));
-    }
-    for (size_t i = 0; i < codeRanges_.length(); i++) {
-        codeRanges_[i].updateOffsets(masm);
-        MOZ_ASSERT_IF(i > 0, codeRanges_[i - 1].end() <= codeRanges_[i].begin());
-    }
-    for (size_t i = 0; i < builtinThunkOffsets_.length(); i++)
-        builtinThunkOffsets_[i] = masm.actualOffset(builtinThunkOffsets_[i]);
-#endif
     MOZ_ASSERT(pod.functionBytes_ % AsmJSPageSize == 0);

     // Absolute link metadata: absolute addresses that refer to some fixed
@@ -356,7 +331,7 @@ AsmJSModule::finish(ExclusiveContext* cx, TokenStream& tokenStream, MacroAssembl
     AbsoluteLinkArray& absoluteLinks = staticLinkData_.absoluteLinks;
     for (size_t i = 0; i < masm.numAsmJSAbsoluteLinks(); i++) {
         AsmJSAbsoluteLink src = masm.asmJSAbsoluteLink(i);
-        if (!absoluteLinks[src.target].append(masm.actualOffset(src.patchAt.offset())))
+        if (!absoluteLinks[src.target].append(src.patchAt.offset()))
             return false;
     }

@@ -368,7 +343,7 @@ AsmJSModule::finish(ExclusiveContext* cx, TokenStream& tokenStream, MacroAssembl
     for (size_t i = 0; i < masm.numCodeLabels(); i++) {
         CodeLabel src = masm.codeLabel(i);
         int32_t labelOffset = src.dest()->offset();
-        int32_t targetOffset = masm.actualOffset(src.src()->offset());
+        int32_t targetOffset = src.src()->offset();
         // The patched uses of a label embed a linked list where the
         // to-be-patched immediate is the offset of the next to-be-patched
         // instruction.
@@ -420,15 +395,6 @@ AsmJSModule::finish(ExclusiveContext* cx, TokenStream& tokenStream, MacroAssembl
     }
 #endif

-#if defined(MOZ_VTUNE) || defined(JS_ION_PERF)
-    // Fix up the code offsets.
-    for (size_t i = 0; i < profiledFunctions_.length(); i++) {
-        ProfiledFunction& pf = profiledFunctions_[i];
-        pf.pod.startCodeOffset = masm.actualOffset(pf.pod.startCodeOffset);
-        pf.pod.endCodeOffset = masm.actualOffset(pf.pod.endCodeOffset);
-    }
-#endif
-
     return true;
 }

@@ -1437,29 +1403,6 @@ AsmJSModule::CodeRange::CodeRange(AsmJSExit::BuiltinKind builtin, uint32_t begin
     MOZ_ASSERT(profilingReturn_ < end_);
 }

-void
-AsmJSModule::CodeRange::updateOffsets(jit::MacroAssembler& masm)
-{
-    uint32_t entryBefore = 0;
-    uint32_t profilingJumpBefore = 0;
-    uint32_t profilingEpilogueBefore = 0;
-    if (isFunction()) {
-        entryBefore = entry();
-        profilingJumpBefore = profilingJump();
-        profilingEpilogueBefore = profilingEpilogue();
-    }
-
-    begin_ = masm.actualOffset(begin_);
-    profilingReturn_ = masm.actualOffset(profilingReturn_);
-    end_ = masm.actualOffset(end_);
-
-    if (isFunction()) {
-        setDeltas(masm.actualOffset(entryBefore),
-                  masm.actualOffset(profilingJumpBefore),
-                  masm.actualOffset(profilingEpilogueBefore));
-    }
-}
-
 #if defined(MOZ_VTUNE) || defined(JS_ION_PERF)
 size_t
 AsmJSModule::ProfiledFunction::serializedSize() const
@@ -398,10 +398,6 @@ class AsmJSModule
             MOZ_ASSERT(!jitCodeOffset_);
             jitCodeOffset_ = off;
         }
-        void updateOffsets(jit::MacroAssembler& masm) {
-            interpCodeOffset_ = masm.actualOffset(interpCodeOffset_);
-            jitCodeOffset_ = masm.actualOffset(jitCodeOffset_);
-        }

         size_t serializedSize() const;
         uint8_t* serialize(uint8_t* cursor) const;
@@ -516,10 +512,6 @@ class AsmJSModule
             MOZ_ASSERT(pod.codeOffset_ == UINT32_MAX);
             pod.codeOffset_ = off;
         }
-        void updateCodeOffset(jit::MacroAssembler& masm) {
-            MOZ_ASSERT(!isChangeHeap());
-            pod.codeOffset_ = masm.actualOffset(pod.codeOffset_);
-        }

         unsigned numArgs() const {
             MOZ_ASSERT(!isChangeHeap());
@@ -574,7 +566,6 @@ class AsmJSModule
         CodeRange(Kind kind, uint32_t begin, uint32_t end);
         CodeRange(Kind kind, uint32_t begin, uint32_t profilingReturn, uint32_t end);
         CodeRange(AsmJSExit::BuiltinKind builtin, uint32_t begin, uint32_t pret, uint32_t end);
-        void updateOffsets(jit::MacroAssembler& masm);

         Kind kind() const { return Kind(u.kind_); }
         bool isFunction() const { return kind() == Function; }
@@ -1498,7 +1498,7 @@ class MOZ_STACK_CLASS ModuleValidator
             AsmJSModule::RelativeLink link(AsmJSModule::RelativeLink::RawPointer);
             link.patchAtOffset = tableBaseOffset + elemIndex * sizeof(uint8_t*);
             Label* entry = functionEntry(table.elem(elemIndex).funcIndex());
-            link.targetOffset = masm().actualOffset(entry->offset());
+            link.targetOffset = entry->offset();
             if (!module_->addRelativeLink(link))
                 return false;
         }
@@ -470,10 +470,8 @@ NativeRegExpMacroAssembler::GenerateCode(JSContext* cx, bool match_only)
     for (size_t i = 0; i < labelPatches.length(); i++) {
         LabelPatch& v = labelPatches[i];
         MOZ_ASSERT(!v.label);
-        v.patchOffset.fixup(&masm);
-        uintptr_t offset = masm.actualOffset(v.labelOffset);
         Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, v.patchOffset),
-                                           ImmPtr(code->raw() + offset),
+                                           ImmPtr(code->raw() + v.labelOffset),
                                            ImmPtr(0));
     }

@@ -155,7 +155,6 @@ BaselineCompiler::compile()

     for (size_t i = 0; i < pcMappingEntries_.length(); i++) {
         PCMappingEntry& entry = pcMappingEntries_[i];
-        entry.fixupNativeOffset(masm);

         if (entry.addIndexEntry) {
             PCMappingIndexEntry indexEntry;
@@ -190,16 +189,6 @@ BaselineCompiler::compile()
         return Method_Error;
     }

-    prologueOffset_.fixup(&masm);
-    epilogueOffset_.fixup(&masm);
-    profilerEnterFrameToggleOffset_.fixup(&masm);
-    profilerExitFrameToggleOffset_.fixup(&masm);
-#ifdef JS_TRACE_LOGGING
-    traceLoggerEnterToggleOffset_.fixup(&masm);
-    traceLoggerExitToggleOffset_.fixup(&masm);
-#endif
-    postDebugPrologueOffset_.fixup(&masm);
-
     // Note: There is an extra entry in the bytecode type map for the search hint, see below.
     size_t bytecodeTypeMapEntries = script->nTypeSets() + 1;

@@ -258,7 +247,6 @@ BaselineCompiler::compile()
     // Patch IC loads using IC entries.
     for (size_t i = 0; i < icLoadLabels_.length(); i++) {
         CodeOffsetLabel label = icLoadLabels_[i].label;
-        label.fixup(&masm);
         size_t icEntry = icLoadLabels_[i].icEntry;
         ICEntry* entryAddr = &(baselineScript->icEntry(icEntry));
         Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, label),
@@ -1168,7 +1168,6 @@ JitRuntime::generateBaselineDebugModeOSRHandler(JSContext* cx, uint32_t* noFrame
     if (!code)
         return nullptr;

-    noFrameRegPopOffset.fixup(&masm);
     *noFrameRegPopOffsetOut = noFrameRegPopOffset.offset();

 #ifdef JS_ION_PERF
@@ -6260,9 +6260,7 @@ ICGetProp_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
 void
 ICGetProp_Fallback::Compiler::postGenerateStubCode(MacroAssembler& masm, Handle<JitCode*> code)
 {
-    CodeOffsetLabel offset(returnOffset_);
-    offset.fixup(&masm);
-    cx->compartment()->jitCompartment()->initBaselineGetPropReturnAddr(code->raw() + offset.offset());
+    cx->compartment()->jitCompartment()->initBaselineGetPropReturnAddr(code->raw() + returnOffset_);
 }

 bool
@@ -7759,9 +7757,7 @@ ICSetProp_Fallback::Compiler::generateStubCode(MacroAssembler& masm)
 void
 ICSetProp_Fallback::Compiler::postGenerateStubCode(MacroAssembler& masm, Handle<JitCode*> code)
 {
-    CodeOffsetLabel offset(returnOffset_);
-    offset.fixup(&masm);
-    cx->compartment()->jitCompartment()->initBaselineSetPropReturnAddr(code->raw() + offset.offset());
+    cx->compartment()->jitCompartment()->initBaselineSetPropReturnAddr(code->raw() + returnOffset_);
 }

 static void
@@ -9537,9 +9533,7 @@ ICCall_Fallback::Compiler::postGenerateStubCode(MacroAssembler& masm, Handle<Jit
     if (MOZ_UNLIKELY(isSpread_))
         return;

-    CodeOffsetLabel offset(returnOffset_);
-    offset.fixup(&masm);
-    cx->compartment()->jitCompartment()->initBaselineCallReturnAddr(code->raw() + offset.offset(),
+    cx->compartment()->jitCompartment()->initBaselineCallReturnAddr(code->raw() + returnOffset_,
                                                                     isConstructing_);
 }

@@ -714,7 +714,6 @@ BaselineScript::copyICEntries(JSScript* script, const ICEntry* entries, MacroAss
     for (uint32_t i = 0; i < numICEntries(); i++) {
         ICEntry& realEntry = icEntry(i);
         realEntry = entries[i];
-        realEntry.fixupReturnOffset(masm);

         if (!realEntry.hasStub()) {
             // VM call without any stubs.
@@ -8140,13 +8140,11 @@ CodeGenerator::link(JSContext* cx, CompilerConstraintList* constraints)

     {
         AutoWritableJitCode awjc(code);
-        invalidateEpilogueData_.fixup(&masm);
         Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, invalidateEpilogueData_),
                                            ImmPtr(ionScript),
                                            ImmPtr((void*)-1));

         for (size_t i = 0; i < ionScriptLabels_.length(); i++) {
-            ionScriptLabels_[i].fixup(&masm);
             Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, ionScriptLabels_[i]),
                                                ImmPtr(ionScript),
                                                ImmPtr((void*)-1));
@@ -8155,7 +8153,6 @@ CodeGenerator::link(JSContext* cx, CompilerConstraintList* constraints)
 #ifdef JS_TRACE_LOGGING
         TraceLoggerThread* logger = TraceLoggerForMainThread(cx->runtime());
         for (uint32_t i = 0; i < patchableTraceLoggers_.length(); i++) {
-            patchableTraceLoggers_[i].fixup(&masm);
             Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, patchableTraceLoggers_[i]),
                                                ImmPtr(logger),
                                                ImmPtr(nullptr));
@@ -8167,7 +8164,6 @@ CodeGenerator::link(JSContext* cx, CompilerConstraintList* constraints)
         ionScript->setTraceLoggerEvent(event);
         uint32_t textId = event.payload()->textId();
         for (uint32_t i = 0; i < patchableTLScripts_.length(); i++) {
-            patchableTLScripts_[i].fixup(&masm);
             Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, patchableTLScripts_[i]),
                                                ImmPtr((void*) uintptr_t(textId)),
                                                ImmPtr((void*)0));
@@ -8177,11 +8173,9 @@ CodeGenerator::link(JSContext* cx, CompilerConstraintList* constraints)
         // Patch shared stub IC loads using IC entries
         for (size_t i = 0; i < sharedStubs_.length(); i++) {
             CodeOffsetLabel label = sharedStubs_[i].label;
-            label.fixup(&masm);

             IonICEntry& entry = ionScript->sharedStubList()[i];
             entry = sharedStubs_[i].entry;
-            entry.fixupReturnOffset(masm);
             Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, label),
                                                ImmPtr(&entry),
                                                ImmPtr((void*)-1));
@@ -8205,8 +8199,7 @@ CodeGenerator::link(JSContext* cx, CompilerConstraintList* constraints)
     ionScript->setInvalidationEpilogueDataOffset(invalidateEpilogueData_.offset());
     ionScript->setOsrPc(gen->info().osrPc());
     ionScript->setOsrEntryOffset(getOsrEntryOffset());
-    ptrdiff_t real_invalidate = masm.actualOffset(invalidate_.offset());
-    ionScript->setInvalidationEpilogueOffset(real_invalidate);
+    ionScript->setInvalidationEpilogueOffset(invalidate_.offset());

     ionScript->setDeoptTable(deoptTable_);

|
@ -1116,14 +1116,10 @@ IonScript::copyPatchableBackedges(JSContext* cx, JitCode* code,
|
||||
PatchableBackedgeInfo& info = backedges[i];
|
||||
PatchableBackedge* patchableBackedge = &backedgeList()[i];
|
||||
|
||||
// Convert to actual offsets for the benefit of the ARM backend.
|
||||
info.backedge.fixup(&masm);
|
||||
uint32_t loopHeaderOffset = masm.actualOffset(info.loopHeader->offset());
|
||||
uint32_t interruptCheckOffset = masm.actualOffset(info.interruptCheck->offset());
|
||||
|
||||
CodeLocationJump backedge(code, info.backedge);
|
||||
CodeLocationLabel loopHeader(code, CodeOffsetLabel(loopHeaderOffset));
|
||||
CodeLocationLabel interruptCheck(code, CodeOffsetLabel(interruptCheckOffset));
|
||||
CodeLocationLabel loopHeader(code, CodeOffsetLabel(info.loopHeader->offset()));
|
||||
CodeLocationLabel interruptCheck(code, CodeOffsetLabel(info.interruptCheck->offset()));
|
||||
new(patchableBackedge) PatchableBackedge(backedge, loopHeader, interruptCheck);
|
||||
|
||||
// Point the backedge to either of its possible targets, according to
|
||||
@@ -1147,16 +1143,12 @@ IonScript::copySafepointIndices(const SafepointIndex* si, MacroAssembler& masm)
     // final code address now.
     SafepointIndex* table = safepointIndices();
     memcpy(table, si, safepointIndexEntries_ * sizeof(SafepointIndex));
-    for (size_t i = 0; i < safepointIndexEntries_; i++)
-        table[i].adjustDisplacement(masm.actualOffset(table[i].displacement()));
 }

 void
 IonScript::copyOsiIndices(const OsiIndex* oi, MacroAssembler& masm)
 {
     memcpy(osiIndices(), oi, osiIndexEntries_ * sizeof(OsiIndex));
-    for (unsigned i = 0; i < osiIndexEntries_; i++)
-        osiIndices()[i].fixUpOffset(masm);
 }

 void
@@ -51,7 +51,7 @@ CodeLocationJump::repoint(JitCode* code, MacroAssembler* masm)
 #ifdef JS_CODEGEN_X64
         MOZ_ASSERT((uint64_t)raw_ <= UINT32_MAX);
 #endif
-        new_off = masm->actualOffset((uintptr_t)raw_);
+        new_off = (uintptr_t)raw_;
 #ifdef JS_SMALL_BRANCH
         jumpTableEntryOffset = masm->actualIndex(jumpTableEntryOffset);
 #endif
@@ -72,7 +72,7 @@ CodeLocationLabel::repoint(JitCode* code, MacroAssembler* masm)
 #ifdef JS_CODEGEN_X64
         MOZ_ASSERT((uint64_t)raw_ <= UINT32_MAX);
 #endif
-        new_off = masm->actualOffset((uintptr_t)raw_);
+        new_off = (uintptr_t)raw_;
     }
     MOZ_ASSERT(new_off < code->instructionsSize());

@@ -80,16 +80,9 @@ CodeLocationLabel::repoint(JitCode* code, MacroAssembler* masm)
     setAbsolute();
 }

-void
-CodeOffsetLabel::fixup(MacroAssembler* masm)
-{
-    offset_ = masm->actualOffset(offset_);
-}
-
 void
 CodeOffsetJump::fixup(MacroAssembler* masm)
 {
-    offset_ = masm->actualOffset(offset_);
 #ifdef JS_SMALL_BRANCH
     jumpTableIndex_ = masm->actualIndex(jumpTableIndex_);
 #endif
@@ -251,10 +244,9 @@ class IonCache::StubAttacher
         PatchJump(rejoinJump, rejoinLabel_);
     }

-    void patchStubCodePointer(MacroAssembler& masm, JitCode* code) {
+    void patchStubCodePointer(JitCode* code) {
         if (hasStubCodePatchOffset_) {
             AutoWritableJitCode awjc(code);
-            stubCodePatchOffset_.fixup(&masm);
             Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, stubCodePatchOffset_),
                                                ImmPtr(code), STUB_ADDR);
         }
@@ -305,7 +297,7 @@ IonCache::attachStub(MacroAssembler& masm, StubAttacher& attacher, Handle<JitCod
     // Replace the STUB_ADDR constant by the address of the generated stub, such
     // as it can be kept alive even if the cache is flushed (see
     // MarkJitExitFrame).
-    attacher.patchStubCodePointer(masm, code);
+    attacher.patchStubCodePointer(code);

     // Update the failure path.
     attacher.patchNextStubJump(masm, code);
@@ -1683,12 +1683,6 @@ GetPcScript(JSContext* cx, JSScript** scriptRes, jsbytecode** pcRes)
         rt->ionPcScriptCache->add(hash, retAddr, pc, *scriptRes);
 }

-void
-OsiIndex::fixUpOffset(MacroAssembler& masm)
-{
-    callPointDisplacement_ = masm.actualOffset(callPointDisplacement_);
-}
-
 uint32_t
 OsiIndex::returnPointDisplacement() const
 {
|
@ -167,7 +167,6 @@ class OsiIndex
|
||||
uint32_t snapshotOffset() const {
|
||||
return snapshotOffset_;
|
||||
}
|
||||
void fixUpOffset(MacroAssembler& masm);
|
||||
};
|
||||
|
||||
// The layout of an Ion frame on the C stack is roughly:
|
||||
|
@ -1595,9 +1595,6 @@ class LSafepoint : public TempObject
|
||||
MOZ_ASSERT(!osiCallPointOffset_);
|
||||
osiCallPointOffset_ = osiCallPointOffset;
|
||||
}
|
||||
void fixupOffset(MacroAssembler* masm) {
|
||||
osiCallPointOffset_ = masm->actualOffset(osiCallPointOffset_);
|
||||
}
|
||||
};
|
||||
|
||||
class LInstruction::InputIterator
|
||||
|
@ -2192,7 +2192,6 @@ MacroAssembler::linkProfilerCallSites(JitCode* code)
|
||||
{
|
||||
for (size_t i = 0; i < profilerCallSites_.length(); i++) {
|
||||
CodeOffsetLabel offset = profilerCallSites_[i];
|
||||
offset.fixup(this);
|
||||
CodeLocationLabel location(code, offset);
|
||||
PatchDataWithValueCheck(location, ImmPtr(location.raw()), ImmPtr((void*)-1));
|
||||
}
|
||||
@@ -2628,7 +2627,6 @@ MacroAssembler::linkSelfReference(JitCode* code)
     // the JitCode onto the stack in order to GC it correctly. exitCodePatch should
     // be unset if the code never needed to push its JitCode*.
     if (hasSelfReference()) {
-        selfReferencePatch_.fixup(this);
         PatchDataWithValueCheck(CodeLocationLabel(code, selfReferencePatch_),
                                 ImmPtr(code),
                                 ImmPtr((void*)-1));
@@ -223,11 +223,11 @@ PerfSpewer::writeProfile(JSScript* script,

     uint32_t thisFunctionIndex = nextFunctionIndex++;
     uintptr_t funcStart = uintptr_t(code->raw());
-    uintptr_t funcEndInlineCode = funcStart + masm.actualOffset(endInlineCode.offset());
+    uintptr_t funcEndInlineCode = funcStart + endInlineCode.offset();
     uintptr_t funcEnd = funcStart + code->instructionsSize();

     // function begins with the prologue, which is located before the first basic block
-    size_t prologueSize = masm.actualOffset(basicBlocks_[0].start.offset());
+    size_t prologueSize = basicBlocks_[0].start.offset();

     if (prologueSize > 0) {
         fprintf(PerfFilePtr, "%" PRIxSIZE " %" PRIxSIZE " %s:%" PRIuSIZE ": Func%02d-Prologue\n",
@@ -238,8 +238,8 @@ PerfSpewer::writeProfile(JSScript* script,
     for (uint32_t i = 0; i < basicBlocks_.length(); i++) {
         Record& r = basicBlocks_[i];

-        uintptr_t blockStart = funcStart + masm.actualOffset(r.start.offset());
-        uintptr_t blockEnd = funcStart + masm.actualOffset(r.end.offset());
+        uintptr_t blockStart = funcStart + r.start.offset();
+        uintptr_t blockEnd = funcStart + r.end.offset();

         MOZ_ASSERT(cur <= blockStart);
         if (cur < blockStart) {
@@ -286,13 +286,6 @@ class ICEntry
         returnOffset_ = (uint32_t) offset.offset();
     }

-    void fixupReturnOffset(MacroAssembler& masm) {
-        CodeOffsetLabel offset = returnOffset();
-        offset.fixup(&masm);
-        MOZ_ASSERT(offset.offset() <= UINT32_MAX);
-        returnOffset_ = (uint32_t) offset.offset();
-    }
-
     uint32_t pcOffset() const {
         return pcOffset_;
     }
|
@ -1271,7 +1271,6 @@ class Assembler : public AssemblerShared
|
||||
|
||||
public:
|
||||
void resetCounter();
|
||||
uint32_t actualOffset(uint32_t off) const { return off; }
|
||||
uint32_t actualIndex(uint32_t) const;
|
||||
static uint8_t* PatchableJumpAddress(JitCode* code, uint32_t index);
|
||||
static uint32_t NopFill;
|
||||
@@ -1715,7 +1714,7 @@ class Assembler : public AssemblerShared

     // See Bind
     size_t labelOffsetToPatchOffset(size_t offset) {
-        return actualOffset(offset);
+        return offset;
     }

     void as_bkpt();
@@ -569,10 +569,8 @@ JitRuntime::generateArgumentsRectifier(JSContext* cx, void** returnAddrOut)
     AutoFlushICache afc("ArgumentsRectifier");
     JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

-    CodeOffsetLabel returnLabel(returnOffset);
-    returnLabel.fixup(&masm);
     if (returnAddrOut)
-        *returnAddrOut = (void*) (code->raw() + returnLabel.offset());
+        *returnAddrOut = (void*) (code->raw() + returnOffset);

 #ifdef JS_ION_PERF
     writePerfSpewerJitCodeProfile(code, "ArgumentsRectifier");
@@ -239,13 +239,12 @@ class Assembler : public vixl::Assembler
     void processCodeLabels(uint8_t* rawCode) {
         for (size_t i = 0; i < codeLabels_.length(); i++) {
             CodeLabel label = codeLabels_[i];
-            Bind(rawCode, label.dest(), rawCode + actualOffset(label.src()->offset()));
+            Bind(rawCode, label.dest(), rawCode + label.src()->offset());
         }
     }

     void Bind(uint8_t* rawCode, AbsoluteLabel* label, const void* address) {
-        uint32_t off = actualOffset(label->offset());
-        *reinterpret_cast<const void**>(rawCode + off) = address;
+        *reinterpret_cast<const void**>(rawCode + label->offset()) = address;
     }
     bool nextLink(BufferOffset cur, BufferOffset* next) {
         Instruction* link = getInstructionAt(cur);
@@ -263,14 +262,11 @@ class Assembler : public vixl::Assembler
         armbuffer_.flushPool();
     }

-    int actualOffset(int curOffset) { return curOffset; }
     int actualIndex(int curOffset) {
         ARMBuffer::PoolEntry pe(curOffset);
         return armbuffer_.poolEntryOffset(pe);
     }
-    int labelOffsetToPatchOffset(int labelOff) {
-        return actualOffset(labelOff);
-    }
+    size_t labelOffsetToPatchOffset(size_t labelOff) { return labelOff; }
     static uint8_t* PatchableJumpAddress(JitCode* code, uint32_t index) {
         return code->raw() + index;
     }
@@ -422,11 +422,8 @@ JitRuntime::generateArgumentsRectifier(JSContext* cx, void** returnAddrOut)
     Linker linker(masm);
     JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);

-    if (returnAddrOut) {
-        CodeOffsetLabel returnLabel(returnOffset);
-        returnLabel.fixup(&masm);
-        *returnAddrOut = (void*) (code->raw() + returnLabel.offset());
-    }
+    if (returnAddrOut)
+        *returnAddrOut = (void*) (code->raw() + returnOffset);

     return code;
 }
@@ -88,12 +88,6 @@ AssemblerMIPSShared::finish()
     isFinished = true;
 }

-uint32_t
-AssemblerMIPSShared::actualOffset(uint32_t off_) const
-{
-    return off_;
-}
-
 uint32_t
 AssemblerMIPSShared::actualIndex(uint32_t idx_) const
 {
@@ -138,7 +132,7 @@ AssemblerMIPSShared::processCodeLabels(uint8_t* rawCode)
 {
     for (size_t i = 0; i < codeLabels_.length(); i++) {
         CodeLabel label = codeLabels_[i];
-        asAsm().Bind(rawCode, label.dest(), rawCode + actualOffset(label.src()->offset()));
+        asAsm().Bind(rawCode, label.dest(), rawCode + label.src()->offset());
     }
 }

|
@ -732,7 +732,6 @@ class AssemblerMIPSShared : public AssemblerShared
|
||||
return m_buffer.getInst(bo);
|
||||
}
|
||||
public:
|
||||
uint32_t actualOffset(uint32_t) const;
|
||||
uint32_t actualIndex(uint32_t) const;
|
||||
static uint8_t* PatchableJumpAddress(JitCode* code, uint32_t index);
|
||||
protected:
|
||||
@@ -1043,9 +1042,7 @@ class AssemblerMIPSShared : public AssemblerShared
     void retarget(Label* label, Label* target);

     // See Bind
-    size_t labelOffsetToPatchOffset(size_t offset) {
-        return actualOffset(offset);
-    }
+    size_t labelOffsetToPatchOffset(size_t offset) { return offset; }

     void call(Label* label);
     void call(void* target);
|
@ -557,10 +557,8 @@ JitRuntime::generateArgumentsRectifier(JSContext* cx, void** returnAddrOut)
|
||||
AutoFlushICache afc("ArgumentsRectifier");
|
||||
JitCode* code = linker.newCode<NoGC>(cx, OTHER_CODE);
|
||||
|
||||
CodeOffsetLabel returnLabel(returnOffset);
|
||||
returnLabel.fixup(&masm);
|
||||
if (returnAddrOut)
|
||||
*returnAddrOut = (void*) (code->raw() + returnLabel.offset());
|
||||
*returnAddrOut = (void*) (code->raw() + returnOffset);
|
||||
|
||||
#ifdef JS_ION_PERF
|
||||
writePerfSpewerJitCodeProfile(code, "ArgumentsRectifier");
|
||||
|
@@ -189,7 +189,6 @@ class MacroAssemblerNone : public Assembler
     void nopAlign(size_t) { MOZ_CRASH(); }
     void checkStackAlignment() { MOZ_CRASH(); }
    uint32_t currentOffset() { MOZ_CRASH(); }
-    uint32_t actualOffset(uint32_t) { MOZ_CRASH(); }
     uint32_t labelOffsetToPatchOffset(uint32_t) { MOZ_CRASH(); }
     CodeOffsetLabel labelForPatch() { MOZ_CRASH(); }

|
@ -495,8 +495,6 @@ class CodeOffsetLabel
|
||||
size_t offset() const {
|
||||
return offset_;
|
||||
}
|
||||
void fixup(MacroAssembler* masm);
|
||||
|
||||
};
|
||||
|
||||
// Absolute location of a jump or a label in some generated JitCode block.
|
||||
|
@@ -43,13 +43,6 @@ class BaselineCompilerShared
         // If set, insert a PCMappingIndexEntry before encoding the
         // current entry.
         bool addIndexEntry;
-
-        void fixupNativeOffset(MacroAssembler& masm) {
-            CodeOffsetLabel offset(nativeOffset);
-            offset.fixup(&masm);
-            MOZ_ASSERT(offset.offset() <= UINT32_MAX);
-            nativeOffset = (uint32_t) offset.offset();
-        }
     };

     js::Vector<PCMappingEntry, 16, SystemAllocPolicy> pcMappingEntries_;
|
@ -632,10 +632,8 @@ CodeGeneratorShared::encodeSafepoints()
|
||||
for (SafepointIndex& index : safepointIndices_) {
|
||||
LSafepoint* safepoint = index.safepoint();
|
||||
|
||||
if (!safepoint->encoded()) {
|
||||
safepoint->fixupOffset(&masm);
|
||||
if (!safepoint->encoded())
|
||||
safepoints_.encode(safepoint);
|
||||
}
|
||||
|
||||
index.resolve();
|
||||
}
|
||||
@@ -710,14 +708,6 @@ CodeGeneratorShared::generateCompactNativeToBytecodeMap(JSContext* cx, JitCode*
     MOZ_ASSERT(nativeToBytecodeTableOffset_ == 0);
     MOZ_ASSERT(nativeToBytecodeNumRegions_ == 0);

-    // Iterate through all nativeToBytecode entries, fix up their masm offsets.
-    for (unsigned i = 0; i < nativeToBytecodeList_.length(); i++) {
-        NativeToBytecode& entry = nativeToBytecodeList_[i];
-
-        // Fixup code offsets.
-        entry.nativeOffset = CodeOffsetLabel(masm.actualOffset(entry.nativeOffset.offset()));
-    }
-
     if (!createNativeToBytecodeScriptList(cx))
         return false;

||||
@ -863,12 +853,9 @@ CodeGeneratorShared::generateCompactTrackedOptimizationsMap(JSContext* cx, JitCo
|
||||
if (!unique.init())
|
||||
return false;
|
||||
|
||||
// Iterate through all entries, fix up their masm offsets and deduplicate
|
||||
// their optimization attempts.
|
||||
// Iterate through all entries to deduplicate their optimization attempts.
|
||||
for (size_t i = 0; i < trackedOptimizations_.length(); i++) {
|
||||
NativeToTrackedOptimizations& entry = trackedOptimizations_[i];
|
||||
entry.startOffset = CodeOffsetLabel(masm.actualOffset(entry.startOffset.offset()));
|
||||
entry.endOffset = CodeOffsetLabel(masm.actualOffset(entry.endOffset.offset()));
|
||||
if (!unique.add(entry.optimizations))
|
||||
return false;
|
||||
}
|
||||
|
@@ -543,10 +543,8 @@ JitRuntime::generateArgumentsRectifier(JSContext* cx, void** returnAddrOut)
     writePerfSpewerJitCodeProfile(code, "ArgumentsRectifier");
 #endif

-    CodeOffsetLabel returnLabel(returnOffset);
-    returnLabel.fixup(&masm);
     if (returnAddrOut)
-        *returnAddrOut = (void*) (code->raw() + returnLabel.offset());
+        *returnAddrOut = (void*)(code->raw() + returnOffset);
     return code;
 }

@@ -3123,10 +3123,6 @@ class AssemblerX86Shared : public AssemblerShared
     }

-    // Defined for compatibility with ARM's assembler
-    uint32_t actualOffset(uint32_t x) {
-        return x;
-    }
-
     uint32_t actualIndex(uint32_t x) {
         return x;
     }