Bug 1228369: Rename CodeOffsetLabel into CodeOffset; r=luke
parent 6104b714c8
commit 9d22d2a3f7
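
For orientation: the patch is a mechanical rename of the jit::CodeOffsetLabel type to jit::CodeOffset across the assemblers and code generators; behaviour is unchanged. Below is a minimal, self-contained sketch of what the renamed type amounts to, reconstructed from the shared-assembler hunk further down -- it is not the verbatim SpiderMonkey header (the real code uses MOZ_ASSERT, and the used() helper here is an assumption based on the assertion visible in that hunk):

    #include <cassert>
    #include <cstddef>

    // Sketch only: a CodeOffset records a position in the output code buffer
    // that will be patched later, with size_t(-1) marking "not set yet".
    class CodeOffset
    {
        size_t offset_;
        static const size_t NOT_USED = size_t(-1);

      public:
        explicit CodeOffset(size_t offset) : offset_(offset) {}
        CodeOffset() : offset_(NOT_USED) {}

        bool used() const { return offset_ != NOT_USED; }            // assumed helper
        size_t offset() const { assert(used()); return offset_; }
    };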
@@ -7286,8 +7286,8 @@ GenerateCheckForHeapDetachment(ModuleValidator& m, Register scratch)
 MOZ_ASSERT(int(masm.framePushed()) >= int(ShadowStackSpace));
 AssertStackAlignment(masm, ABIStackAlignment);
 #if defined(JS_CODEGEN_X86)
-CodeOffsetLabel label = masm.movlWithPatch(PatchedAbsoluteAddress(), scratch);
-masm.append(AsmJSGlobalAccess(label, AsmJSHeapGlobalDataOffset));
+CodeOffset offset = masm.movlWithPatch(PatchedAbsoluteAddress(), scratch);
+masm.append(AsmJSGlobalAccess(offset, AsmJSHeapGlobalDataOffset));
 masm.branchTestPtr(Assembler::Zero, scratch, scratch, &m.onDetachedLabel());
 #else
 masm.branchTestPtr(Assembler::Zero, HeapReg, HeapReg, &m.onDetachedLabel());

@@ -999,7 +999,7 @@ NativeRegExpMacroAssembler::PushBacktrack(Label* label)
 {
 JitSpew(SPEW_PREFIX "PushBacktrack");
-CodeOffsetLabel patchOffset = masm.movWithPatch(ImmPtr(nullptr), temp0);
+CodeOffset patchOffset = masm.movWithPatch(ImmPtr(nullptr), temp0);
 MOZ_ASSERT(!label->bound());

@@ -191,9 +191,9 @@ class MOZ_STACK_CLASS NativeRegExpMacroAssembler : public RegExpMacroAssembler
 jit::Label* label;
 size_t labelOffset;
-jit::CodeOffsetLabel patchOffset;
+jit::CodeOffset patchOffset;
-LabelPatch(jit::Label* label, jit::CodeOffsetLabel patchOffset)
+LabelPatch(jit::Label* label, jit::CodeOffset patchOffset)
 : label(label), labelOffset(0), patchOffset(patchOffset)
 {}
 };

@@ -248,7 +248,7 @@ BaselineCompiler::compile()
 // Patch IC loads using IC entries.
 for (size_t i = 0; i < icLoadLabels_.length(); i++) {
-CodeOffsetLabel label = icLoadLabels_[i].label;
+CodeOffset label = icLoadLabels_[i].label;
 size_t icEntry = icLoadLabels_[i].icEntry;
 ICEntry* entryAddr = &(baselineScript->icEntry(icEntry));
 Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, label),

@@ -416,7 +416,7 @@ BaselineCompiler::emitPrologue()
 // Record the offset of the prologue, because Ion can bailout before
 // the scope chain is initialized.
-prologueOffset_ = CodeOffsetLabel(masm.currentOffset());
+prologueOffset_ = CodeOffset(masm.currentOffset());
 // When compiling with Debugger instrumentation, set the debuggeeness of
 // the frame before any operation that can call into the VM.

@@ -447,7 +447,7 @@ BaselineCompiler::emitEpilogue()
 {
 // Record the offset of the epilogue, so we can do early return from
 // Debugger handlers during on-stack recompile.
-epilogueOffset_ = CodeOffsetLabel(masm.currentOffset());
+epilogueOffset_ = CodeOffset(masm.currentOffset());
 masm.bind(&return_);

@@ -509,9 +509,9 @@ BaselineCompiler::emitIC(ICStub* stub, ICEntry::Kind kind)
 if (!entry)
 return false;
-CodeOffsetLabel patchOffset;
+CodeOffset patchOffset;
 EmitCallIC(&patchOffset, masm);
-entry->setReturnOffset(CodeOffsetLabel(masm.currentOffset()));
+entry->setReturnOffset(CodeOffset(masm.currentOffset()));
 if (!addICLoadLabel(patchOffset))
 return false;

@@ -624,7 +624,7 @@ BaselineCompiler::emitDebugPrologue()
 masm.bind(&done);
 }
-postDebugPrologueOffset_ = CodeOffsetLabel(masm.currentOffset());
+postDebugPrologueOffset_ = CodeOffset(masm.currentOffset());
 return true;
 }

@@ -798,7 +798,7 @@ BaselineCompiler::emitDebugTrap()
 JitCode* handler = cx->runtime()->jitRuntime()->debugTrapHandler(cx);
 if (!handler)
 return false;
-mozilla::DebugOnly<CodeOffsetLabel> offset = masm.toggledCall(handler, enabled);
+mozilla::DebugOnly<CodeOffset> offset = masm.toggledCall(handler, enabled);
 #ifdef DEBUG
 // Patchable call offset has to match the pc mapping offset.

@@ -886,7 +886,7 @@ BaselineCompiler::emitProfilerEnterFrame()
 // Store stack position to lastProfilingFrame variable, guarded by a toggled jump.
 // Starts off initially disabled.
 Label noInstrument;
-CodeOffsetLabel toggleOffset = masm.toggledJump(&noInstrument);
+CodeOffset toggleOffset = masm.toggledJump(&noInstrument);
 masm.profilerEnterFrame(masm.getStackPointer(), R0.scratchReg());
 masm.bind(&noInstrument);

@@ -901,7 +901,7 @@ BaselineCompiler::emitProfilerExitFrame()
 // Store previous frame to lastProfilingFrame variable, guarded by a toggled jump.
 // Starts off initially disabled.
 Label noInstrument;
-CodeOffsetLabel toggleOffset = masm.toggledJump(&noInstrument);
+CodeOffset toggleOffset = masm.toggledJump(&noInstrument);
 masm.profilerExitFrame();
 masm.bind(&noInstrument);
@@ -226,15 +226,15 @@ class BaselineCompiler : public BaselineCompilerSpecific
 NonAssertingLabel postBarrierSlot_;
 // Native code offset right before the scope chain is initialized.
-CodeOffsetLabel prologueOffset_;
+CodeOffset prologueOffset_;
 // Native code offset right before the frame is popped and the method
 // returned from.
-CodeOffsetLabel epilogueOffset_;
+CodeOffset epilogueOffset_;
 // Native code offset right after debug prologue and epilogue, or
 // equivalent positions when debug mode is off.
-CodeOffsetLabel postDebugPrologueOffset_;
+CodeOffset postDebugPrologueOffset_;
 // For each INITIALYIELD or YIELD op, this Vector maps the yield index
 // to the bytecode offset of the next op.

@@ -1115,7 +1115,7 @@ JitRuntime::generateBaselineDebugModeOSRHandler(JSContext* cx, uint32_t* noFrame
 // Not all patched baseline frames are returning from a situation where
 // the frame reg is already fixed up.
-CodeOffsetLabel noFrameRegPopOffset(masm.currentOffset());
+CodeOffset noFrameRegPopOffset(masm.currentOffset());
 // Record the stack pointer for syncing.
 masm.moveStackPtrTo(syncedStackStart);

@@ -575,7 +575,7 @@ BaselineScript::pcMappingReader(size_t indexEntry)
 }
 ICEntry&
-BaselineScript::icEntryFromReturnOffset(CodeOffsetLabel returnOffset)
+BaselineScript::icEntryFromReturnOffset(CodeOffset returnOffset)
 {
 size_t bottom = 0;
 size_t top = numICEntries();

@@ -702,7 +702,7 @@ BaselineScript::icEntryFromReturnAddress(uint8_t* returnAddr)
 {
 MOZ_ASSERT(returnAddr > method_->raw());
 MOZ_ASSERT(returnAddr < method_->raw() + method_->instructionsSize());
-CodeOffsetLabel offset(returnAddr - method_->raw());
+CodeOffset offset(returnAddr - method_->raw());
 return icEntryFromReturnOffset(offset);
 }

@@ -912,7 +912,7 @@ BaselineScript::toggleDebugTraps(JSScript* script, jsbytecode* pc)
 script->hasBreakpointsAt(curPC);
 // Patch the trap.
-CodeLocationLabel label(method(), CodeOffsetLabel(nativeOffset));
+CodeLocationLabel label(method(), CodeOffset(nativeOffset));
 Assembler::ToggleCall(label, enabled);
 }

@@ -934,8 +934,8 @@ BaselineScript::initTraceLogger(JSRuntime* runtime, JSScript* script)
 traceLoggerScriptEvent_ = TraceLoggerEvent(logger, TraceLogger_Scripts, script);
 if (TraceLogTextIdEnabled(TraceLogger_Engine) || TraceLogTextIdEnabled(TraceLogger_Scripts)) {
-CodeLocationLabel enter(method_, CodeOffsetLabel(traceLoggerEnterToggleOffset_));
-CodeLocationLabel exit(method_, CodeOffsetLabel(traceLoggerExitToggleOffset_));
+CodeLocationLabel enter(method_, CodeOffset(traceLoggerEnterToggleOffset_));
+CodeLocationLabel exit(method_, CodeOffset(traceLoggerExitToggleOffset_));
 Assembler::ToggleToCmp(enter);
 Assembler::ToggleToCmp(exit);
 }

@@ -960,8 +960,8 @@ BaselineScript::toggleTraceLoggerScripts(JSRuntime* runtime, JSScript* script, b
 AutoWritableJitCode awjc(method());
 // Enable/Disable the traceLogger prologue and epilogue.
-CodeLocationLabel enter(method_, CodeOffsetLabel(traceLoggerEnterToggleOffset_));
-CodeLocationLabel exit(method_, CodeOffsetLabel(traceLoggerExitToggleOffset_));
+CodeLocationLabel enter(method_, CodeOffset(traceLoggerEnterToggleOffset_));
+CodeLocationLabel exit(method_, CodeOffset(traceLoggerExitToggleOffset_));
 if (!engineEnabled) {
 if (enable) {
 Assembler::ToggleToCmp(enter);

@@ -988,8 +988,8 @@ BaselineScript::toggleTraceLoggerEngine(bool enable)
 AutoWritableJitCode awjc(method());
 // Enable/Disable the traceLogger prologue and epilogue.
-CodeLocationLabel enter(method_, CodeOffsetLabel(traceLoggerEnterToggleOffset_));
-CodeLocationLabel exit(method_, CodeOffsetLabel(traceLoggerExitToggleOffset_));
+CodeLocationLabel enter(method_, CodeOffset(traceLoggerEnterToggleOffset_));
+CodeLocationLabel exit(method_, CodeOffset(traceLoggerExitToggleOffset_));
 if (!scriptsEnabled) {
 if (enable) {
 Assembler::ToggleToCmp(enter);

@@ -1018,8 +1018,8 @@ BaselineScript::toggleProfilerInstrumentation(bool enable)
 AutoWritableJitCode awjc(method());
 // Toggle the jump
-CodeLocationLabel enterToggleLocation(method_, CodeOffsetLabel(profilerEnterToggleOffset_));
-CodeLocationLabel exitToggleLocation(method_, CodeOffsetLabel(profilerExitToggleOffset_));
+CodeLocationLabel enterToggleLocation(method_, CodeOffset(profilerEnterToggleOffset_));
+CodeLocationLabel exitToggleLocation(method_, CodeOffset(profilerExitToggleOffset_));
 if (enable) {
 Assembler::ToggleToCmp(enterToggleLocation);
 Assembler::ToggleToCmp(exitToggleLocation);
@@ -365,7 +365,7 @@ struct BaselineScript
 }
 ICEntry& icEntry(size_t index);
-ICEntry& icEntryFromReturnOffset(CodeOffsetLabel returnOffset);
+ICEntry& icEntryFromReturnOffset(CodeOffset returnOffset);
 ICEntry& icEntryFromPCOffset(uint32_t pcOffset);
 ICEntry& icEntryFromPCOffset(uint32_t pcOffset, ICEntry* prevLookedUpEntry);
 ICEntry& callVMEntryFromPCOffset(uint32_t pcOffset);

@@ -1701,10 +1701,10 @@ CodeGenerator::emitSharedStub(ICStub::Kind kind, LInstruction* lir)
 masm.Push(Imm32(descriptor));
 // Call into the stubcode.
-CodeOffsetLabel patchOffset;
+CodeOffset patchOffset;
 IonICEntry entry(script->pcToOffset(pc), ICEntry::Kind_Op, script);
 EmitCallIC(&patchOffset, masm);
-entry.setReturnOffset(CodeOffsetLabel(masm.currentOffset()));
+entry.setReturnOffset(CodeOffset(masm.currentOffset()));
 SharedStub sharedStub(kind, entry, patchOffset);
 masm.propagateOOM(sharedStubs_.append(sharedStub));

@@ -3989,7 +3989,7 @@ struct ScriptCountBlockState
 void
 CodeGenerator::branchIfInvalidated(Register temp, Label* invalidated)
 {
-CodeOffsetLabel label = masm.movWithPatch(ImmWord(uintptr_t(-1)), temp);
+CodeOffset label = masm.movWithPatch(ImmWord(uintptr_t(-1)), temp);
 masm.propagateOOM(ionScriptLabels_.append(label));
 // If IonScript::invalidationCount_ != 0, the script has been invalidated.

@@ -8274,7 +8274,7 @@ CodeGenerator::link(JSContext* cx, CompilerConstraintList* constraints)
 #endif
 // Patch shared stub IC loads using IC entries
 for (size_t i = 0; i < sharedStubs_.length(); i++) {
-CodeOffsetLabel label = sharedStubs_[i].label;
+CodeOffset label = sharedStubs_[i].label;
 IonICEntry& entry = ionScript->sharedStubList()[i];
 entry = sharedStubs_[i].entry;

@@ -10219,7 +10219,7 @@ CodeGenerator::visitRecompileCheck(LRecompileCheck* ins)
 }
 // Check if not yet recompiling.
-CodeOffsetLabel label = masm.movWithPatch(ImmWord(uintptr_t(-1)), tmp);
+CodeOffset label = masm.movWithPatch(ImmWord(uintptr_t(-1)), tmp);
 masm.propagateOOM(ionScriptLabels_.append(label));
 masm.branch32(Assembler::Equal,
 Address(tmp, IonScript::offsetOfRecompiling()),

@@ -484,14 +484,14 @@ class CodeGenerator : public CodeGeneratorSpecific
 void emitAssertRangeI(const Range* r, Register input);
 void emitAssertRangeD(const Range* r, FloatRegister input, FloatRegister temp);
-Vector<CodeOffsetLabel, 0, JitAllocPolicy> ionScriptLabels_;
+Vector<CodeOffset, 0, JitAllocPolicy> ionScriptLabels_;
 struct SharedStub {
 ICStub::Kind kind;
 IonICEntry entry;
-CodeOffsetLabel label;
+CodeOffset label;
-SharedStub(ICStub::Kind kind, IonICEntry entry, CodeOffsetLabel label)
+SharedStub(ICStub::Kind kind, IonICEntry entry, CodeOffset label)
 : kind(kind), entry(entry), label(label)
 {}
 };

@@ -875,7 +875,7 @@ JitCode::togglePreBarriers(bool enabled)
 while (reader.more()) {
 size_t offset = reader.readUnsigned();
-CodeLocationLabel loc(this, CodeOffsetLabel(offset));
+CodeLocationLabel loc(this, CodeOffset(offset));
 if (enabled)
 Assembler::ToggleToCmp(loc);
 else

@@ -1118,8 +1118,8 @@ IonScript::copyPatchableBackedges(JSContext* cx, JitCode* code,
 info.backedge.fixup(&masm);
 CodeLocationJump backedge(code, info.backedge);
-CodeLocationLabel loopHeader(code, CodeOffsetLabel(info.loopHeader->offset()));
-CodeLocationLabel interruptCheck(code, CodeOffsetLabel(info.interruptCheck->offset()));
+CodeLocationLabel loopHeader(code, CodeOffset(info.loopHeader->offset()));
+CodeLocationLabel interruptCheck(code, CodeOffset(info.interruptCheck->offset()));
 new(patchableBackedge) PatchableBackedge(backedge, loopHeader, interruptCheck);
 // Point the backedge to either of its possible targets, according to

@@ -2984,7 +2984,7 @@ InvalidateActivation(FreeOp* fop, const JitActivationIterator& activations, bool
 Assembler::PatchWrite_Imm32(dataLabelToMunge, Imm32(delta));
 CodeLocationLabel osiPatchPoint = SafepointReader::InvalidationPatchPoint(ionScript, si);
-CodeLocationLabel invalidateEpilogue(ionCode, CodeOffsetLabel(ionScript->invalidateEpilogueOffset()));
+CodeLocationLabel invalidateEpilogue(ionCode, CodeOffset(ionScript->invalidateEpilogueOffset()));
 JitSpew(JitSpew_IonInvalidate, " ! Invalidate ionScript %p (inv count %u) -> patching osipoint %p",
 ionScript, ionScript->invalidationCount(), (void*) osiPatchPoint.raw());
@@ -168,7 +168,7 @@ class IonCache::StubAttacher
 CodeLocationLabel rejoinLabel_;
 CodeOffsetJump nextStubOffset_;
 CodeOffsetJump rejoinOffset_;
-CodeOffsetLabel stubCodePatchOffset_;
+CodeOffset stubCodePatchOffset_;
 public:
 explicit StubAttacher(IonCache& cache)

@@ -282,7 +282,7 @@ IonCache::emitInitialJump(MacroAssembler& masm, RepatchLabel& entry)
 lastJump_ = initialJump_;
 Label label;
 masm.bind(&label);
-rejoinLabel_ = CodeOffsetLabel(label.offset());
+rejoinLabel_ = CodeOffset(label.offset());
 }
 void

@@ -256,7 +256,7 @@ class IonCache
 // Set the initial 'out-of-line' jump state of the cache. The fallbackLabel is
 // the location of the out-of-line update (slow) path. This location will
 // be set to the exitJump of the last generated stub.
-void setFallbackLabel(CodeOffsetLabel fallbackLabel) {
+void setFallbackLabel(CodeOffset fallbackLabel) {
 fallbackLabel_ = fallbackLabel;
 }

@@ -62,14 +62,14 @@ MacroAssembler::implicitPop(uint32_t bytes)
 // ===============================================================
 // Stack manipulation functions.
-CodeOffsetLabel
+CodeOffset
 MacroAssembler::PushWithPatch(ImmWord word)
 {
 framePushed_ += sizeof(word.value);
 return pushWithPatch(word);
 }
-CodeOffsetLabel
+CodeOffset
 MacroAssembler::PushWithPatch(ImmPtr imm)
 {
 return PushWithPatch(ImmWord(uintptr_t(imm.value)));

@@ -81,21 +81,21 @@ MacroAssembler::PushWithPatch(ImmPtr imm)
 void
 MacroAssembler::call(const CallSiteDesc& desc, const Register reg)
 {
-CodeOffsetLabel l = call(reg);
+CodeOffset l = call(reg);
 append(desc, l, framePushed());
 }
 void
 MacroAssembler::call(const CallSiteDesc& desc, Label* label)
 {
-CodeOffsetLabel l = call(label);
+CodeOffset l = call(label);
 append(desc, l, framePushed());
 }
 void
 MacroAssembler::call(const CallSiteDesc& desc, AsmJSInternalCallee callee)
 {
-CodeOffsetLabel l = callWithPatch();
+CodeOffset l = callWithPatch();
 append(desc, l, framePushed(), callee.index);
 }

@@ -2103,7 +2103,7 @@ MacroAssembler::AutoProfilerCallInstrumentation::AutoProfilerCallInstrumentation
 JitContext* icx = GetJitContext();
 AbsoluteAddress profilingActivation(icx->runtime->addressOfProfilingActivation());
-CodeOffsetLabel label = masm.movWithPatch(ImmWord(uintptr_t(-1)), reg);
+CodeOffset label = masm.movWithPatch(ImmWord(uintptr_t(-1)), reg);
 masm.loadPtr(profilingActivation, reg2);
 masm.storePtr(reg, Address(reg2, JitActivation::offsetOfLastProfilingCallSite()));

@@ -2117,7 +2117,7 @@ void
 MacroAssembler::linkProfilerCallSites(JitCode* code)
 {
 for (size_t i = 0; i < profilerCallSites_.length(); i++) {
-CodeOffsetLabel offset = profilerCallSites_[i];
+CodeOffset offset = profilerCallSites_[i];
 CodeLocationLabel location(code, offset);
 PatchDataWithValueCheck(location, ImmPtr(location.raw()), ImmPtr((void*)-1));
 }
@@ -459,8 +459,8 @@ class MacroAssembler : public MacroAssemblerSpecific
 void Push(JSValueType type, Register reg);
 void PushValue(const Address& addr);
 void PushEmptyRooted(VMFunction::RootType rootType);
-inline CodeOffsetLabel PushWithPatch(ImmWord word);
-inline CodeOffsetLabel PushWithPatch(ImmPtr imm);
+inline CodeOffset PushWithPatch(ImmWord word);
+inline CodeOffset PushWithPatch(ImmPtr imm);
 void Pop(const Operand op) DEFINED_ON(x86_shared);
 void Pop(Register reg) PER_SHARED_ARCH;

@@ -491,8 +491,8 @@ class MacroAssembler : public MacroAssemblerSpecific
 // ===============================================================
 // Simple call functions.
-CodeOffsetLabel call(Register reg) PER_SHARED_ARCH;
-CodeOffsetLabel call(Label* label) PER_SHARED_ARCH;
+CodeOffset call(Register reg) PER_SHARED_ARCH;
+CodeOffset call(Label* label) PER_SHARED_ARCH;
 void call(const Address& addr) DEFINED_ON(x86_shared);
 void call(ImmWord imm) PER_SHARED_ARCH;
 // Call a target native function, which is neither traceable nor movable.

@@ -505,7 +505,7 @@ class MacroAssembler : public MacroAssemblerSpecific
 inline void call(const CallSiteDesc& desc, Label* label);
 inline void call(const CallSiteDesc& desc, AsmJSInternalCallee callee);
-CodeOffsetLabel callWithPatch() PER_SHARED_ARCH;
+CodeOffset callWithPatch() PER_SHARED_ARCH;
 void patchCall(uint32_t callerOffset, uint32_t calleeOffset) PER_SHARED_ARCH;
 // Push the return address and make a call. On platforms where this function

@@ -690,7 +690,7 @@ class MacroAssembler : public MacroAssemblerSpecific
 // If the JitCode that created this assembler needs to transition into the VM,
 // we want to store the JitCode on the stack in order to mark it during a GC.
 // This is a reference to a patch location where the JitCode* will be written.
-CodeOffsetLabel selfReferencePatch_;
+CodeOffset selfReferencePatch_;
 public:
 // ===============================================================

@@ -1050,7 +1050,7 @@ class MacroAssembler : public MacroAssemblerSpecific
 // All barriers are off by default.
 // They are enabled if necessary at the end of CodeGenerator::generate().
-CodeOffsetLabel nopJump = toggledJump(&done);
+CodeOffset nopJump = toggledJump(&done);
 writePrebarrierOffset(nopJump);
 callPreBarrier(address, type);

@@ -1309,7 +1309,7 @@ class MacroAssembler : public MacroAssemblerSpecific
 };
 friend class AutoProfilerCallInstrumentation;
-void appendProfilerCallSite(CodeOffsetLabel label) {
+void appendProfilerCallSite(CodeOffset label) {
 propagateOOM(profilerCallSites_.append(label));
 }

@@ -1323,7 +1323,7 @@ class MacroAssembler : public MacroAssemblerSpecific
 bool emitProfilingInstrumentation_;
 // Record locations of the call sites.
-Vector<CodeOffsetLabel, 0, SystemAllocPolicy> profilerCallSites_;
+Vector<CodeOffset, 0, SystemAllocPolicy> profilerCallSites_;
 public:
 void loadBaselineOrIonRaw(Register script, Register dest, Label* failure);

@@ -428,7 +428,7 @@ SafepointReader::InvalidationPatchPoint(IonScript* script, const SafepointIndex*
 {
 SafepointReader reader(script, si);
-return CodeLocationLabel(script->method(), CodeOffsetLabel(reader.osiCallPointOffset()));
+return CodeLocationLabel(script->method(), CodeOffset(reader.osiCallPointOffset()));
 }
 void
@@ -281,11 +281,11 @@ class ICEntry
 setKind(kind);
 }
-CodeOffsetLabel returnOffset() const {
-return CodeOffsetLabel(returnOffset_);
+CodeOffset returnOffset() const {
+return CodeOffset(returnOffset_);
 }
-void setReturnOffset(CodeOffsetLabel offset) {
+void setReturnOffset(CodeOffset offset) {
 MOZ_ASSERT(offset.offset() <= (size_t) UINT32_MAX);
 returnOffset_ = (uint32_t) offset.offset();
 }

@@ -951,13 +951,13 @@ Assembler::processCodeLabels(uint8_t* rawCode)
 }
 void
-Assembler::writeCodePointer(CodeOffsetLabel* label) {
+Assembler::writeCodePointer(CodeOffset* label) {
 BufferOffset off = writeInst(LabelBase::INVALID_OFFSET);
 label->use(off.getOffset());
 }
 void
-Assembler::Bind(uint8_t* rawCode, CodeOffsetLabel* label, const void* address)
+Assembler::Bind(uint8_t* rawCode, CodeOffset* label, const void* address)
 {
 *reinterpret_cast<const void**>(rawCode + label->offset()) = address;
 }

@@ -1354,7 +1354,7 @@ class Assembler : public AssemblerShared
 dataRelocations_.writeUnsigned(nextOffset().getOffset());
 }
 }
-void writePrebarrierOffset(CodeOffsetLabel label) {
+void writePrebarrierOffset(CodeOffset label) {
 preBarriers_.writeUnsigned(label.offset());
 }

@@ -1430,7 +1430,7 @@ class Assembler : public AssemblerShared
 static void WriteInstStatic(uint32_t x, uint32_t* dest);
 public:
-void writeCodePointer(CodeOffsetLabel* label);
+void writeCodePointer(CodeOffset* label);
 void haltingAlign(int alignment);
 void nopAlign(int alignment);

@@ -1696,10 +1696,10 @@ class Assembler : public AssemblerShared
 // I'm going to pretend this doesn't exist for now.
 void retarget(Label* label, void* target, Relocation::Kind reloc);
-void Bind(uint8_t* rawCode, CodeOffsetLabel* label, const void* address);
+void Bind(uint8_t* rawCode, CodeOffset* label, const void* address);
 // See Bind
-size_t labelToPatchOffset(CodeOffsetLabel label) {
+size_t labelToPatchOffset(CodeOffset label) {
 return label.offset();
 }

@@ -2027,7 +2027,7 @@ MacroAssemblerARMCompat::movePtr(AsmJSImmPtr imm, Register dest)
 else
 rs = L_LDR;
-append(AsmJSAbsoluteLink(CodeOffsetLabel(currentOffset()), imm.kind()));
+append(AsmJSAbsoluteLink(CodeOffset(currentOffset()), imm.kind()));
 ma_movPatchable(Imm32(-1), dest, Always, rs);
 }

@@ -4091,16 +4091,16 @@ MacroAssemblerARMCompat::ceilf(FloatRegister input, Register output, Label* bail
 bind(&fin);
 }
-CodeOffsetLabel
+CodeOffset
 MacroAssemblerARMCompat::toggledJump(Label* label)
 {
 // Emit a B that can be toggled to a CMP. See ToggleToJmp(), ToggleToCmp().
 BufferOffset b = ma_b(label, Always);
-CodeOffsetLabel ret(b.getOffset());
+CodeOffset ret(b.getOffset());
 return ret;
 }
-CodeOffsetLabel
+CodeOffset
 MacroAssemblerARMCompat::toggledCall(JitCode* target, bool enabled)
 {
 BufferOffset bo = nextOffset();

@@ -4111,7 +4111,7 @@ MacroAssemblerARMCompat::toggledCall(JitCode* target, bool enabled)
 ma_blx(scratch);
 else
 ma_nop();
-return CodeOffsetLabel(bo.getOffset());
+return CodeOffset(bo.getOffset());
 }
 void
@@ -5086,19 +5086,19 @@ MacroAssembler::reserveStack(uint32_t amount)
 // ===============================================================
 // Simple call functions.
-CodeOffsetLabel
+CodeOffset
 MacroAssembler::call(Register reg)
 {
 as_blx(reg);
-return CodeOffsetLabel(currentOffset());
+return CodeOffset(currentOffset());
 }
-CodeOffsetLabel
+CodeOffset
 MacroAssembler::call(Label* label)
 {
 // For now, assume that it'll be nearby.
 as_bl(label, Always);
-return CodeOffsetLabel(currentOffset());
+return CodeOffset(currentOffset());
 }
 void

@@ -5138,12 +5138,12 @@ MacroAssembler::call(JitCode* c)
 callJitNoProfiler(scratch);
 }
-CodeOffsetLabel
+CodeOffset
 MacroAssembler::callWithPatch()
 {
 // For now, assume that it'll be nearby.
 as_bl(BOffImm(), Always, /* documentation */ nullptr);
-return CodeOffsetLabel(currentOffset());
+return CodeOffset(currentOffset());
 }
 void
 MacroAssembler::patchCall(uint32_t callerOffset, uint32_t calleeOffset)

@@ -602,25 +602,25 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
 ma_dtr(IsLoad, sp, totSpace, reg, PostIndex);
 }
-CodeOffsetLabel toggledJump(Label* label);
+CodeOffset toggledJump(Label* label);
 // Emit a BLX or NOP instruction. ToggleCall can be used to patch this
 // instruction.
-CodeOffsetLabel toggledCall(JitCode* target, bool enabled);
+CodeOffset toggledCall(JitCode* target, bool enabled);
-CodeOffsetLabel pushWithPatch(ImmWord imm) {
+CodeOffset pushWithPatch(ImmWord imm) {
 ScratchRegisterScope scratch(asMasm());
-CodeOffsetLabel label = movWithPatch(imm, scratch);
+CodeOffset label = movWithPatch(imm, scratch);
 ma_push(scratch);
 return label;
 }
-CodeOffsetLabel movWithPatch(ImmWord imm, Register dest) {
-CodeOffsetLabel label = CodeOffsetLabel(currentOffset());
+CodeOffset movWithPatch(ImmWord imm, Register dest) {
+CodeOffset label = CodeOffset(currentOffset());
 ma_movPatchable(Imm32(imm.value), dest, Always, HasMOVWT() ? L_MOVWT : L_LDR);
 return label;
 }
-CodeOffsetLabel movWithPatch(ImmPtr imm, Register dest) {
+CodeOffset movWithPatch(ImmPtr imm, Register dest) {
 return movWithPatch(ImmWord(uintptr_t(imm.value)), dest);
 }

@@ -1764,8 +1764,8 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM
 bool buildOOLFakeExitFrame(void* fakeReturnAddr);
 public:
-CodeOffsetLabel labelForPatch() {
-return CodeOffsetLabel(nextOffset().getOffset());
+CodeOffset labelForPatch() {
+return CodeOffset(nextOffset().getOffset());
 }
 void computeEffectiveAddress(const Address& address, Register dest) {

@@ -31,10 +31,10 @@ EmitRepushTailCallReg(MacroAssembler& masm)
 }
 inline void
-EmitCallIC(CodeOffsetLabel* patchOffset, MacroAssembler& masm)
+EmitCallIC(CodeOffset* patchOffset, MacroAssembler& masm)
 {
 // Move ICEntry offset into ICStubReg
-CodeOffsetLabel offset = masm.movWithPatch(ImmWord(-1), ICStubReg);
+CodeOffset offset = masm.movWithPatch(ImmWord(-1), ICStubReg);
 *patchOffset = offset;
 // Load stub pointer into ICStubReg
|
@ -243,7 +243,7 @@ class Assembler : public vixl::Assembler
|
||||
}
|
||||
}
|
||||
|
||||
void Bind(uint8_t* rawCode, CodeOffsetLabel* label, const void* address) {
|
||||
void Bind(uint8_t* rawCode, CodeOffset* label, const void* address) {
|
||||
*reinterpret_cast<const void**>(rawCode + label->offset()) = address;
|
||||
}
|
||||
|
||||
@ -262,7 +262,7 @@ class Assembler : public vixl::Assembler
|
||||
ARMBuffer::PoolEntry pe(curOffset);
|
||||
return armbuffer_.poolEntryOffset(pe);
|
||||
}
|
||||
size_t labelToPatchOffset(CodeOffsetLabel label) {
|
||||
size_t labelToPatchOffset(CodeOffset label) {
|
||||
return label.offset();
|
||||
}
|
||||
static uint8_t* PatchableJumpAddress(JitCode* code, uint32_t index) {
|
||||
|
@ -509,20 +509,20 @@ MacroAssembler::reserveStack(uint32_t amount)
|
||||
// ===============================================================
|
||||
// Simple call functions.
|
||||
|
||||
CodeOffsetLabel
|
||||
CodeOffset
|
||||
MacroAssembler::call(Register reg)
|
||||
{
|
||||
syncStackPtr();
|
||||
Blr(ARMRegister(reg, 64));
|
||||
return CodeOffsetLabel(currentOffset());
|
||||
return CodeOffset(currentOffset());
|
||||
}
|
||||
|
||||
CodeOffsetLabel
|
||||
CodeOffset
|
||||
MacroAssembler::call(Label* label)
|
||||
{
|
||||
syncStackPtr();
|
||||
Bl(label);
|
||||
return CodeOffsetLabel(currentOffset());
|
||||
return CodeOffset(currentOffset());
|
||||
}
|
||||
|
||||
void
|
||||
@ -560,11 +560,11 @@ MacroAssembler::call(JitCode* c)
|
||||
blr(scratch64);
|
||||
}
|
||||
|
||||
CodeOffsetLabel
|
||||
CodeOffset
|
||||
MacroAssembler::callWithPatch()
|
||||
{
|
||||
MOZ_CRASH("NYI");
|
||||
return CodeOffsetLabel();
|
||||
return CodeOffset();
|
||||
}
|
||||
void
|
||||
MacroAssembler::patchCall(uint32_t callerOffset, uint32_t calleeOffset)
|
||||
|
@ -225,11 +225,11 @@ class MacroAssemblerCompat : public vixl::MacroAssembler
|
||||
// FIXME: This is the same on every arch.
|
||||
// FIXME: If we can share framePushed_, we can share this.
|
||||
// FIXME: Or just make it at the highest level.
|
||||
CodeOffsetLabel PushWithPatch(ImmWord word) {
|
||||
CodeOffset PushWithPatch(ImmWord word) {
|
||||
framePushed_ += sizeof(word.value);
|
||||
return pushWithPatch(word);
|
||||
}
|
||||
CodeOffsetLabel PushWithPatch(ImmPtr ptr) {
|
||||
CodeOffset PushWithPatch(ImmPtr ptr) {
|
||||
return PushWithPatch(ImmWord(uintptr_t(ptr.value)));
|
||||
}
|
||||
|
||||
@ -389,21 +389,21 @@ class MacroAssemblerCompat : public vixl::MacroAssembler
|
||||
movePtr(src.valueReg(), dest.valueReg());
|
||||
}
|
||||
|
||||
CodeOffsetLabel pushWithPatch(ImmWord imm) {
|
||||
CodeOffset pushWithPatch(ImmWord imm) {
|
||||
vixl::UseScratchRegisterScope temps(this);
|
||||
const Register scratch = temps.AcquireX().asUnsized();
|
||||
CodeOffsetLabel label = movWithPatch(imm, scratch);
|
||||
CodeOffset label = movWithPatch(imm, scratch);
|
||||
push(scratch);
|
||||
return label;
|
||||
}
|
||||
|
||||
CodeOffsetLabel movWithPatch(ImmWord imm, Register dest) {
|
||||
CodeOffset movWithPatch(ImmWord imm, Register dest) {
|
||||
BufferOffset off = immPool64(ARMRegister(dest, 64), imm.value);
|
||||
return CodeOffsetLabel(off.getOffset());
|
||||
return CodeOffset(off.getOffset());
|
||||
}
|
||||
CodeOffsetLabel movWithPatch(ImmPtr imm, Register dest) {
|
||||
CodeOffset movWithPatch(ImmPtr imm, Register dest) {
|
||||
BufferOffset off = immPool64(ARMRegister(dest, 64), uint64_t(imm.value));
|
||||
return CodeOffsetLabel(off.getOffset());
|
||||
return CodeOffset(off.getOffset());
|
||||
}
|
||||
|
||||
void boxValue(JSValueType type, Register src, Register dest) {
|
||||
@ -778,7 +778,7 @@ class MacroAssemblerCompat : public vixl::MacroAssembler
|
||||
}
|
||||
void movePtr(AsmJSImmPtr imm, Register dest) {
|
||||
BufferOffset off = movePatchablePtr(ImmWord(0xffffffffffffffffULL), dest);
|
||||
append(AsmJSAbsoluteLink(CodeOffsetLabel(off.getOffset()), imm.kind()));
|
||||
append(AsmJSAbsoluteLink(CodeOffset(off.getOffset()), imm.kind()));
|
||||
}
|
||||
void movePtr(ImmGCPtr imm, Register dest) {
|
||||
BufferOffset load = movePatchablePtr(ImmPtr(imm.value), dest);
|
||||
@ -2516,9 +2516,9 @@ class MacroAssemblerCompat : public vixl::MacroAssembler
|
||||
}
|
||||
|
||||
// Emit a B that can be toggled to a CMP. See ToggleToJmp(), ToggleToCmp().
|
||||
CodeOffsetLabel toggledJump(Label* label) {
|
||||
CodeOffset toggledJump(Label* label) {
|
||||
BufferOffset offset = b(label, Always);
|
||||
CodeOffsetLabel ret(offset.getOffset());
|
||||
CodeOffset ret(offset.getOffset());
|
||||
return ret;
|
||||
}
|
||||
|
||||
@ -2536,7 +2536,7 @@ class MacroAssemblerCompat : public vixl::MacroAssembler
|
||||
}
|
||||
}
|
||||
|
||||
void writePrebarrierOffset(CodeOffsetLabel label) {
|
||||
void writePrebarrierOffset(CodeOffset label) {
|
||||
preBarriers_.writeUnsigned(label.offset());
|
||||
}
|
||||
|
||||
@ -2554,14 +2554,14 @@ class MacroAssemblerCompat : public vixl::MacroAssembler
|
||||
}
|
||||
|
||||
public:
|
||||
CodeOffsetLabel labelForPatch() {
|
||||
return CodeOffsetLabel(nextOffset().getOffset());
|
||||
CodeOffset labelForPatch() {
|
||||
return CodeOffset(nextOffset().getOffset());
|
||||
}
|
||||
|
||||
void handleFailureWithHandlerTail(void* handler);
|
||||
|
||||
// FIXME: See CodeGeneratorX64 calls to noteAsmJSGlobalAccess.
|
||||
void patchAsmJSGlobalAccess(CodeOffsetLabel patchAt, uint8_t* code,
|
||||
void patchAsmJSGlobalAccess(CodeOffset patchAt, uint8_t* code,
|
||||
uint8_t* globalData, unsigned globalDataOffset)
|
||||
{
|
||||
MOZ_CRASH("patchAsmJSGlobalAccess");
|
||||
@ -2866,7 +2866,7 @@ class MacroAssemblerCompat : public vixl::MacroAssembler
|
||||
|
||||
// Emit a BLR or NOP instruction. ToggleCall can be used to patch
|
||||
// this instruction.
|
||||
CodeOffsetLabel toggledCall(JitCode* target, bool enabled) {
|
||||
CodeOffset toggledCall(JitCode* target, bool enabled) {
|
||||
// The returned offset must be to the first instruction generated,
|
||||
// for the debugger to match offset with Baseline's pcMappingEntries_.
|
||||
BufferOffset offset = nextOffset();
|
||||
@ -2892,7 +2892,7 @@ class MacroAssemblerCompat : public vixl::MacroAssembler
|
||||
}
|
||||
|
||||
addPendingJump(loadOffset, ImmPtr(target->raw()), Relocation::JITCODE);
|
||||
CodeOffsetLabel ret(offset.getOffset());
|
||||
CodeOffset ret(offset.getOffset());
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
@ -31,10 +31,10 @@ EmitRepushTailCallReg(MacroAssembler& masm)
|
||||
}
|
||||
|
||||
inline void
|
||||
EmitCallIC(CodeOffsetLabel* patchOffset, MacroAssembler& masm)
|
||||
EmitCallIC(CodeOffset* patchOffset, MacroAssembler& masm)
|
||||
{
|
||||
// Move ICEntry offset into ICStubReg
|
||||
CodeOffsetLabel offset = masm.movWithPatch(ImmWord(-1), ICStubReg);
|
||||
CodeOffset offset = masm.movWithPatch(ImmWord(-1), ICStubReg);
|
||||
*patchOffset = offset;
|
||||
|
||||
// Load stub pointer into ICStubReg
|
||||
|
@ -779,7 +779,7 @@ class AssemblerMIPSShared : public AssemblerShared
|
||||
dataRelocations_.writeUnsigned(nextOffset().getOffset());
|
||||
}
|
||||
}
|
||||
void writePrebarrierOffset(CodeOffsetLabel label) {
|
||||
void writePrebarrierOffset(CodeOffset label) {
|
||||
preBarriers_.writeUnsigned(label.offset());
|
||||
}
|
||||
|
||||
@ -1044,7 +1044,7 @@ class AssemblerMIPSShared : public AssemblerShared
|
||||
}
|
||||
|
||||
// See Bind
|
||||
size_t labelToPatchOffset(CodeOffsetLabel label) { return label.offset(); }
|
||||
size_t labelToPatchOffset(CodeOffset label) { return label.offset(); }
|
||||
|
||||
void call(Label* label);
|
||||
void call(void* target);
|
||||
|
@ -1125,22 +1125,22 @@ MacroAssembler::Pop(const ValueOperand& val)
|
||||
// ===============================================================
|
||||
// Simple call functions.
|
||||
|
||||
CodeOffsetLabel
|
||||
CodeOffset
|
||||
MacroAssembler::call(Register reg)
|
||||
{
|
||||
as_jalr(reg);
|
||||
as_nop();
|
||||
return CodeOffsetLabel(currentOffset());
|
||||
return CodeOffset(currentOffset());
|
||||
}
|
||||
|
||||
CodeOffsetLabel
|
||||
CodeOffset
|
||||
MacroAssembler::call(Label* label)
|
||||
{
|
||||
ma_bal(label);
|
||||
return CodeOffsetLabel(currentOffset());
|
||||
return CodeOffset(currentOffset());
|
||||
}
|
||||
|
||||
CodeOffsetLabel
|
||||
CodeOffset
|
||||
MacroAssembler::callWithPatch()
|
||||
{
|
||||
addLongJump(nextOffset());
|
||||
|
@ -892,7 +892,7 @@ MacroAssemblerMIPSCompat::movePtr(ImmPtr imm, Register dest)
|
||||
void
|
||||
MacroAssemblerMIPSCompat::movePtr(AsmJSImmPtr imm, Register dest)
|
||||
{
|
||||
append(AsmJSAbsoluteLink(CodeOffsetLabel(nextOffset().getOffset()), imm.kind()));
|
||||
append(AsmJSAbsoluteLink(CodeOffset(nextOffset().getOffset()), imm.kind()));
|
||||
ma_liPatchable(dest, ImmWord(-1));
|
||||
}
|
||||
|
||||
@ -2521,19 +2521,19 @@ MacroAssemblerMIPSCompat::atomicExchangeToTypedIntArray(Scalar::Type arrayType,
|
||||
Register offsetTemp, Register maskTemp,
|
||||
AnyRegister output);
|
||||
|
||||
CodeOffsetLabel
|
||||
CodeOffset
|
||||
MacroAssemblerMIPSCompat::toggledJump(Label* label)
|
||||
{
|
||||
CodeOffsetLabel ret(nextOffset().getOffset());
|
||||
CodeOffset ret(nextOffset().getOffset());
|
||||
ma_b(label);
|
||||
return ret;
|
||||
}
|
||||
|
||||
CodeOffsetLabel
|
||||
CodeOffset
|
||||
MacroAssemblerMIPSCompat::toggledCall(JitCode* target, bool enabled)
|
||||
{
|
||||
BufferOffset bo = nextOffset();
|
||||
CodeOffsetLabel offset(bo.getOffset());
|
||||
CodeOffset offset(bo.getOffset());
|
||||
addPendingJump(bo, ImmPtr(target->raw()), Relocation::JITCODE);
|
||||
ma_liPatchable(ScratchRegister, ImmPtr(target->raw()));
|
||||
if (enabled) {
|
||||
|
@ -274,29 +274,29 @@ class MacroAssemblerMIPSCompat : public MacroAssemblerMIPS
|
||||
// Emit a branch that can be toggled to a non-operation. On MIPS we use
|
||||
// "andi" instruction to toggle the branch.
|
||||
// See ToggleToJmp(), ToggleToCmp().
|
||||
CodeOffsetLabel toggledJump(Label* label);
|
||||
CodeOffset toggledJump(Label* label);
|
||||
|
||||
// Emit a "jalr" or "nop" instruction. ToggleCall can be used to patch
|
||||
// this instruction.
|
||||
CodeOffsetLabel toggledCall(JitCode* target, bool enabled);
|
||||
CodeOffset toggledCall(JitCode* target, bool enabled);
|
||||
|
||||
static size_t ToggledCallSize(uint8_t* code) {
|
||||
// Four instructions used in: MacroAssemblerMIPSCompat::toggledCall
|
||||
return 4 * sizeof(uint32_t);
|
||||
}
|
||||
|
||||
CodeOffsetLabel pushWithPatch(ImmWord imm) {
|
||||
CodeOffsetLabel label = movWithPatch(imm, ScratchRegister);
|
||||
CodeOffset pushWithPatch(ImmWord imm) {
|
||||
CodeOffset label = movWithPatch(imm, ScratchRegister);
|
||||
ma_push(ScratchRegister);
|
||||
return label;
|
||||
}
|
||||
|
||||
CodeOffsetLabel movWithPatch(ImmWord imm, Register dest) {
|
||||
CodeOffsetLabel label = CodeOffsetLabel(currentOffset());
|
||||
CodeOffset movWithPatch(ImmWord imm, Register dest) {
|
||||
CodeOffset label = CodeOffset(currentOffset());
|
||||
ma_liPatchable(dest, imm);
|
||||
return label;
|
||||
}
|
||||
CodeOffsetLabel movWithPatch(ImmPtr imm, Register dest) {
|
||||
CodeOffset movWithPatch(ImmPtr imm, Register dest) {
|
||||
return movWithPatch(ImmWord(uintptr_t(imm.value)), dest);
|
||||
}
|
||||
|
||||
@ -1314,8 +1314,8 @@ class MacroAssemblerMIPSCompat : public MacroAssemblerMIPS
|
||||
bool buildOOLFakeExitFrame(void* fakeReturnAddr);
|
||||
|
||||
public:
|
||||
CodeOffsetLabel labelForPatch() {
|
||||
return CodeOffsetLabel(nextOffset().getOffset());
|
||||
CodeOffset labelForPatch() {
|
||||
return CodeOffset(nextOffset().getOffset());
|
||||
}
|
||||
|
||||
void memIntToValue(Address Source, Address Dest) {
|
||||
|
@ -32,10 +32,10 @@ EmitRepushTailCallReg(MacroAssembler& masm)
|
||||
}
|
||||
|
||||
inline void
|
||||
EmitCallIC(CodeOffsetLabel* patchOffset, MacroAssembler& masm)
|
||||
EmitCallIC(CodeOffset* patchOffset, MacroAssembler& masm)
|
||||
{
|
||||
// Move ICEntry offset into ICStubReg.
|
||||
CodeOffsetLabel offset = masm.movWithPatch(ImmWord(-1), ICStubReg);
|
||||
CodeOffset offset = masm.movWithPatch(ImmWord(-1), ICStubReg);
|
||||
*patchOffset = offset;
|
||||
|
||||
// Load stub pointer into ICStubReg.
|
||||
|
@ -977,7 +977,7 @@ MacroAssemblerMIPS64Compat::movePtr(ImmPtr imm, Register dest)
|
||||
void
|
||||
MacroAssemblerMIPS64Compat::movePtr(AsmJSImmPtr imm, Register dest)
|
||||
{
|
||||
append(AsmJSAbsoluteLink(CodeOffsetLabel(nextOffset().getOffset()), imm.kind()));
|
||||
append(AsmJSAbsoluteLink(CodeOffset(nextOffset().getOffset()), imm.kind()));
|
||||
ma_liPatchable(dest, ImmWord(-1));
|
||||
}
|
||||
|
||||
@ -2624,19 +2624,19 @@ MacroAssemblerMIPS64Compat::atomicExchangeToTypedIntArray(Scalar::Type arrayType
|
||||
Register offsetTemp, Register maskTemp,
|
||||
AnyRegister output);
|
||||
|
||||
CodeOffsetLabel
|
||||
CodeOffset
|
||||
MacroAssemblerMIPS64Compat::toggledJump(Label* label)
|
||||
{
|
||||
CodeOffsetLabel ret(nextOffset().getOffset());
|
||||
CodeOffset ret(nextOffset().getOffset());
|
||||
ma_b(label);
|
||||
return ret;
|
||||
}
|
||||
|
||||
CodeOffsetLabel
|
||||
CodeOffset
|
||||
MacroAssemblerMIPS64Compat::toggledCall(JitCode* target, bool enabled)
|
||||
{
|
||||
BufferOffset bo = nextOffset();
|
||||
CodeOffsetLabel offset(bo.getOffset());
|
||||
CodeOffset offset(bo.getOffset());
|
||||
addPendingJump(bo, ImmPtr(target->raw()), Relocation::JITCODE);
|
||||
ma_liPatchable(ScratchRegister, ImmPtr(target->raw()));
|
||||
if (enabled) {
|
||||
|
@ -295,32 +295,32 @@ class MacroAssemblerMIPS64Compat : public MacroAssemblerMIPS64
|
||||
// Emit a branch that can be toggled to a non-operation. On MIPS64 we use
|
||||
// "andi" instruction to toggle the branch.
|
||||
// See ToggleToJmp(), ToggleToCmp().
|
||||
CodeOffsetLabel toggledJump(Label* label);
|
||||
CodeOffset toggledJump(Label* label);
|
||||
|
||||
// Emit a "jalr" or "nop" instruction. ToggleCall can be used to patch
|
||||
// this instruction.
|
||||
CodeOffsetLabel toggledCall(JitCode* target, bool enabled);
|
||||
CodeOffset toggledCall(JitCode* target, bool enabled);
|
||||
|
||||
static size_t ToggledCallSize(uint8_t* code) {
|
||||
// Six instructions used in: MacroAssemblerMIPS64Compat::toggledCall
|
||||
return 6 * sizeof(uint32_t);
|
||||
}
|
||||
|
||||
CodeOffsetLabel pushWithPatch(ImmWord imm) {
|
||||
CodeOffsetLabel label = movWithPatch(imm, ScratchRegister);
|
||||
CodeOffset pushWithPatch(ImmWord imm) {
|
||||
CodeOffset offset = movWithPatch(imm, ScratchRegister);
|
||||
ma_push(ScratchRegister);
|
||||
return label;
|
||||
return offset;
|
||||
}
|
||||
|
||||
CodeOffsetLabel movWithPatch(ImmWord imm, Register dest) {
|
||||
CodeOffsetLabel label = CodeOffsetLabel(currentOffset());
|
||||
CodeOffset movWithPatch(ImmWord imm, Register dest) {
|
||||
CodeOffset offset = CodeOffset(currentOffset());
|
||||
ma_liPatchable(dest, imm, Li64);
|
||||
return label;
|
||||
return offset;
|
||||
}
|
||||
CodeOffsetLabel movWithPatch(ImmPtr imm, Register dest) {
|
||||
CodeOffsetLabel label = CodeOffsetLabel(currentOffset());
|
||||
CodeOffset movWithPatch(ImmPtr imm, Register dest) {
|
||||
CodeOffset offset = CodeOffset(currentOffset());
|
||||
ma_liPatchable(dest, imm);
|
||||
return label;
|
||||
return offset;
|
||||
}
|
||||
|
||||
void jump(Label* label) {
|
||||
@ -1332,8 +1332,8 @@ class MacroAssemblerMIPS64Compat : public MacroAssemblerMIPS64
|
||||
bool buildOOLFakeExitFrame(void* fakeReturnAddr);
|
||||
|
||||
public:
|
||||
CodeOffsetLabel labelForPatch() {
|
||||
return CodeOffsetLabel(nextOffset().getOffset());
|
||||
CodeOffset labelForPatch() {
|
||||
return CodeOffset(nextOffset().getOffset());
|
||||
}
|
||||
|
||||
void memIntToValue(Address Source, Address Dest) {
|
||||
|
@ -32,10 +32,10 @@ EmitRepushTailCallReg(MacroAssembler& masm)
|
||||
}
|
||||
|
||||
inline void
|
||||
EmitCallIC(CodeOffsetLabel* patchOffset, MacroAssembler& masm)
|
||||
EmitCallIC(CodeOffset* patchOffset, MacroAssembler& masm)
|
||||
{
|
||||
// Move ICEntry offset into ICStubReg.
|
||||
CodeOffsetLabel offset = masm.movWithPatch(ImmWord(-1), ICStubReg);
|
||||
CodeOffset offset = masm.movWithPatch(ImmWord(-1), ICStubReg);
|
||||
*patchOffset = offset;
|
||||
|
||||
// Load stub pointer into ICStubReg.
|
||||
|
@ -188,19 +188,19 @@ class MacroAssemblerNone : public Assembler
|
||||
void nopAlign(size_t) { MOZ_CRASH(); }
|
||||
void checkStackAlignment() { MOZ_CRASH(); }
|
||||
uint32_t currentOffset() { MOZ_CRASH(); }
|
||||
uint32_t labelToPatchOffset(CodeOffsetLabel) { MOZ_CRASH(); }
|
||||
CodeOffsetLabel labelForPatch() { MOZ_CRASH(); }
|
||||
uint32_t labelToPatchOffset(CodeOffset) { MOZ_CRASH(); }
|
||||
CodeOffset labelForPatch() { MOZ_CRASH(); }
|
||||
|
||||
void nop() { MOZ_CRASH(); }
|
||||
void breakpoint() { MOZ_CRASH(); }
|
||||
void abiret() { MOZ_CRASH(); }
|
||||
void ret() { MOZ_CRASH(); }
|
||||
|
||||
CodeOffsetLabel toggledJump(Label*) { MOZ_CRASH(); }
|
||||
CodeOffsetLabel toggledCall(JitCode*, bool) { MOZ_CRASH(); }
|
||||
CodeOffset toggledJump(Label*) { MOZ_CRASH(); }
|
||||
CodeOffset toggledCall(JitCode*, bool) { MOZ_CRASH(); }
|
||||
static size_t ToggledCallSize(uint8_t*) { MOZ_CRASH(); }
|
||||
|
||||
void writePrebarrierOffset(CodeOffsetLabel) { MOZ_CRASH(); }
|
||||
void writePrebarrierOffset(CodeOffset) { MOZ_CRASH(); }
|
||||
|
||||
void finish() { MOZ_CRASH(); }
|
||||
|
||||
@ -218,7 +218,7 @@ class MacroAssemblerNone : public Assembler
|
||||
template <typename T> void Push(T) { MOZ_CRASH(); }
|
||||
template <typename T> void pop(T) { MOZ_CRASH(); }
|
||||
template <typename T> void Pop(T) { MOZ_CRASH(); }
|
||||
template <typename T> CodeOffsetLabel pushWithPatch(T) { MOZ_CRASH(); }
|
||||
template <typename T> CodeOffset pushWithPatch(T) { MOZ_CRASH(); }
|
||||
|
||||
CodeOffsetJump jumpWithPatch(RepatchLabel*, Label* doc = nullptr) { MOZ_CRASH(); }
|
||||
CodeOffsetJump jumpWithPatch(RepatchLabel*, Condition, Label* doc = nullptr) { MOZ_CRASH(); }
|
||||
@ -266,7 +266,7 @@ class MacroAssemblerNone : public Assembler
|
||||
template <typename T, typename S> void moveFloat32(T, S) { MOZ_CRASH(); }
|
||||
template <typename T, typename S> void moveDouble(T, S) { MOZ_CRASH(); }
|
||||
template <typename T, typename S> void move64(T, S) { MOZ_CRASH(); }
|
||||
template <typename T> CodeOffsetLabel movWithPatch(T, Register) { MOZ_CRASH(); }
|
||||
template <typename T> CodeOffset movWithPatch(T, Register) { MOZ_CRASH(); }
|
||||
|
||||
template <typename T> void loadInt32x1(T, FloatRegister dest) { MOZ_CRASH(); }
|
||||
template <typename T> void loadInt32x2(T, FloatRegister dest) { MOZ_CRASH(); }
|
||||
|
@ -16,7 +16,7 @@ static const uint32_t STUB_FRAME_SAVED_STUB_OFFSET = 0;
|
||||
|
||||
inline void EmitRestoreTailCallReg(MacroAssembler&) { MOZ_CRASH(); }
|
||||
inline void EmitRepushTailCallReg(MacroAssembler&) { MOZ_CRASH(); }
|
||||
inline void EmitCallIC(CodeOffsetLabel*, MacroAssembler&) { MOZ_CRASH(); }
|
||||
inline void EmitCallIC(CodeOffset*, MacroAssembler&) { MOZ_CRASH(); }
|
||||
inline void EmitEnterTypeMonitorIC(MacroAssembler&, size_t v = 0) { MOZ_CRASH(); }
|
||||
inline void EmitReturnFromIC(MacroAssembler&) { MOZ_CRASH(); }
|
||||
inline void EmitChangeICReturnAddress(MacroAssembler&, Register) { MOZ_CRASH(); }
|
||||
|
@ -422,15 +422,15 @@ struct AbsoluteLabel : public LabelBase
|
||||
}
|
||||
};
|
||||
|
||||
class CodeOffsetLabel
|
||||
class CodeOffset
|
||||
{
|
||||
size_t offset_;
|
||||
|
||||
static const size_t NOT_USED = size_t(-1);
|
||||
|
||||
public:
|
||||
explicit CodeOffsetLabel(size_t offset) : offset_(offset) {}
|
||||
CodeOffsetLabel() : offset_(NOT_USED) {}
|
||||
explicit CodeOffset(size_t offset) : offset_(offset) {}
|
||||
CodeOffset() : offset_(NOT_USED) {}
|
||||
|
||||
size_t offset() const {
|
||||
MOZ_ASSERT(used());
|
||||
@ -461,26 +461,26 @@ class CodeLabel
|
||||
{
|
||||
// The destination position, where the absolute reference should get
|
||||
// patched into.
|
||||
CodeOffsetLabel patchAt_;
|
||||
CodeOffset patchAt_;
|
||||
|
||||
// The source label (relative) in the code to where the destination should
|
||||
// get patched to.
|
||||
CodeOffsetLabel target_;
|
||||
CodeOffset target_;
|
||||
|
||||
public:
|
||||
CodeLabel()
|
||||
{ }
|
||||
explicit CodeLabel(const CodeOffsetLabel& patchAt)
|
||||
explicit CodeLabel(const CodeOffset& patchAt)
|
||||
: patchAt_(patchAt)
|
||||
{ }
|
||||
CodeLabel(const CodeOffsetLabel& patchAt, const CodeOffsetLabel& target)
|
||||
CodeLabel(const CodeOffset& patchAt, const CodeOffset& target)
|
||||
: patchAt_(patchAt),
|
||||
target_(target)
|
||||
{ }
|
||||
CodeOffsetLabel* patchAt() {
|
||||
CodeOffset* patchAt() {
|
||||
return &patchAt_;
|
||||
}
|
||||
CodeOffsetLabel* target() {
|
||||
CodeOffset* target() {
|
||||
return &target_;
|
||||
}
|
||||
void offsetBy(size_t delta) {
|
||||
@ -625,7 +625,7 @@ class CodeLocationLabel
|
||||
raw_ = nullptr;
|
||||
setUninitialized();
|
||||
}
|
||||
CodeLocationLabel(JitCode* code, CodeOffsetLabel base) {
|
||||
CodeLocationLabel(JitCode* code, CodeOffset base) {
|
||||
*this = base;
|
||||
repoint(code);
|
||||
}
|
||||
@ -638,7 +638,7 @@ class CodeLocationLabel
|
||||
setAbsolute();
|
||||
}
|
||||
|
||||
void operator = (CodeOffsetLabel base) {
|
||||
void operator = (CodeOffset base) {
|
||||
raw_ = (uint8_t*)base.offset();
|
||||
setRelative();
|
||||
}
|
||||
@ -857,10 +857,10 @@ typedef Vector<AsmJSHeapAccess, 0, SystemAllocPolicy> AsmJSHeapAccessVector;
|
||||
|
||||
struct AsmJSGlobalAccess
|
||||
{
|
||||
CodeOffsetLabel patchAt;
|
||||
CodeOffset patchAt;
|
||||
unsigned globalDataOffset;
|
||||
|
||||
AsmJSGlobalAccess(CodeOffsetLabel patchAt, unsigned globalDataOffset)
|
||||
AsmJSGlobalAccess(CodeOffset patchAt, unsigned globalDataOffset)
|
||||
: patchAt(patchAt), globalDataOffset(globalDataOffset)
|
||||
{}
|
||||
};
|
||||
@ -955,9 +955,9 @@ class AsmJSAbsoluteAddress
|
||||
// the MacroAssembler (in AsmJSModule::staticallyLink).
|
||||
struct AsmJSAbsoluteLink
|
||||
{
|
||||
AsmJSAbsoluteLink(CodeOffsetLabel patchAt, AsmJSImmKind target)
|
||||
AsmJSAbsoluteLink(CodeOffset patchAt, AsmJSImmKind target)
|
||||
: patchAt(patchAt), target(target) {}
|
||||
CodeOffsetLabel patchAt;
|
||||
CodeOffset patchAt;
|
||||
AsmJSImmKind target;
|
||||
};
|
||||
|
||||
@ -1011,7 +1011,7 @@ class AssemblerShared
|
||||
return embedsNurseryPointers_;
|
||||
}
|
||||
|
||||
void append(const CallSiteDesc& desc, CodeOffsetLabel label, size_t framePushed,
|
||||
void append(const CallSiteDesc& desc, CodeOffset label, size_t framePushed,
|
||||
uint32_t targetIndex = CallSiteAndTarget::NOT_INTERNAL)
|
||||
{
|
||||
// framePushed does not include sizeof(AsmJSFrame), so add it in here (see
|
||||
|
@ -53,19 +53,19 @@ class BaselineCompilerShared
|
||||
// has been allocated.
|
||||
struct ICLoadLabel {
|
||||
size_t icEntry;
|
||||
CodeOffsetLabel label;
|
||||
CodeOffset label;
|
||||
};
|
||||
js::Vector<ICLoadLabel, 16, SystemAllocPolicy> icLoadLabels_;
|
||||
|
||||
uint32_t pushedBeforeCall_;
|
||||
mozilla::DebugOnly<bool> inCall_;
|
||||
|
||||
CodeOffsetLabel spsPushToggleOffset_;
|
||||
CodeOffsetLabel profilerEnterFrameToggleOffset_;
|
||||
CodeOffsetLabel profilerExitFrameToggleOffset_;
|
||||
CodeOffsetLabel traceLoggerEnterToggleOffset_;
|
||||
CodeOffsetLabel traceLoggerExitToggleOffset_;
|
||||
CodeOffsetLabel traceLoggerScriptTextIdOffset_;
|
||||
CodeOffset spsPushToggleOffset_;
|
||||
CodeOffset profilerEnterFrameToggleOffset_;
|
||||
CodeOffset profilerExitFrameToggleOffset_;
|
||||
CodeOffset traceLoggerEnterToggleOffset_;
|
||||
CodeOffset traceLoggerExitToggleOffset_;
|
||||
CodeOffset traceLoggerScriptTextIdOffset_;
|
||||
|
||||
BaselineCompilerShared(JSContext* cx, TempAllocator& alloc, JSScript* script);
|
||||
|
||||
@ -90,7 +90,7 @@ class BaselineCompilerShared
|
||||
// Append an ICEntry without a stub.
|
||||
bool appendICEntry(ICEntry::Kind kind, uint32_t returnOffset) {
|
||||
ICEntry entry(script->pcToOffset(pc), kind);
|
||||
entry.setReturnOffset(CodeOffsetLabel(returnOffset));
|
||||
entry.setReturnOffset(CodeOffset(returnOffset));
|
||||
if (!icEntries_.append(entry)) {
|
||||
ReportOutOfMemory(cx);
|
||||
return false;
|
||||
@ -98,7 +98,7 @@ class BaselineCompilerShared
|
||||
return true;
|
||||
}
|
||||
|
||||
bool addICLoadLabel(CodeOffsetLabel label) {
|
||||
bool addICLoadLabel(CodeOffset label) {
|
||||
MOZ_ASSERT(!icEntries_.empty());
|
||||
ICLoadLabel loadLabel;
|
||||
loadLabel.label = label;
|
||||
|
@ -264,7 +264,7 @@ CodeGeneratorShared::addNativeToBytecodeEntry(const BytecodeSite* site)
|
||||
// Otherwise, some native code was generated for the previous bytecode site.
|
||||
// Add a new entry for code that is about to be generated.
|
||||
NativeToBytecode entry;
|
||||
entry.nativeOffset = CodeOffsetLabel(nativeOffset);
|
||||
entry.nativeOffset = CodeOffset(nativeOffset);
|
||||
entry.tree = tree;
|
||||
entry.pc = pc;
|
||||
if (!nativeToBytecodeList_.append(entry))
|
||||
@ -343,8 +343,8 @@ CodeGeneratorShared::addTrackedOptimizationsEntry(const TrackedOptimizations* op
|
||||
// If we're generating code for a new set of optimizations, add a new
|
||||
// entry.
|
||||
NativeToTrackedOptimizations entry;
|
||||
entry.startOffset = CodeOffsetLabel(nativeOffset);
|
||||
entry.endOffset = CodeOffsetLabel(nativeOffset);
|
||||
entry.startOffset = CodeOffset(nativeOffset);
|
||||
entry.endOffset = CodeOffset(nativeOffset);
|
||||
entry.optimizations = optimizations;
|
||||
return trackedOptimizations_.append(entry);
|
||||
}
|
||||
@ -360,7 +360,7 @@ CodeGeneratorShared::extendTrackedOptimizationsEntry(const TrackedOptimizations*
|
||||
MOZ_ASSERT(entry.optimizations == optimizations);
|
||||
MOZ_ASSERT_IF(!masm.oom(), nativeOffset >= entry.endOffset.offset());
|
||||
|
||||
entry.endOffset = CodeOffsetLabel(nativeOffset);
|
||||
entry.endOffset = CodeOffset(nativeOffset);
|
||||
|
||||
// If we generated no code, remove the last entry.
|
||||
if (nativeOffset == entry.startOffset.offset())
|
||||
@ -1734,7 +1734,7 @@ CodeGeneratorShared::emitTracelogScript(bool isStart)
|
||||
|
||||
masm.Push(logger);
|
||||
|
||||
CodeOffsetLabel patchLogger = masm.movWithPatch(ImmPtr(nullptr), logger);
|
||||
CodeOffset patchLogger = masm.movWithPatch(ImmPtr(nullptr), logger);
|
||||
masm.propagateOOM(patchableTraceLoggers_.append(patchLogger));
|
||||
|
||||
Address enabledAddress(logger, TraceLoggerThread::offsetOfEnabled());
|
||||
@ -1742,7 +1742,7 @@ CodeGeneratorShared::emitTracelogScript(bool isStart)
|
||||
|
||||
masm.Push(script);
|
||||
|
||||
CodeOffsetLabel patchScript = masm.movWithPatch(ImmWord(0), script);
|
||||
CodeOffset patchScript = masm.movWithPatch(ImmWord(0), script);
|
||||
masm.propagateOOM(patchableTLScripts_.append(patchScript));
|
||||
|
||||
if (isStart)
|
||||
@ -1769,7 +1769,7 @@ CodeGeneratorShared::emitTracelogTree(bool isStart, uint32_t textId)
|
||||
|
||||
masm.Push(logger);
|
||||
|
||||
CodeOffsetLabel patchLocation = masm.movWithPatch(ImmPtr(nullptr), logger);
|
||||
CodeOffset patchLocation = masm.movWithPatch(ImmPtr(nullptr), logger);
|
||||
masm.propagateOOM(patchableTraceLoggers_.append(patchLocation));
|
||||
|
||||
Address enabledAddress(logger, TraceLoggerThread::offsetOfEnabled());
|
||||
|
@ -55,8 +55,8 @@ struct ReciprocalMulConstants {
struct NativeToTrackedOptimizations
{
// [startOffset, endOffset]
CodeOffsetLabel startOffset;
CodeOffsetLabel endOffset;
CodeOffset startOffset;
CodeOffset endOffset;
const TrackedOptimizations* optimizations;
};

@ -83,7 +83,7 @@ class CodeGeneratorShared : public LElementVisitor
uint32_t lastOsiPointOffset_;
SafepointWriter safepoints_;
Label invalidate_;
CodeOffsetLabel invalidateEpilogueData_;
CodeOffset invalidateEpilogueData_;

// Label for the common return path.
NonAssertingLabel returnLabel_;
@ -106,13 +106,13 @@ class CodeGeneratorShared : public LElementVisitor
Vector<PatchableBackedgeInfo, 0, SystemAllocPolicy> patchableBackedges_;

#ifdef JS_TRACE_LOGGING
js::Vector<CodeOffsetLabel, 0, SystemAllocPolicy> patchableTraceLoggers_;
js::Vector<CodeOffsetLabel, 0, SystemAllocPolicy> patchableTLScripts_;
js::Vector<CodeOffset, 0, SystemAllocPolicy> patchableTraceLoggers_;
js::Vector<CodeOffset, 0, SystemAllocPolicy> patchableTLScripts_;
#endif

public:
struct NativeToBytecode {
CodeOffsetLabel nativeOffset;
CodeOffset nativeOffset;
InlineScriptTree* tree;
jsbytecode* pc;
};

@ -315,8 +315,8 @@ class Assembler : public AssemblerX86Shared
subq(Imm32(sizeof(double)), StackPointer);
vmovsd(src, Address(StackPointer, 0));
}
CodeOffsetLabel pushWithPatch(ImmWord word) {
CodeOffsetLabel label = movWithPatch(word, ScratchReg);
CodeOffset pushWithPatch(ImmWord word) {
CodeOffset label = movWithPatch(word, ScratchReg);
push(ScratchReg);
return label;
}
@ -326,11 +326,11 @@ class Assembler : public AssemblerX86Shared
addq(Imm32(sizeof(double)), StackPointer);
}

CodeOffsetLabel movWithPatch(ImmWord word, Register dest) {
CodeOffset movWithPatch(ImmWord word, Register dest) {
masm.movq_i64r(word.value, dest.encoding());
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel movWithPatch(ImmPtr imm, Register dest) {
CodeOffset movWithPatch(ImmPtr imm, Register dest) {
return movWithPatch(ImmWord(uintptr_t(imm.value)), dest);
}

@ -598,7 +598,7 @@ class Assembler : public AssemblerX86Shared
}
void mov(AsmJSImmPtr imm, Register dest) {
masm.movq_i64r(-1, dest.encoding());
append(AsmJSAbsoluteLink(CodeOffsetLabel(masm.currentOffset()), imm.kind()));
append(AsmJSAbsoluteLink(CodeOffset(masm.currentOffset()), imm.kind()));
}
void mov(const Operand& src, Register dest) {
movq(src, dest);
@ -612,7 +612,7 @@ class Assembler : public AssemblerX86Shared
void mov(Register src, Register dest) {
movq(src, dest);
}
void mov(CodeOffsetLabel* label, Register dest) {
void mov(CodeOffset* label, Register dest) {
masm.movq_i64r(/* placeholder */ 0, dest.encoding());
label->use(masm.size());
}
@ -632,49 +632,49 @@ class Assembler : public AssemblerX86Shared
}
}

CodeOffsetLabel loadRipRelativeInt32(Register dest) {
return CodeOffsetLabel(masm.movl_ripr(dest.encoding()).offset());
CodeOffset loadRipRelativeInt32(Register dest) {
return CodeOffset(masm.movl_ripr(dest.encoding()).offset());
}
CodeOffsetLabel loadRipRelativeInt64(Register dest) {
return CodeOffsetLabel(masm.movq_ripr(dest.encoding()).offset());
CodeOffset loadRipRelativeInt64(Register dest) {
return CodeOffset(masm.movq_ripr(dest.encoding()).offset());
}
CodeOffsetLabel loadRipRelativeDouble(FloatRegister dest) {
return CodeOffsetLabel(masm.vmovsd_ripr(dest.encoding()).offset());
CodeOffset loadRipRelativeDouble(FloatRegister dest) {
return CodeOffset(masm.vmovsd_ripr(dest.encoding()).offset());
}
CodeOffsetLabel loadRipRelativeFloat32(FloatRegister dest) {
return CodeOffsetLabel(masm.vmovss_ripr(dest.encoding()).offset());
CodeOffset loadRipRelativeFloat32(FloatRegister dest) {
return CodeOffset(masm.vmovss_ripr(dest.encoding()).offset());
}
CodeOffsetLabel loadRipRelativeInt32x4(FloatRegister dest) {
return CodeOffsetLabel(masm.vmovdqa_ripr(dest.encoding()).offset());
CodeOffset loadRipRelativeInt32x4(FloatRegister dest) {
return CodeOffset(masm.vmovdqa_ripr(dest.encoding()).offset());
}
CodeOffsetLabel loadRipRelativeFloat32x4(FloatRegister dest) {
return CodeOffsetLabel(masm.vmovaps_ripr(dest.encoding()).offset());
CodeOffset loadRipRelativeFloat32x4(FloatRegister dest) {
return CodeOffset(masm.vmovaps_ripr(dest.encoding()).offset());
}
CodeOffsetLabel storeRipRelativeInt32(Register dest) {
return CodeOffsetLabel(masm.movl_rrip(dest.encoding()).offset());
CodeOffset storeRipRelativeInt32(Register dest) {
return CodeOffset(masm.movl_rrip(dest.encoding()).offset());
}
CodeOffsetLabel storeRipRelativeDouble(FloatRegister dest) {
return CodeOffsetLabel(masm.vmovsd_rrip(dest.encoding()).offset());
CodeOffset storeRipRelativeDouble(FloatRegister dest) {
return CodeOffset(masm.vmovsd_rrip(dest.encoding()).offset());
}
CodeOffsetLabel storeRipRelativeFloat32(FloatRegister dest) {
return CodeOffsetLabel(masm.vmovss_rrip(dest.encoding()).offset());
CodeOffset storeRipRelativeFloat32(FloatRegister dest) {
return CodeOffset(masm.vmovss_rrip(dest.encoding()).offset());
}
CodeOffsetLabel storeRipRelativeInt32x4(FloatRegister dest) {
return CodeOffsetLabel(masm.vmovdqa_rrip(dest.encoding()).offset());
CodeOffset storeRipRelativeInt32x4(FloatRegister dest) {
return CodeOffset(masm.vmovdqa_rrip(dest.encoding()).offset());
}
CodeOffsetLabel storeRipRelativeFloat32x4(FloatRegister dest) {
return CodeOffsetLabel(masm.vmovaps_rrip(dest.encoding()).offset());
CodeOffset storeRipRelativeFloat32x4(FloatRegister dest) {
return CodeOffset(masm.vmovaps_rrip(dest.encoding()).offset());
}
CodeOffsetLabel leaRipRelative(Register dest) {
return CodeOffsetLabel(masm.leaq_rip(dest.encoding()).offset());
CodeOffset leaRipRelative(Register dest) {
return CodeOffset(masm.leaq_rip(dest.encoding()).offset());
}

void loadAsmJSActivation(Register dest) {
CodeOffsetLabel label = loadRipRelativeInt64(dest);
CodeOffset label = loadRipRelativeInt64(dest);
append(AsmJSGlobalAccess(label, AsmJSActivationGlobalDataOffset));
}
void loadAsmJSHeapRegisterFromGlobalData() {
CodeOffsetLabel label = loadRipRelativeInt64(HeapReg);
CodeOffset label = loadRipRelativeInt64(HeapReg);
append(AsmJSGlobalAccess(label, AsmJSHeapGlobalDataOffset));
}

@ -770,8 +770,8 @@ class Assembler : public AssemblerX86Shared

// Emit a CALL or CMP (nop) instruction. ToggleCall can be used to patch
// this instruction.
CodeOffsetLabel toggledCall(JitCode* target, bool enabled) {
CodeOffsetLabel offset(size());
CodeOffset toggledCall(JitCode* target, bool enabled) {
CodeOffset offset(size());
JmpSrc src = enabled ? masm.call() : masm.cmp_eax();
addPendingJump(src, ImmPtr(target->raw()), Relocation::JITCODE);
MOZ_ASSERT_IF(!oom(), size() - offset.offset() == ToggledCallSize(nullptr));

@ -735,7 +735,7 @@ CodeGeneratorX64::visitAsmJSLoadGlobalVar(LAsmJSLoadGlobalVar* ins)
MIRType type = mir->type();
MOZ_ASSERT(IsNumberType(type) || IsSimdType(type));

CodeOffsetLabel label;
CodeOffset label;
switch (type) {
case MIRType_Int32:
label = masm.loadRipRelativeInt32(ToRegister(ins->output()));
@ -769,7 +769,7 @@ CodeGeneratorX64::visitAsmJSStoreGlobalVar(LAsmJSStoreGlobalVar* ins)
MIRType type = mir->value()->type();
MOZ_ASSERT(IsNumberType(type) || IsSimdType(type));

CodeOffsetLabel label;
CodeOffset label;
switch (type) {
case MIRType_Int32:
label = masm.storeRipRelativeInt32(ToRegister(ins->value()));
@ -804,7 +804,7 @@ CodeGeneratorX64::visitAsmJSLoadFuncPtr(LAsmJSLoadFuncPtr* ins)
Register tmp = ToRegister(ins->temp());
Register out = ToRegister(ins->output());

CodeOffsetLabel label = masm.leaRipRelative(tmp);
CodeOffset label = masm.leaRipRelative(tmp);
masm.loadPtr(Operand(tmp, index, TimesEight, 0), out);
masm.append(AsmJSGlobalAccess(label, mir->globalDataOffset()));
}
@ -814,7 +814,7 @@ CodeGeneratorX64::visitAsmJSLoadFFIFunc(LAsmJSLoadFFIFunc* ins)
{
MAsmJSLoadFFIFunc* mir = ins->mir();

CodeOffsetLabel label = masm.loadRipRelativeInt64(ToRegister(ins->output()));
CodeOffset label = masm.loadRipRelativeInt64(ToRegister(ins->output()));
masm.append(AsmJSGlobalAccess(label, mir->globalDataOffset()));
}

@ -32,7 +32,7 @@ MacroAssemblerX64::loadConstantDouble(double d, FloatRegister dest)
// PC-relative addressing. Use "jump" label support code, because we need
// the same PC-relative address patching that jumps use.
JmpSrc j = masm.vmovsd_ripr(dest.encoding());
dbl->uses.append(CodeOffsetLabel(j.offset()));
dbl->uses.append(CodeOffset(j.offset()));
}

void
@ -45,7 +45,7 @@ MacroAssemblerX64::loadConstantFloat32(float f, FloatRegister dest)
return;
// See comment in loadConstantDouble
JmpSrc j = masm.vmovss_ripr(dest.encoding());
flt->uses.append(CodeOffsetLabel(j.offset()));
flt->uses.append(CodeOffset(j.offset()));
}

void
@ -59,7 +59,7 @@ MacroAssemblerX64::loadConstantInt32x4(const SimdConstant& v, FloatRegister dest
return;
MOZ_ASSERT(val->type() == SimdConstant::Int32x4);
JmpSrc j = masm.vmovdqa_ripr(dest.encoding());
val->uses.append(CodeOffsetLabel(j.offset()));
val->uses.append(CodeOffset(j.offset()));
}

void
@ -73,13 +73,13 @@ MacroAssemblerX64::loadConstantFloat32x4(const SimdConstant&v, FloatRegister des
return;
MOZ_ASSERT(val->type() == SimdConstant::Float32x4);
JmpSrc j = masm.vmovaps_ripr(dest.encoding());
val->uses.append(CodeOffsetLabel(j.offset()));
val->uses.append(CodeOffset(j.offset()));
}

void
MacroAssemblerX64::bindOffsets(const MacroAssemblerX86Shared::UsesVector& uses)
{
for (CodeOffsetLabel use : uses) {
for (CodeOffset use : uses) {
JmpDst dst(currentOffset());
JmpSrc src(use.offset());
// Using linkJump here is safe, as explaind in the comment in

@ -1388,7 +1388,7 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared
void handleFailureWithHandlerTail(void* handler);

// See CodeGeneratorX64 calls to noteAsmJSGlobalAccess.
void patchAsmJSGlobalAccess(CodeOffsetLabel patchAt, uint8_t* code, uint8_t* globalData,
void patchAsmJSGlobalAccess(CodeOffset patchAt, uint8_t* code, uint8_t* globalData,
unsigned globalDataOffset)
{
uint8_t* nextInsn = code + patchAt.offset();

@ -31,10 +31,10 @@ EmitRepushTailCallReg(MacroAssembler& masm)
}

inline void
EmitCallIC(CodeOffsetLabel* patchOffset, MacroAssembler& masm)
EmitCallIC(CodeOffset* patchOffset, MacroAssembler& masm)
{
// Move ICEntry offset into ICStubReg
CodeOffsetLabel offset = masm.movWithPatch(ImmWord(-1), ICStubReg);
CodeOffset offset = masm.movWithPatch(ImmWord(-1), ICStubReg);
*patchOffset = offset;

// Load stub pointer into ICStubReg

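EmitCallIC above emits a movWithPatch with a placeholder immediate and hands the recorded offset back through patchOffset; the compiler later rewrites that immediate once the real ICEntry address is known (see addICLoadLabel earlier in this patch). A minimal, self-contained sketch of that record-then-patch pattern follows; the ToyMasm/ToyCodeOffset names and the byte-buffer "assembler" are illustrative assumptions, not the SpiderMonkey API.

    #include <cstdint>
    #include <cstring>
    #include <vector>

    // Illustrative stand-ins only; not the jit/ classes.
    struct ToyCodeOffset {
        uint32_t offset = 0;
    };

    struct ToyMasm {
        std::vector<uint8_t> code;

        // Emit a 4-byte placeholder immediate and remember where it starts,
        // in the spirit of movWithPatch(ImmWord(-1), ICStubReg) above.
        ToyCodeOffset movWithPatch() {
            ToyCodeOffset at{static_cast<uint32_t>(code.size())};
            uint32_t placeholder = 0xffffffffu;
            const uint8_t* p = reinterpret_cast<const uint8_t*>(&placeholder);
            code.insert(code.end(), p, p + sizeof(placeholder));
            return at;
        }

        // Once the real value is known, overwrite the recorded bytes.
        void patchImm32(ToyCodeOffset at, uint32_t value) {
            std::memcpy(code.data() + at.offset, &value, sizeof(value));
        }
    };

    int main() {
        ToyMasm masm;
        ToyCodeOffset icLoad = masm.movWithPatch(); // recorded via *patchOffset
        // ... more code is emitted here ...
        masm.patchImm32(icLoad, 0x1234);            // patched once the ICEntry is known
        return 0;
    }
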
@ -262,7 +262,7 @@ class AssemblerX86Shared : public AssemblerShared
dataRelocations_.writeUnsigned(masm.currentOffset());
}
}
void writePrebarrierOffset(CodeOffsetLabel label) {
void writePrebarrierOffset(CodeOffset label) {
preBarriers_.writeUnsigned(label.offset());
}

@ -438,8 +438,8 @@ class AssemblerX86Shared : public AssemblerShared
void nopAlign(int alignment) {
masm.nopAlign(alignment);
}
void writeCodePointer(CodeOffsetLabel* label) {
// A CodeOffsetLabel only has one use, bake in the "end of list" value.
void writeCodePointer(CodeOffset* label) {
// A CodeOffset only has one use, bake in the "end of list" value.
masm.jumpTablePointer(LabelBase::INVALID_OFFSET);
label->use(masm.size());
}
@ -923,7 +923,7 @@ class AssemblerX86Shared : public AssemblerShared
}
label->bind(dst.offset());
}
void use(CodeOffsetLabel* label) {
void use(CodeOffset* label) {
label->use(currentOffset());
}
uint32_t currentOffset() {
@ -955,7 +955,7 @@ class AssemblerX86Shared : public AssemblerShared
label->reset();
}

static void Bind(uint8_t* raw, CodeOffsetLabel* label, const void* address) {
static void Bind(uint8_t* raw, CodeOffset* label, const void* address) {
if (label->used()) {
intptr_t offset = label->offset();
X86Encoding::SetPointer(raw + offset, address);
@ -963,7 +963,7 @@ class AssemblerX86Shared : public AssemblerShared
}

// See Bind and X86Encoding::setPointer.
size_t labelToPatchOffset(CodeOffsetLabel label) {
size_t labelToPatchOffset(CodeOffset label) {
return label.offset() - sizeof(void*);
}

@ -974,7 +974,7 @@ class AssemblerX86Shared : public AssemblerShared
// Remove the size of the return address which is included in the frame.
masm.ret_i(n.value - sizeof(void*));
}
CodeOffsetLabel call(Label* label) {
CodeOffset call(Label* label) {
if (label->bound()) {
masm.linkJump(masm.call(), JmpDst(label->offset()));
} else {
@ -982,11 +982,11 @@ class AssemblerX86Shared : public AssemblerShared
JmpSrc prev = JmpSrc(label->use(j.offset()));
masm.setNextJump(j, prev);
}
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel call(Register reg) {
CodeOffset call(Register reg) {
masm.call_r(reg.encoding());
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
void call(const Operand& op) {
switch (op.kind()) {
@ -1001,8 +1001,8 @@ class AssemblerX86Shared : public AssemblerShared
}
}

CodeOffsetLabel callWithPatch() {
return CodeOffsetLabel(masm.call().offset());
CodeOffset callWithPatch() {
return CodeOffset(masm.call().offset());
}
void patchCall(uint32_t callerOffset, uint32_t calleeOffset) {
unsigned char* code = masm.data();
@ -1074,9 +1074,9 @@ class AssemblerX86Shared : public AssemblerShared
MOZ_CRASH("unexpected operand kind");
}
}
CodeOffsetLabel cmplWithPatch(Imm32 rhs, Register lhs) {
CodeOffset cmplWithPatch(Imm32 rhs, Register lhs) {
masm.cmpl_i32r(rhs.value, lhs.encoding());
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
void cmpw(Register rhs, Register lhs) {
masm.cmpw_rr(rhs.encoding(), lhs.encoding());
@ -1118,9 +1118,9 @@ class AssemblerX86Shared : public AssemblerShared
void addl(Imm32 imm, Register dest) {
masm.addl_ir(imm.value, dest.encoding());
}
CodeOffsetLabel addlWithPatch(Imm32 imm, Register dest) {
CodeOffset addlWithPatch(Imm32 imm, Register dest) {
masm.addl_i32r(imm.value, dest.encoding());
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
void addl(Imm32 imm, const Operand& op) {
switch (op.kind()) {

@ -305,7 +305,7 @@ static bool
AppendShiftedUses(const MacroAssemblerX86Shared::UsesVector& old, size_t delta,
MacroAssemblerX86Shared::UsesVector* vec)
{
for (CodeOffsetLabel use : old) {
for (CodeOffset use : old) {
use.offsetBy(delta);
if (!vec->append(use))
return false;
@ -548,13 +548,13 @@ MacroAssembler::Pop(const ValueOperand& val)
// ===============================================================
// Simple call functions.

CodeOffsetLabel
CodeOffset
MacroAssembler::call(Register reg)
{
return Assembler::call(reg);
}

CodeOffsetLabel
CodeOffset
MacroAssembler::call(Label* label)
{
return Assembler::call(label);
@ -592,7 +592,7 @@ MacroAssembler::call(JitCode* target)
Assembler::call(target);
}

CodeOffsetLabel
CodeOffset
MacroAssembler::callWithPatch()
{
return Assembler::callWithPatch();

@ -46,7 +46,7 @@ class MacroAssemblerX86Shared : public Assembler
const MacroAssembler& asMasm() const;

public:
typedef Vector<CodeOffsetLabel, 0, SystemAllocPolicy> UsesVector;
typedef Vector<CodeOffset, 0, SystemAllocPolicy> UsesVector;

protected:
// For Double, Float and SimdData, make the move ctors explicit so that MSVC
@ -205,7 +205,7 @@ class MacroAssemblerX86Shared : public Assembler
void cmp32(Register lhs, const Operand& rhs) {
cmpl(rhs, lhs);
}
CodeOffsetLabel cmp32WithPatch(Register lhs, Imm32 rhs) {
CodeOffset cmp32WithPatch(Register lhs, Imm32 rhs) {
return cmplWithPatch(rhs, lhs);
}
void add32(Register src, Register dest) {
@ -1472,8 +1472,8 @@ class MacroAssemblerX86Shared : public Assembler
}

// Emit a JMP that can be toggled to a CMP. See ToggleToJmp(), ToggleToCmp().
CodeOffsetLabel toggledJump(Label* label) {
CodeOffsetLabel offset(size());
CodeOffset toggledJump(Label* label) {
CodeOffset offset(size());
jump(label);
return offset;
}
@ -1487,8 +1487,8 @@ class MacroAssemblerX86Shared : public Assembler
// Exists for ARM compatibility.
}

CodeOffsetLabel labelForPatch() {
return CodeOffsetLabel(size());
CodeOffset labelForPatch() {
return CodeOffset(size());
}

void abiret() {

@ -228,9 +228,9 @@ class Assembler : public AssemblerX86Shared
vmovsd(src, Address(StackPointer, 0));
}

CodeOffsetLabel pushWithPatch(ImmWord word) {
CodeOffset pushWithPatch(ImmWord word) {
masm.push_i32(int32_t(word.value));
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}

void pop(FloatRegister src) {
@ -238,11 +238,11 @@ class Assembler : public AssemblerX86Shared
addl(Imm32(sizeof(double)), StackPointer);
}

CodeOffsetLabel movWithPatch(ImmWord word, Register dest) {
CodeOffset movWithPatch(ImmWord word, Register dest) {
movl(Imm32(word.value), dest);
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel movWithPatch(ImmPtr imm, Register dest) {
CodeOffset movWithPatch(ImmPtr imm, Register dest) {
return movWithPatch(ImmWord(uintptr_t(imm.value)), dest);
}

@ -288,7 +288,7 @@ class Assembler : public AssemblerX86Shared
}
void mov(AsmJSImmPtr imm, Register dest) {
masm.movl_i32r(-1, dest.encoding());
append(AsmJSAbsoluteLink(CodeOffsetLabel(masm.currentOffset()), imm.kind()));
append(AsmJSAbsoluteLink(CodeOffset(masm.currentOffset()), imm.kind()));
}
void mov(const Operand& src, Register dest) {
movl(src, dest);
@ -299,7 +299,7 @@ class Assembler : public AssemblerX86Shared
void mov(Imm32 imm, const Operand& dest) {
movl(imm, dest);
}
void mov(CodeOffsetLabel* label, Register dest) {
void mov(CodeOffset* label, Register dest) {
// Put a placeholder value in the instruction stream.
masm.movl_i32r(0, dest.encoding());
label->use(masm.size());
@ -367,11 +367,11 @@ class Assembler : public AssemblerX86Shared
}
void cmpl(Register rhs, AsmJSAbsoluteAddress lhs) {
masm.cmpl_rm_disp32(rhs.encoding(), (void*)-1);
append(AsmJSAbsoluteLink(CodeOffsetLabel(masm.currentOffset()), lhs.kind()));
append(AsmJSAbsoluteLink(CodeOffset(masm.currentOffset()), lhs.kind()));
}
void cmpl(Imm32 rhs, AsmJSAbsoluteAddress lhs) {
JmpSrc src = masm.cmpl_im_disp32(rhs.value, (void*)-1);
append(AsmJSAbsoluteLink(CodeOffsetLabel(src.offset()), lhs.kind()));
append(AsmJSAbsoluteLink(CodeOffset(src.offset()), lhs.kind()));
}

void adcl(Imm32 imm, Register dest) {
@ -464,8 +464,8 @@ class Assembler : public AssemblerX86Shared

// Emit a CALL or CMP (nop) instruction. ToggleCall can be used to patch
// this instruction.
CodeOffsetLabel toggledCall(JitCode* target, bool enabled) {
CodeOffsetLabel offset(size());
CodeOffset toggledCall(JitCode* target, bool enabled) {
CodeOffset offset(size());
JmpSrc src = enabled ? masm.call() : masm.cmp_eax();
addPendingJump(src, ImmPtr(target->raw()), Relocation::JITCODE);
MOZ_ASSERT_IF(!oom(), size() - offset.offset() == ToggledCallSize(nullptr));
@ -495,13 +495,13 @@ class Assembler : public AssemblerX86Shared

// Move a 32-bit immediate into a register where the immediate can be
// patched.
CodeOffsetLabel movlWithPatch(Imm32 imm, Register dest) {
CodeOffset movlWithPatch(Imm32 imm, Register dest) {
masm.movl_i32r(imm.value, dest.encoding());
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}

// Load from *(base + disp32) where disp32 can be patched.
CodeOffsetLabel movsblWithPatch(const Operand& src, Register dest) {
CodeOffset movsblWithPatch(const Operand& src, Register dest) {
switch (src.kind()) {
case Operand::MEM_REG_DISP:
masm.movsbl_mr_disp32(src.disp(), src.base(), dest.encoding());
@ -512,9 +512,9 @@ class Assembler : public AssemblerX86Shared
default:
MOZ_CRASH("unexpected operand kind");
}
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel movzblWithPatch(const Operand& src, Register dest) {
CodeOffset movzblWithPatch(const Operand& src, Register dest) {
switch (src.kind()) {
case Operand::MEM_REG_DISP:
masm.movzbl_mr_disp32(src.disp(), src.base(), dest.encoding());
@ -525,9 +525,9 @@ class Assembler : public AssemblerX86Shared
default:
MOZ_CRASH("unexpected operand kind");
}
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel movswlWithPatch(const Operand& src, Register dest) {
CodeOffset movswlWithPatch(const Operand& src, Register dest) {
switch (src.kind()) {
case Operand::MEM_REG_DISP:
masm.movswl_mr_disp32(src.disp(), src.base(), dest.encoding());
@ -538,9 +538,9 @@ class Assembler : public AssemblerX86Shared
default:
MOZ_CRASH("unexpected operand kind");
}
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel movzwlWithPatch(const Operand& src, Register dest) {
CodeOffset movzwlWithPatch(const Operand& src, Register dest) {
switch (src.kind()) {
case Operand::MEM_REG_DISP:
masm.movzwl_mr_disp32(src.disp(), src.base(), dest.encoding());
@ -551,9 +551,9 @@ class Assembler : public AssemblerX86Shared
default:
MOZ_CRASH("unexpected operand kind");
}
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel movlWithPatch(const Operand& src, Register dest) {
CodeOffset movlWithPatch(const Operand& src, Register dest) {
switch (src.kind()) {
case Operand::MEM_REG_DISP:
masm.movl_mr_disp32(src.disp(), src.base(), dest.encoding());
@ -564,9 +564,9 @@ class Assembler : public AssemblerX86Shared
default:
MOZ_CRASH("unexpected operand kind");
}
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovssWithPatch(const Operand& src, FloatRegister dest) {
CodeOffset vmovssWithPatch(const Operand& src, FloatRegister dest) {
MOZ_ASSERT(HasSSE2());
switch (src.kind()) {
case Operand::MEM_REG_DISP:
@ -578,9 +578,9 @@ class Assembler : public AssemblerX86Shared
default:
MOZ_CRASH("unexpected operand kind");
}
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovdWithPatch(const Operand& src, FloatRegister dest) {
CodeOffset vmovdWithPatch(const Operand& src, FloatRegister dest) {
MOZ_ASSERT(HasSSE2());
switch (src.kind()) {
case Operand::MEM_REG_DISP:
@ -592,9 +592,9 @@ class Assembler : public AssemblerX86Shared
default:
MOZ_CRASH("unexpected operand kind");
}
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovqWithPatch(const Operand& src, FloatRegister dest) {
CodeOffset vmovqWithPatch(const Operand& src, FloatRegister dest) {
MOZ_ASSERT(HasSSE2());
switch (src.kind()) {
case Operand::MEM_REG_DISP:
@ -606,9 +606,9 @@ class Assembler : public AssemblerX86Shared
default:
MOZ_CRASH("unexpected operand kind");
}
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovsdWithPatch(const Operand& src, FloatRegister dest) {
CodeOffset vmovsdWithPatch(const Operand& src, FloatRegister dest) {
MOZ_ASSERT(HasSSE2());
switch (src.kind()) {
case Operand::MEM_REG_DISP:
@ -620,9 +620,9 @@ class Assembler : public AssemblerX86Shared
default:
MOZ_CRASH("unexpected operand kind");
}
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovupsWithPatch(const Operand& src, FloatRegister dest) {
CodeOffset vmovupsWithPatch(const Operand& src, FloatRegister dest) {
MOZ_ASSERT(HasSSE2());
switch (src.kind()) {
case Operand::MEM_REG_DISP:
@ -634,9 +634,9 @@ class Assembler : public AssemblerX86Shared
default:
MOZ_CRASH("unexpected operand kind");
}
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovdquWithPatch(const Operand& src, FloatRegister dest) {
CodeOffset vmovdquWithPatch(const Operand& src, FloatRegister dest) {
MOZ_ASSERT(HasSSE2());
switch (src.kind()) {
case Operand::MEM_REG_DISP:
@ -648,11 +648,11 @@ class Assembler : public AssemblerX86Shared
default:
MOZ_CRASH("unexpected operand kind");
}
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}

// Store to *(base + disp32) where disp32 can be patched.
CodeOffsetLabel movbWithPatch(Register src, const Operand& dest) {
CodeOffset movbWithPatch(Register src, const Operand& dest) {
switch (dest.kind()) {
case Operand::MEM_REG_DISP:
masm.movb_rm_disp32(src.encoding(), dest.disp(), dest.base());
@ -663,9 +663,9 @@ class Assembler : public AssemblerX86Shared
default:
MOZ_CRASH("unexpected operand kind");
}
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel movwWithPatch(Register src, const Operand& dest) {
CodeOffset movwWithPatch(Register src, const Operand& dest) {
switch (dest.kind()) {
case Operand::MEM_REG_DISP:
masm.movw_rm_disp32(src.encoding(), dest.disp(), dest.base());
@ -676,9 +676,9 @@ class Assembler : public AssemblerX86Shared
default:
MOZ_CRASH("unexpected operand kind");
}
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel movlWithPatch(Register src, const Operand& dest) {
CodeOffset movlWithPatch(Register src, const Operand& dest) {
switch (dest.kind()) {
case Operand::MEM_REG_DISP:
masm.movl_rm_disp32(src.encoding(), dest.disp(), dest.base());
@ -689,9 +689,9 @@ class Assembler : public AssemblerX86Shared
default:
MOZ_CRASH("unexpected operand kind");
}
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovdWithPatch(FloatRegister src, const Operand& dest) {
CodeOffset vmovdWithPatch(FloatRegister src, const Operand& dest) {
MOZ_ASSERT(HasSSE2());
switch (dest.kind()) {
case Operand::MEM_REG_DISP:
@ -703,9 +703,9 @@ class Assembler : public AssemblerX86Shared
default:
MOZ_CRASH("unexpected operand kind");
}
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovqWithPatch(FloatRegister src, const Operand& dest) {
CodeOffset vmovqWithPatch(FloatRegister src, const Operand& dest) {
MOZ_ASSERT(HasSSE2());
switch (dest.kind()) {
case Operand::MEM_REG_DISP:
@ -717,9 +717,9 @@ class Assembler : public AssemblerX86Shared
default:
MOZ_CRASH("unexpected operand kind");
}
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovssWithPatch(FloatRegister src, const Operand& dest) {
CodeOffset vmovssWithPatch(FloatRegister src, const Operand& dest) {
MOZ_ASSERT(HasSSE2());
switch (dest.kind()) {
case Operand::MEM_REG_DISP:
@ -731,9 +731,9 @@ class Assembler : public AssemblerX86Shared
default:
MOZ_CRASH("unexpected operand kind");
}
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovsdWithPatch(FloatRegister src, const Operand& dest) {
CodeOffset vmovsdWithPatch(FloatRegister src, const Operand& dest) {
MOZ_ASSERT(HasSSE2());
switch (dest.kind()) {
case Operand::MEM_REG_DISP:
@ -745,9 +745,9 @@ class Assembler : public AssemblerX86Shared
default:
MOZ_CRASH("unexpected operand kind");
}
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovupsWithPatch(FloatRegister src, const Operand& dest) {
CodeOffset vmovupsWithPatch(FloatRegister src, const Operand& dest) {
MOZ_ASSERT(HasSSE2());
switch (dest.kind()) {
case Operand::MEM_REG_DISP:
@ -759,9 +759,9 @@ class Assembler : public AssemblerX86Shared
default:
MOZ_CRASH("unexpected operand kind");
}
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovdquWithPatch(FloatRegister src, const Operand& dest) {
CodeOffset vmovdquWithPatch(FloatRegister src, const Operand& dest) {
MOZ_ASSERT(HasSSE2());
switch (dest.kind()) {
case Operand::MEM_REG_DISP:
@ -773,135 +773,135 @@ class Assembler : public AssemblerX86Shared
default:
MOZ_CRASH("unexpected operand kind");
}
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}

// Load from *(addr + index*scale) where addr can be patched.
CodeOffsetLabel movlWithPatch(PatchedAbsoluteAddress addr, Register index, Scale scale,
CodeOffset movlWithPatch(PatchedAbsoluteAddress addr, Register index, Scale scale,
Register dest)
{
masm.movl_mr(addr.addr, index.encoding(), scale, dest.encoding());
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}

// Load from *src where src can be patched.
CodeOffsetLabel movsblWithPatch(PatchedAbsoluteAddress src, Register dest) {
CodeOffset movsblWithPatch(PatchedAbsoluteAddress src, Register dest) {
masm.movsbl_mr(src.addr, dest.encoding());
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel movzblWithPatch(PatchedAbsoluteAddress src, Register dest) {
CodeOffset movzblWithPatch(PatchedAbsoluteAddress src, Register dest) {
masm.movzbl_mr(src.addr, dest.encoding());
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel movswlWithPatch(PatchedAbsoluteAddress src, Register dest) {
CodeOffset movswlWithPatch(PatchedAbsoluteAddress src, Register dest) {
masm.movswl_mr(src.addr, dest.encoding());
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel movzwlWithPatch(PatchedAbsoluteAddress src, Register dest) {
CodeOffset movzwlWithPatch(PatchedAbsoluteAddress src, Register dest) {
masm.movzwl_mr(src.addr, dest.encoding());
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel movlWithPatch(PatchedAbsoluteAddress src, Register dest) {
CodeOffset movlWithPatch(PatchedAbsoluteAddress src, Register dest) {
masm.movl_mr(src.addr, dest.encoding());
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovssWithPatch(PatchedAbsoluteAddress src, FloatRegister dest) {
CodeOffset vmovssWithPatch(PatchedAbsoluteAddress src, FloatRegister dest) {
MOZ_ASSERT(HasSSE2());
masm.vmovss_mr(src.addr, dest.encoding());
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovdWithPatch(PatchedAbsoluteAddress src, FloatRegister dest) {
CodeOffset vmovdWithPatch(PatchedAbsoluteAddress src, FloatRegister dest) {
MOZ_ASSERT(HasSSE2());
masm.vmovd_mr(src.addr, dest.encoding());
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovqWithPatch(PatchedAbsoluteAddress src, FloatRegister dest) {
CodeOffset vmovqWithPatch(PatchedAbsoluteAddress src, FloatRegister dest) {
MOZ_ASSERT(HasSSE2());
masm.vmovq_mr(src.addr, dest.encoding());
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovsdWithPatch(PatchedAbsoluteAddress src, FloatRegister dest) {
CodeOffset vmovsdWithPatch(PatchedAbsoluteAddress src, FloatRegister dest) {
MOZ_ASSERT(HasSSE2());
masm.vmovsd_mr(src.addr, dest.encoding());
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovdqaWithPatch(PatchedAbsoluteAddress src, FloatRegister dest) {
CodeOffset vmovdqaWithPatch(PatchedAbsoluteAddress src, FloatRegister dest) {
MOZ_ASSERT(HasSSE2());
masm.vmovdqa_mr(src.addr, dest.encoding());
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovdquWithPatch(PatchedAbsoluteAddress src, FloatRegister dest) {
CodeOffset vmovdquWithPatch(PatchedAbsoluteAddress src, FloatRegister dest) {
MOZ_ASSERT(HasSSE2());
masm.vmovdqu_mr(src.addr, dest.encoding());
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovapsWithPatch(PatchedAbsoluteAddress src, FloatRegister dest) {
CodeOffset vmovapsWithPatch(PatchedAbsoluteAddress src, FloatRegister dest) {
MOZ_ASSERT(HasSSE2());
masm.vmovaps_mr(src.addr, dest.encoding());
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovupsWithPatch(PatchedAbsoluteAddress src, FloatRegister dest) {
CodeOffset vmovupsWithPatch(PatchedAbsoluteAddress src, FloatRegister dest) {
MOZ_ASSERT(HasSSE2());
masm.vmovups_mr(src.addr, dest.encoding());
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}

// Store to *dest where dest can be patched.
CodeOffsetLabel movbWithPatch(Register src, PatchedAbsoluteAddress dest) {
CodeOffset movbWithPatch(Register src, PatchedAbsoluteAddress dest) {
masm.movb_rm(src.encoding(), dest.addr);
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel movwWithPatch(Register src, PatchedAbsoluteAddress dest) {
CodeOffset movwWithPatch(Register src, PatchedAbsoluteAddress dest) {
masm.movw_rm(src.encoding(), dest.addr);
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel movlWithPatch(Register src, PatchedAbsoluteAddress dest) {
CodeOffset movlWithPatch(Register src, PatchedAbsoluteAddress dest) {
masm.movl_rm(src.encoding(), dest.addr);
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovssWithPatch(FloatRegister src, PatchedAbsoluteAddress dest) {
CodeOffset vmovssWithPatch(FloatRegister src, PatchedAbsoluteAddress dest) {
MOZ_ASSERT(HasSSE2());
masm.vmovss_rm(src.encoding(), dest.addr);
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovdWithPatch(FloatRegister src, PatchedAbsoluteAddress dest) {
CodeOffset vmovdWithPatch(FloatRegister src, PatchedAbsoluteAddress dest) {
MOZ_ASSERT(HasSSE2());
masm.vmovd_rm(src.encoding(), dest.addr);
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovqWithPatch(FloatRegister src, PatchedAbsoluteAddress dest) {
CodeOffset vmovqWithPatch(FloatRegister src, PatchedAbsoluteAddress dest) {
MOZ_ASSERT(HasSSE2());
masm.vmovq_rm(src.encoding(), dest.addr);
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovsdWithPatch(FloatRegister src, PatchedAbsoluteAddress dest) {
CodeOffset vmovsdWithPatch(FloatRegister src, PatchedAbsoluteAddress dest) {
MOZ_ASSERT(HasSSE2());
masm.vmovsd_rm(src.encoding(), dest.addr);
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovdqaWithPatch(FloatRegister src, PatchedAbsoluteAddress dest) {
CodeOffset vmovdqaWithPatch(FloatRegister src, PatchedAbsoluteAddress dest) {
MOZ_ASSERT(HasSSE2());
masm.vmovdqa_rm(src.encoding(), dest.addr);
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovapsWithPatch(FloatRegister src, PatchedAbsoluteAddress dest) {
CodeOffset vmovapsWithPatch(FloatRegister src, PatchedAbsoluteAddress dest) {
MOZ_ASSERT(HasSSE2());
masm.vmovaps_rm(src.encoding(), dest.addr);
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovdquWithPatch(FloatRegister src, PatchedAbsoluteAddress dest) {
CodeOffset vmovdquWithPatch(FloatRegister src, PatchedAbsoluteAddress dest) {
MOZ_ASSERT(HasSSE2());
masm.vmovdqu_rm(src.encoding(), dest.addr);
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}
CodeOffsetLabel vmovupsWithPatch(FloatRegister src, PatchedAbsoluteAddress dest) {
CodeOffset vmovupsWithPatch(FloatRegister src, PatchedAbsoluteAddress dest) {
MOZ_ASSERT(HasSSE2());
masm.vmovups_rm(src.encoding(), dest.addr);
return CodeOffsetLabel(masm.currentOffset());
return CodeOffset(masm.currentOffset());
}

void loadAsmJSActivation(Register dest) {
CodeOffsetLabel label = movlWithPatch(PatchedAbsoluteAddress(), dest);
CodeOffset label = movlWithPatch(PatchedAbsoluteAddress(), dest);
append(AsmJSGlobalAccess(label, AsmJSActivationGlobalDataOffset));
}
void loadAsmJSHeapRegisterFromGlobalData() {

@ -778,7 +778,7 @@ CodeGeneratorX86::visitAsmJSLoadGlobalVar(LAsmJSLoadGlobalVar* ins)
MIRType type = mir->type();
MOZ_ASSERT(IsNumberType(type) || IsSimdType(type));

CodeOffsetLabel label;
CodeOffset label;
switch (type) {
case MIRType_Int32:
label = masm.movlWithPatch(PatchedAbsoluteAddress(), ToRegister(ins->output()));
@ -811,7 +811,7 @@ CodeGeneratorX86::visitAsmJSStoreGlobalVar(LAsmJSStoreGlobalVar* ins)
MIRType type = mir->value()->type();
MOZ_ASSERT(IsNumberType(type) || IsSimdType(type));

CodeOffsetLabel label;
CodeOffset label;
switch (type) {
case MIRType_Int32:
label = masm.movlWithPatch(ToRegister(ins->value()), PatchedAbsoluteAddress());
@ -843,7 +843,7 @@ CodeGeneratorX86::visitAsmJSLoadFuncPtr(LAsmJSLoadFuncPtr* ins)

Register index = ToRegister(ins->index());
Register out = ToRegister(ins->output());
CodeOffsetLabel label = masm.movlWithPatch(PatchedAbsoluteAddress(), index, TimesFour, out);
CodeOffset label = masm.movlWithPatch(PatchedAbsoluteAddress(), index, TimesFour, out);
masm.append(AsmJSGlobalAccess(label, mir->globalDataOffset()));
}

@ -853,7 +853,7 @@ CodeGeneratorX86::visitAsmJSLoadFFIFunc(LAsmJSLoadFFIFunc* ins)
MAsmJSLoadFFIFunc* mir = ins->mir();

Register out = ToRegister(ins->output());
CodeOffsetLabel label = masm.movlWithPatch(PatchedAbsoluteAddress(), out);
CodeOffset label = masm.movlWithPatch(PatchedAbsoluteAddress(), out);
masm.append(AsmJSGlobalAccess(label, mir->globalDataOffset()));
}

@ -99,7 +99,7 @@ MacroAssemblerX86::loadConstantDouble(double d, FloatRegister dest)
if (!dbl)
return;
masm.vmovsd_mr(nullptr, dest.encoding());
dbl->uses.append(CodeOffsetLabel(masm.size()));
dbl->uses.append(CodeOffset(masm.size()));
}

void
@ -109,7 +109,7 @@ MacroAssemblerX86::addConstantDouble(double d, FloatRegister dest)
if (!dbl)
return;
masm.vaddsd_mr(nullptr, dest.encoding(), dest.encoding());
dbl->uses.append(CodeOffsetLabel(masm.size()));
dbl->uses.append(CodeOffset(masm.size()));
}

void
@ -121,7 +121,7 @@ MacroAssemblerX86::loadConstantFloat32(float f, FloatRegister dest)
if (!flt)
return;
masm.vmovss_mr(nullptr, dest.encoding());
flt->uses.append(CodeOffsetLabel(masm.size()));
flt->uses.append(CodeOffset(masm.size()));
}

void
@ -131,7 +131,7 @@ MacroAssemblerX86::addConstantFloat32(float f, FloatRegister dest)
if (!flt)
return;
masm.vaddss_mr(nullptr, dest.encoding(), dest.encoding());
flt->uses.append(CodeOffsetLabel(masm.size()));
flt->uses.append(CodeOffset(masm.size()));
}

void
@ -145,7 +145,7 @@ MacroAssemblerX86::loadConstantInt32x4(const SimdConstant& v, FloatRegister dest
return;
MOZ_ASSERT(i4->type() == SimdConstant::Int32x4);
masm.vmovdqa_mr(nullptr, dest.encoding());
i4->uses.append(CodeOffsetLabel(masm.size()));
i4->uses.append(CodeOffset(masm.size()));
}

void
@ -159,7 +159,7 @@ MacroAssemblerX86::loadConstantFloat32x4(const SimdConstant& v, FloatRegister de
return;
MOZ_ASSERT(f4->type() == SimdConstant::Float32x4);
masm.vmovaps_mr(nullptr, dest.encoding());
f4->uses.append(CodeOffsetLabel(masm.size()));
f4->uses.append(CodeOffset(masm.size()));
}

void
@ -168,8 +168,8 @@ MacroAssemblerX86::finish()
if (!doubles_.empty())
masm.haltingAlign(sizeof(double));
for (const Double& d : doubles_) {
CodeOffsetLabel cst(masm.currentOffset());
for (CodeOffsetLabel use : d.uses)
CodeOffset cst(masm.currentOffset());
for (CodeOffset use : d.uses)
addCodeLabel(CodeLabel(use, cst));
masm.doubleConstant(d.value);
if (!enoughMemory_)
@ -179,8 +179,8 @@ MacroAssemblerX86::finish()
if (!floats_.empty())
masm.haltingAlign(sizeof(float));
for (const Float& f : floats_) {
CodeOffsetLabel cst(masm.currentOffset());
for (CodeOffsetLabel use : f.uses)
CodeOffset cst(masm.currentOffset());
for (CodeOffset use : f.uses)
addCodeLabel(CodeLabel(use, cst));
masm.floatConstant(f.value);
if (!enoughMemory_)
@ -191,8 +191,8 @@ MacroAssemblerX86::finish()
if (!simds_.empty())
masm.haltingAlign(SimdMemoryAlignment);
for (const SimdData& v : simds_) {
CodeOffsetLabel cst(masm.currentOffset());
for (CodeOffsetLabel use : v.uses)
CodeOffset cst(masm.currentOffset());
for (CodeOffset use : v.uses)
addCodeLabel(CodeLabel(use, cst));
switch (v.type()) {
case SimdConstant::Int32x4: masm.int32x4Constant(v.value.asInt32x4()); break;

@ -31,10 +31,10 @@ EmitRepushTailCallReg(MacroAssembler& masm)
}

inline void
EmitCallIC(CodeOffsetLabel* patchOffset, MacroAssembler& masm)
EmitCallIC(CodeOffset* patchOffset, MacroAssembler& masm)
{
// Move ICEntry offset into ICStubReg
CodeOffsetLabel offset = masm.movWithPatch(ImmWord(-1), ICStubReg);
CodeOffset offset = masm.movWithPatch(ImmWord(-1), ICStubReg);
*patchOffset = offset;

// Load stub pointer into ICStubReg
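Across these hunks the renamed type is only ever asked for a handful of operations: construction from masm.currentOffset(), offset(), use(), used(), and offsetBy(). A rough, self-contained sketch of that interface is given below, under the assumption that an invalid sentinel marks an unbound offset; the name SketchCodeOffset and the sentinel choice are illustrative, and the real definition lives in the shared jit headers.

    #include <cstddef>

    // Simplified sketch of the interface the callers above rely on.
    class SketchCodeOffset {
        static const size_t InvalidOffset = static_cast<size_t>(-1);
        size_t offset_;

      public:
        explicit SketchCodeOffset(size_t offset = InvalidOffset) : offset_(offset) {}

        size_t offset() const { return offset_; }
        bool used() const { return offset_ != InvalidOffset; }

        // Record the position in the code stream that will need patching.
        void use(size_t offset) { offset_ = offset; }

        // Shift the recorded position, e.g. when code is copied into a new buffer.
        void offsetBy(size_t delta) { offset_ += delta; }
    };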