Bug 977805 - Add an option to mark JIT pages as non-writable. r=luke

This commit is contained in:
Jan de Mooij 2015-06-12 10:20:59 +02:00
parent 1c859963ca
commit c6dc5991b5
28 changed files with 312 additions and 124 deletions

View File

@ -598,6 +598,9 @@ DynamicallyLinkModule(JSContext* cx, CallArgs args, AsmJSModule& module)
module.initGlobalNaN();
// See the comment in AllocateExecutableMemory.
ExecutableAllocator::makeExecutable(module.codeBase(), module.codeBytes());
return true;
}

View File

@ -62,11 +62,11 @@ using mozilla::Swap;
static uint8_t*
AllocateExecutableMemory(ExclusiveContext* cx, size_t bytes)
{
#ifdef XP_WIN
unsigned permissions = PAGE_EXECUTE_READWRITE;
#else
unsigned permissions = PROT_READ | PROT_WRITE | PROT_EXEC;
#endif
// On most platforms, this will allocate RWX memory. On iOS, or when
// --non-writable-jitcode is used, this will allocate RW memory. In this
// case, DynamicallyLinkModule will reprotect the code as RX.
unsigned permissions =
ExecutableAllocator::initialProtectionFlags(ExecutableAllocator::Writable);
void* p = AllocateExecutableMemory(nullptr, bytes, permissions, "asm-js-code", AsmJSPageSize);
if (!p)
ReportOutOfMemory(cx);
@ -295,10 +295,9 @@ AsmJSModule::finish(ExclusiveContext* cx, TokenStream& tokenStream, MacroAssembl
pod.srcLength_ = endBeforeCurly - srcStart_;
pod.srcLengthWithRightBrace_ = endAfterCurly - srcStart_;
// The global data section sits immediately after the executable (and
// other) data allocated by the MacroAssembler, so ensure it is
// SIMD-aligned.
pod.codeBytes_ = AlignBytes(masm.bytesNeeded(), SimdMemoryAlignment);
// Start global data on a new page so JIT code may be given independent
// protection flags.
pod.codeBytes_ = AlignBytes(masm.bytesNeeded(), AsmJSPageSize);
// The entire region is allocated via mmap/VirtualAlloc which requires
// units of pages.
@ -947,6 +946,24 @@ AsmJSModule::restoreToInitialState(ArrayBufferObjectMaybeShared* maybePrevBuffer
restoreHeapToInitialState(maybePrevBuffer);
}
namespace {
// RAII helper for patching an AsmJSModule's code in place: makes the
// module's code pages writable for the scope (via AutoWritableJitCode) and
// flushes the instruction cache for the module's code range on exit (via
// AutoFlushICache), so callers can simply patch code inside the scope.
class MOZ_STACK_CLASS AutoMutateCode
{
AutoWritableJitCode awjc_;
AutoFlushICache afc_;
public:
AutoMutateCode(JSContext* cx, AsmJSModule& module, const char* name)
: awjc_(cx->runtime(), module.codeBase(), module.codeBytes()),
afc_(name)
{
// Restrict the eventual icache flush to this module's code range.
module.setAutoFlushICacheRange();
}
};
}; // anonymous namespace
bool
AsmJSModule::detachHeap(JSContext* cx)
{
@ -967,9 +984,7 @@ AsmJSModule::detachHeap(JSContext* cx)
MOZ_ASSERT_IF(active(), activation()->exitReason() == AsmJSExit::Reason_JitFFI ||
activation()->exitReason() == AsmJSExit::Reason_SlowFFI);
AutoFlushICache afc("AsmJSModule::detachHeap");
setAutoFlushICacheRange();
AutoMutateCode amc(cx, *this, "AsmJSModule::detachHeap");
restoreHeapToInitialState(maybeHeap_);
MOZ_ASSERT(hasDetachedHeap());
@ -1717,9 +1732,7 @@ AsmJSModule::changeHeap(Handle<ArrayBufferObject*> newHeap, JSContext* cx)
if (interrupted_)
return false;
AutoFlushICache afc("AsmJSModule::changeHeap");
setAutoFlushICacheRange();
AutoMutateCode amc(cx, *this, "AsmJSModule::changeHeap");
restoreHeapToInitialState(maybeHeap_);
initHeap(newHeap, cx);
return true;
@ -1756,9 +1769,7 @@ AsmJSModule::setProfilingEnabled(bool enabled, JSContext* cx)
profilingLabels_.clear();
}
// Conservatively flush the icache for the entire module.
AutoFlushICache afc("AsmJSModule::setProfilingEnabled");
setAutoFlushICacheRange();
AutoMutateCode amc(cx, *this, "AsmJSModule::setProfilingEnabled");
// Patch all internal (asm.js->asm.js) callsites to call the profiling
// prologues:

View File

@ -442,6 +442,8 @@ NativeRegExpMacroAssembler::GenerateCode(JSContext* cx, bool match_only)
writePerfSpewerJitCodeProfile(code, "RegExp");
#endif
AutoWritableJitCode awjc(code);
for (size_t i = 0; i < labelPatches.length(); i++) {
LabelPatch& v = labelPatches[i];
MOZ_ASSERT(!v.label);

View File

@ -232,7 +232,17 @@ BaselineCompiler::compile()
// Adopt fallback stubs from the compiler into the baseline script.
baselineScript->adoptFallbackStubs(&stubSpace_);
// Patch IC loads using IC entries
// All barriers are emitted off-by-default, toggle them on if needed.
if (cx->zone()->needsIncrementalBarrier())
baselineScript->toggleBarriers(true);
// If profiler instrumentation is enabled, toggle instrumentation on.
if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(cx->runtime()))
baselineScript->toggleProfilerInstrumentation(true);
AutoWritableJitCode awjc(code);
// Patch IC loads using IC entries.
for (size_t i = 0; i < icLoadLabels_.length(); i++) {
CodeOffsetLabel label = icLoadLabels_[i].label;
label.fixup(&masm);
@ -246,10 +256,6 @@ BaselineCompiler::compile()
if (modifiesArguments_)
baselineScript->setModifiesArguments();
// All barriers are emitted off-by-default, toggle them on if needed.
if (cx->zone()->needsIncrementalBarrier())
baselineScript->toggleBarriers(true);
#ifdef JS_TRACE_LOGGING
// Initialize the tracelogger instrumentation.
baselineScript->initTraceLogger(cx->runtime(), script);
@ -267,10 +273,6 @@ BaselineCompiler::compile()
if (compileDebugInstrumentation_)
baselineScript->setHasDebugInstrumentation();
// If profiler instrumentation is enabled, toggle instrumentation on.
if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(cx->runtime()))
baselineScript->toggleProfilerInstrumentation(true);
// Always register a native => bytecode mapping entry, since profiler can be
// turned on with baseline jitcode on stack, and baseline jitcode cannot be invalidated.
{

View File

@ -864,6 +864,8 @@ BaselineScript::toggleDebugTraps(JSScript* script, jsbytecode* pc)
SrcNoteLineScanner scanner(script->notes(), script->lineno());
AutoWritableJitCode awjc(method());
for (uint32_t i = 0; i < numPCMappingIndexEntries(); i++) {
PCMappingIndexEntry& entry = pcMappingIndexEntry(i);
@ -910,6 +912,7 @@ BaselineScript::initTraceLogger(JSRuntime* runtime, JSScript* script)
traceLoggerScriptEvent_ = TraceLoggerEvent(logger, TraceLogger_Scripts);
if (TraceLogTextIdEnabled(TraceLogger_Engine) || TraceLogTextIdEnabled(TraceLogger_Scripts)) {
AutoWritableJitCode awjc(method_);
CodeLocationLabel enter(method_, CodeOffsetLabel(traceLoggerEnterToggleOffset_));
CodeLocationLabel exit(method_, CodeOffsetLabel(traceLoggerExitToggleOffset_));
Assembler::ToggleToCmp(enter);
@ -933,6 +936,8 @@ BaselineScript::toggleTraceLoggerScripts(JSRuntime* runtime, JSScript* script, b
else
traceLoggerScriptEvent_ = TraceLoggerEvent(logger, TraceLogger_Scripts);
AutoWritableJitCode awjc(method());
// Enable/Disable the traceLogger prologue and epilogue.
CodeLocationLabel enter(method_, CodeOffsetLabel(traceLoggerEnterToggleOffset_));
CodeLocationLabel exit(method_, CodeOffsetLabel(traceLoggerExitToggleOffset_));
@ -959,6 +964,8 @@ BaselineScript::toggleTraceLoggerEngine(bool enable)
MOZ_ASSERT(enable == !traceLoggerEngineEnabled_);
MOZ_ASSERT(scriptsEnabled == traceLoggerScriptsEnabled_);
AutoWritableJitCode awjc(method());
// Enable/Disable the traceLogger prologue and epilogue.
CodeLocationLabel enter(method_, CodeOffsetLabel(traceLoggerEnterToggleOffset_));
CodeLocationLabel exit(method_, CodeOffsetLabel(traceLoggerExitToggleOffset_));
@ -987,6 +994,8 @@ BaselineScript::toggleProfilerInstrumentation(bool enable)
JitSpew(JitSpew_BaselineIC, " toggling profiling %s for BaselineScript %p",
enable ? "on" : "off", this);
AutoWritableJitCode awjc(method());
// Toggle the jump
CodeLocationLabel enterToggleLocation(method_, CodeOffsetLabel(profilerEnterToggleOffset_));
CodeLocationLabel exitToggleLocation(method_, CodeOffsetLabel(profilerExitToggleOffset_));

View File

@ -7996,10 +7996,43 @@ CodeGenerator::link(JSContext* cx, CompilerConstraintList* constraints)
script->setIonScript(cx, ionScript);
invalidateEpilogueData_.fixup(&masm);
Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, invalidateEpilogueData_),
ImmPtr(ionScript),
ImmPtr((void*)-1));
{
AutoWritableJitCode awjc(code);
invalidateEpilogueData_.fixup(&masm);
Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, invalidateEpilogueData_),
ImmPtr(ionScript),
ImmPtr((void*)-1));
for (size_t i = 0; i < ionScriptLabels_.length(); i++) {
ionScriptLabels_[i].fixup(&masm);
Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, ionScriptLabels_[i]),
ImmPtr(ionScript),
ImmPtr((void*)-1));
}
#ifdef JS_TRACE_LOGGING
TraceLoggerThread* logger = TraceLoggerForMainThread(cx->runtime());
for (uint32_t i = 0; i < patchableTraceLoggers_.length(); i++) {
patchableTraceLoggers_[i].fixup(&masm);
Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, patchableTraceLoggers_[i]),
ImmPtr(logger),
ImmPtr(nullptr));
}
if (patchableTLScripts_.length() > 0) {
MOZ_ASSERT(TraceLogTextIdEnabled(TraceLogger_Scripts));
TraceLoggerEvent event(logger, TraceLogger_Scripts, script);
ionScript->setTraceLoggerEvent(event);
uint32_t textId = event.payload()->textId();
for (uint32_t i = 0; i < patchableTLScripts_.length(); i++) {
patchableTLScripts_[i].fixup(&masm);
Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, patchableTLScripts_[i]),
ImmPtr((void*) uintptr_t(textId)),
ImmPtr((void*)0));
}
}
#endif
}
JitSpew(JitSpew_Codegen, "Created IonScript %p (raw %p)",
(void*) ionScript, (void*) code->raw());
@ -8017,13 +8050,6 @@ CodeGenerator::link(JSContext* cx, CompilerConstraintList* constraints)
perfSpewer_.writeProfile(script, code, masm);
#endif
for (size_t i = 0; i < ionScriptLabels_.length(); i++) {
ionScriptLabels_[i].fixup(&masm);
Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, ionScriptLabels_[i]),
ImmPtr(ionScript),
ImmPtr((void*)-1));
}
// for generating inline caches during the execution.
if (runtimeData_.length())
ionScript->copyRuntimeData(&runtimeData_[0]);
@ -8051,29 +8077,6 @@ CodeGenerator::link(JSContext* cx, CompilerConstraintList* constraints)
if (patchableBackedges_.length() > 0)
ionScript->copyPatchableBackedges(cx, code, patchableBackedges_.begin(), masm);
#ifdef JS_TRACE_LOGGING
TraceLoggerThread* logger = TraceLoggerForMainThread(cx->runtime());
for (uint32_t i = 0; i < patchableTraceLoggers_.length(); i++) {
patchableTraceLoggers_[i].fixup(&masm);
Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, patchableTraceLoggers_[i]),
ImmPtr(logger),
ImmPtr(nullptr));
}
if (patchableTLScripts_.length() > 0) {
MOZ_ASSERT(TraceLogTextIdEnabled(TraceLogger_Scripts));
TraceLoggerEvent event(logger, TraceLogger_Scripts, script);
ionScript->setTraceLoggerEvent(event);
uint32_t textId = event.payload()->textId();
for (uint32_t i = 0; i < patchableTLScripts_.length(); i++) {
patchableTLScripts_[i].fixup(&masm);
Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, patchableTLScripts_[i]),
ImmPtr((void*) uintptr_t(textId)),
ImmPtr((void*)0));
}
}
#endif
// Replace dummy JSObject pointers embedded by LNurseryObject.
code->fixupNurseryObjects(cx, gen->nurseryObjects());

View File

@ -62,3 +62,8 @@ ExecutableAllocator::addSizeOfCode(JS::CodeSizes* sizes) const
}
}
#ifdef TARGET_OS_IPHONE
bool ExecutableAllocator::nonWritableJitCode = true;
#else
bool ExecutableAllocator::nonWritableJitCode = false;
#endif

View File

@ -1,4 +1,6 @@
/*
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sts=4 et sw=4 tw=99:
*
* Copyright (C) 2008 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
@ -57,14 +59,6 @@ extern "C" void sync_instruction_memory(caddr_t v, u_int len);
#include <sys/cachectl.h>
#endif
#if ENABLE_ASSEMBLER_WX_EXCLUSIVE
#define PROTECTION_FLAGS_RW (PROT_READ | PROT_WRITE)
#define PROTECTION_FLAGS_RX (PROT_READ | PROT_EXEC)
#define INITIAL_PROTECTION_FLAGS PROTECTION_FLAGS_RX
#else
#define INITIAL_PROTECTION_FLAGS (PROT_READ | PROT_WRITE | PROT_EXEC)
#endif
namespace JS {
struct CodeSizes;
}
@ -177,12 +171,14 @@ namespace jit {
}
};
class ExecutableAllocator {
class ExecutableAllocator
{
typedef void (*DestroyCallback)(void* addr, size_t size);
enum ProtectionSetting { Writable, Executable };
DestroyCallback destroyCallback;
public:
enum ProtectionSetting { Writable, Executable };
ExecutableAllocator()
: destroyCallback(nullptr)
{
@ -267,6 +263,8 @@ class ExecutableAllocator {
this->destroyCallback = destroyCallback;
}
static bool nonWritableJitCode;
private:
static size_t pageSize;
static size_t largeAllocSize;
@ -379,21 +377,19 @@ class ExecutableAllocator {
return pool;
}
#if ENABLE_ASSEMBLER_WX_EXCLUSIVE
static void makeWritable(void* start, size_t size)
{
reprotectRegion(start, size, Writable);
if (nonWritableJitCode)
reprotectRegion(start, size, Writable);
}
static void makeExecutable(void* start, size_t size)
{
reprotectRegion(start, size, Executable);
if (nonWritableJitCode)
reprotectRegion(start, size, Executable);
}
#else
static void makeWritable(void*, size_t) {}
static void makeExecutable(void*, size_t) {}
#endif
static unsigned initialProtectionFlags(ProtectionSetting protection);
#if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
static void cacheFlush(void*, size_t)
@ -446,9 +442,7 @@ class ExecutableAllocator {
ExecutableAllocator(const ExecutableAllocator&) = delete;
void operator=(const ExecutableAllocator&) = delete;
#if ENABLE_ASSEMBLER_WX_EXCLUSIVE
static void reprotectRegion(void*, size_t, ProtectionSetting);
#endif
// These are strong references; they keep pools alive.
static const size_t maxSmallPools = 4;

View File

@ -1,4 +1,6 @@
/*
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sts=4 et sw=4 tw=99:
*
* Copyright (C) 2008 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
@ -35,7 +37,8 @@
using namespace js::jit;
size_t ExecutableAllocator::determinePageSize()
size_t
ExecutableAllocator::determinePageSize()
{
return getpagesize();
}
@ -57,24 +60,29 @@ js::jit::DeallocateExecutableMemory(void* addr, size_t bytes, size_t pageSize)
MOZ_ASSERT(!result || errno == ENOMEM);
}
ExecutablePool::Allocation ExecutableAllocator::systemAlloc(size_t n)
ExecutablePool::Allocation
ExecutableAllocator::systemAlloc(size_t n)
{
void* allocation = AllocateExecutableMemory(nullptr, n, INITIAL_PROTECTION_FLAGS,
void* allocation = AllocateExecutableMemory(nullptr, n, initialProtectionFlags(Executable),
"js-jit-code", pageSize);
ExecutablePool::Allocation alloc = { reinterpret_cast<char*>(allocation), n };
return alloc;
}
void ExecutableAllocator::systemRelease(const ExecutablePool::Allocation& alloc)
void
ExecutableAllocator::systemRelease(const ExecutablePool::Allocation& alloc)
{
DeallocateExecutableMemory(alloc.pages, alloc.size, pageSize);
}
#if WTF_ENABLE_ASSEMBLER_WX_EXCLUSIVE
void ExecutableAllocator::reprotectRegion(void* start, size_t size, ProtectionSetting setting)
static const unsigned FLAGS_RW = PROT_READ | PROT_WRITE;
static const unsigned FLAGS_RX = PROT_READ | PROT_EXEC;
void
ExecutableAllocator::reprotectRegion(void* start, size_t size, ProtectionSetting setting)
{
if (!pageSize)
intializePageSize();
MOZ_ASSERT(nonWritableJitCode);
MOZ_ASSERT(pageSize);
// Calculate the start of the page containing this region,
// and account for this extra memory within size.
@ -87,7 +95,14 @@ void ExecutableAllocator::reprotectRegion(void* start, size_t size, ProtectionSe
size += (pageSize - 1);
size &= ~(pageSize - 1);
mprotect(pageStart, size, (setting == Writable) ? PROTECTION_FLAGS_RW : PROTECTION_FLAGS_RX);
mprotect(pageStart, size, (setting == Writable) ? FLAGS_RW : FLAGS_RX);
}
#endif
// Returns the mprotect-style protection flags newly allocated JIT memory
// should get, given the caller's intended first use.
/* static */ unsigned
ExecutableAllocator::initialProtectionFlags(ProtectionSetting protection)
{
// Default mode: JIT pages are RWX for their entire lifetime.
if (!nonWritableJitCode)
return FLAGS_RW | FLAGS_RX;
// Non-writable-jitcode (W^X) mode: pages are either writable or
// executable, never both at once.
return (protection == Writable) ? FLAGS_RW : FLAGS_RX;
}

View File

@ -1,4 +1,6 @@
/*
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sts=4 et sw=4 tw=99:
*
* Copyright (C) 2008 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
@ -35,14 +37,16 @@ using namespace js::jit;
uint64_t ExecutableAllocator::rngSeed;
size_t ExecutableAllocator::determinePageSize()
size_t
ExecutableAllocator::determinePageSize()
{
SYSTEM_INFO system_info;
GetSystemInfo(&system_info);
return system_info.dwPageSize;
}
void* ExecutableAllocator::computeRandomAllocationAddress()
void*
ExecutableAllocator::computeRandomAllocationAddress()
{
/*
* Inspiration is V8's OS::Allocate in platform-win32.cc.
@ -186,7 +190,6 @@ js::jit::AllocateExecutableMemory(void* addr, size_t bytes, unsigned permissions
size_t pageSize)
{
MOZ_ASSERT(bytes % pageSize == 0);
MOZ_ASSERT(permissions == PAGE_EXECUTE_READWRITE);
#ifdef JS_CPU_X64
if (sJitExceptionHandler)
@ -226,27 +229,57 @@ js::jit::DeallocateExecutableMemory(void* addr, size_t bytes, size_t pageSize)
VirtualFree(addr, 0, MEM_RELEASE);
}
ExecutablePool::Allocation ExecutableAllocator::systemAlloc(size_t n)
ExecutablePool::Allocation
ExecutableAllocator::systemAlloc(size_t n)
{
void* allocation = nullptr;
if (!RandomizeIsBroken()) {
void* randomAddress = computeRandomAllocationAddress();
allocation = AllocateExecutableMemory(randomAddress, n, PAGE_EXECUTE_READWRITE,
allocation = AllocateExecutableMemory(randomAddress, n, initialProtectionFlags(Executable),
"js-jit-code", pageSize);
}
if (!allocation) {
allocation = AllocateExecutableMemory(nullptr, n, PAGE_EXECUTE_READWRITE,
allocation = AllocateExecutableMemory(nullptr, n, initialProtectionFlags(Executable),
"js-jit-code", pageSize);
}
ExecutablePool::Allocation alloc = { reinterpret_cast<char*>(allocation), n };
return alloc;
}
void ExecutableAllocator::systemRelease(const ExecutablePool::Allocation& alloc)
void
ExecutableAllocator::systemRelease(const ExecutablePool::Allocation& alloc)
{
DeallocateExecutableMemory(alloc.pages, alloc.size, pageSize);
}
// W^X-exclusive allocation is not implemented for this (Windows) backend;
// fail the build early if it is enabled here.
#if ENABLE_ASSEMBLER_WX_EXCLUSIVE
#error "ASSEMBLER_WX_EXCLUSIVE not yet supported on this platform."
#endif
// Switches the protection of all pages covering [start, start + size) to
// RW (Writable) or RX (Executable). Only called in non-writable-jitcode
// mode; crashes if VirtualProtect fails.
void
ExecutableAllocator::reprotectRegion(void* start, size_t size, ProtectionSetting setting)
{
MOZ_ASSERT(nonWritableJitCode);
MOZ_ASSERT(pageSize);
// Calculate the start of the page containing this region,
// and account for this extra memory within size.
intptr_t startPtr = reinterpret_cast<intptr_t>(start);
intptr_t pageStartPtr = startPtr & ~(pageSize - 1);
void* pageStart = reinterpret_cast<void*>(pageStartPtr);
size += (startPtr - pageStartPtr);
// Round size up
size += (pageSize - 1);
size &= ~(pageSize - 1);
// VirtualProtect requires a non-null out-parameter for the old protection.
DWORD oldProtect;
int flags = (setting == Writable) ? PAGE_READWRITE : PAGE_EXECUTE_READ;
if (!VirtualProtect(pageStart, size, flags, &oldProtect))
MOZ_CRASH();
}
// Returns the VirtualAlloc protection constant newly allocated JIT memory
// should get, given the caller's intended first use.
/* static */ unsigned
ExecutableAllocator::initialProtectionFlags(ProtectionSetting protection)
{
// Default mode: JIT pages are RWX for their entire lifetime.
if (!nonWritableJitCode)
return PAGE_EXECUTE_READWRITE;
// Non-writable-jitcode (W^X) mode: pages are either writable or
// executable, never both at once.
return (protection == Writable) ? PAGE_READWRITE : PAGE_EXECUTE_READ;
}

View File

@ -767,6 +767,12 @@ JitCode::traceChildren(JSTracer* trc)
if (invalidated())
return;
// If we're moving objects, we need writable JIT code.
ReprotectCode reprotect = (trc->runtime()->isHeapMinorCollecting() || zone()->isGCCompacting())
? Reprotect
: DontReprotect;
MaybeAutoWritableJitCode awjc(this, reprotect);
if (jumpRelocTableBytes_) {
uint8_t* start = code_ + jumpRelocTableOffset();
CompactBufferReader reader(start, start + jumpRelocTableBytes_);
@ -785,6 +791,8 @@ JitCode::fixupNurseryObjects(JSContext* cx, const ObjectVector& nurseryObjects)
if (nurseryObjects.empty() || !dataRelocTableBytes_)
return;
AutoWritableJitCode awjc(this);
uint8_t* start = code_ + dataRelocTableOffset();
CompactBufferReader reader(start, start + dataRelocTableBytes_);
MacroAssembler::FixupNurseryObjects(cx, this, reader, nurseryObjects);
@ -806,8 +814,11 @@ JitCode::finalize(FreeOp* fop)
// Buffer can be freed at any time hereafter. Catch use-after-free bugs.
// Don't do this if the Ion code is protected, as the signal handler will
// deadlock trying to reacquire the interrupt lock.
memset(code_, JS_SWEPT_CODE_PATTERN, bufferSize_);
code_ = nullptr;
{
AutoWritableJitCode awjc(this);
memset(code_, JS_SWEPT_CODE_PATTERN, bufferSize_);
code_ = nullptr;
}
// Code buffers are stored inside JSC pools.
// Pools are refcounted. Releasing the pool may free it.
@ -823,6 +834,7 @@ JitCode::finalize(FreeOp* fop)
void
JitCode::togglePreBarriers(bool enabled)
{
AutoWritableJitCode awjc(this);
uint8_t* start = code_ + preBarrierTableOffset();
CompactBufferReader reader(start, start + preBarrierTableBytes_);
@ -1215,6 +1227,7 @@ IonScript::purgeCaches()
if (invalidated())
return;
AutoWritableJitCode awjc(method());
for (size_t i = 0; i < numCaches(); i++)
getCacheFromIndex(i).reset();
}
@ -2829,6 +2842,7 @@ InvalidateActivation(FreeOp* fop, const JitActivationIterator& activations, bool
// the call sequence causing the safepoint being >= the size of
// a uint32, which is checked during safepoint index
// construction.
AutoWritableJitCode awjc(ionCode);
const SafepointIndex* si = ionScript->getSafepointIndex(it.returnAddressToFp());
CodeLocationLabel dataLabelToMunge(it.returnAddressToFp());
ptrdiff_t delta = ionScript->invalidateEpilogueDataOffset() -

View File

@ -246,11 +246,13 @@ class IonCache::StubAttacher
void patchRejoinJump(MacroAssembler& masm, JitCode* code) {
rejoinOffset_.fixup(&masm);
CodeLocationJump rejoinJump(code, rejoinOffset_);
AutoWritableJitCode awjc(code);
PatchJump(rejoinJump, rejoinLabel_);
}
void patchStubCodePointer(MacroAssembler& masm, JitCode* code) {
if (hasStubCodePatchOffset_) {
AutoWritableJitCode awjc(code);
stubCodePatchOffset_.fixup(&masm);
Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, stubCodePatchOffset_),
ImmPtr(code), STUB_ADDR);
@ -260,11 +262,12 @@ class IonCache::StubAttacher
void patchNextStubJump(MacroAssembler& masm, JitCode* code) {
// Patch the previous nextStubJump of the last stub, or the jump from the
// codeGen, to jump into the newly allocated code.
PatchJump(cache_.lastJump_, CodeLocationLabel(code));
PatchJump(cache_.lastJump_, CodeLocationLabel(code), Reprotect);
// If this path is not taken, we are producing an entry which can no
// longer go back into the update function.
if (hasNextStubOffset_) {
AutoWritableJitCode awjc(code);
nextStubOffset_.fixup(&masm);
CodeLocationJump nextStubJump(code, nextStubOffset_);
PatchJump(nextStubJump, cache_.fallbackLabel_);
@ -371,6 +374,7 @@ IonCache::linkAndAttachStub(JSContext* cx, MacroAssembler& masm, StubAttacher& a
void
IonCache::updateBaseAddress(JitCode* code, MacroAssembler& masm)
{
AutoWritableJitCode awjc(code);
fallbackLabel_.repoint(code, &masm);
initialJump_.repoint(code, &masm);
lastJump_.repoint(code, &masm);
@ -2000,8 +2004,9 @@ GetPropertyIC::reset()
}
void
IonCache::disable()
IonCache::disable(IonScript* ion)
{
AutoWritableJitCode awjc(ion->method());
reset();
this->disabled_ = 1;
}
@ -4042,7 +4047,7 @@ GetElementIC::update(JSContext* cx, HandleScript outerScript, size_t cacheIndex,
cache.incFailedUpdates();
if (cache.shouldDisable()) {
JitSpew(JitSpew_IonIC, "Disable inline cache");
cache.disable();
cache.disable(ion);
}
} else {
cache.resetFailedUpdates();

View File

@ -243,7 +243,7 @@ class IonCache
{
}
virtual void disable();
void disable(IonScript* ion);
inline bool isDisabled() const {
return disabled_;
}

View File

@ -103,6 +103,10 @@ class JitCode : public gc::TenuredCell
size_t instructionsSize() const {
return insnSize_;
}
size_t bufferSize() const {
return bufferSize_;
}
void traceChildren(JSTracer* trc);
void finalize(FreeOp* fop);
void fixupAfterMovingGC() {}

View File

@ -557,6 +557,51 @@ void FinishInvalidation(FreeOp* fop, JSScript* script);
const unsigned WINDOWS_BIG_FRAME_TOUCH_INCREMENT = 4096 - 1;
#endif
// If ExecutableAllocator::nonWritableJitCode is |true|, this class will ensure
// JIT code is writable (has RW permissions) in its scope. If nonWritableJitCode
// is |false|, it's a no-op.
// RAII scope that makes the JIT code range [addr, addr + size) writable on
// entry and re-protects it as executable on exit. A no-op internally when
// ExecutableAllocator::nonWritableJitCode is false (see makeWritable/
// makeExecutable).
class MOZ_STACK_CLASS AutoWritableJitCode
{
JSRuntime* rt_;
void* addr_;
size_t size_;
public:
AutoWritableJitCode(JSRuntime* rt, void* addr, size_t size)
: rt_(rt), addr_(addr), size_(size)
{
// Flag the runtime first so nesting of these scopes is asserted against.
rt_->toggleAutoWritableJitCodeActive(true);
ExecutableAllocator::makeWritable(addr_, size_);
}
// Convenience overload: derives the runtime from TLS (main thread).
AutoWritableJitCode(void* addr, size_t size)
: AutoWritableJitCode(TlsPerThreadData.get()->runtimeFromMainThread(), addr, size)
{}
// Convenience overload covering a JitCode object's entire buffer.
explicit AutoWritableJitCode(JitCode* code)
: AutoWritableJitCode(code->runtimeFromMainThread(), code->raw(), code->bufferSize())
{}
~AutoWritableJitCode() {
// Restore executable permissions before clearing the runtime flag.
ExecutableAllocator::makeExecutable(addr_, size_);
rt_->toggleAutoWritableJitCodeActive(false);
}
};
enum ReprotectCode { Reprotect = true, DontReprotect = false };
class MOZ_STACK_CLASS MaybeAutoWritableJitCode
{
mozilla::Maybe<AutoWritableJitCode> awjc_;
public:
MaybeAutoWritableJitCode(void* addr, size_t size, ReprotectCode reprotect) {
if (reprotect)
awjc_.emplace(addr, size);
}
MaybeAutoWritableJitCode(JitCode* code, ReprotectCode reprotect) {
if (reprotect)
awjc_.emplace(code);
}
};
} // namespace jit
} // namespace js

View File

@ -68,6 +68,7 @@ class Linker
return nullptr;
if (masm.oom())
return fail(cx);
AutoWritableJitCode awjc(result, bytesNeeded);
code->copyFrom(masm);
masm.link(code);
if (masm.embedsNurseryPointers())

View File

@ -2250,11 +2250,18 @@ void
LIRGenerator::visitInterruptCheck(MInterruptCheck* ins)
{
// Implicit interrupt checks require asm.js signal handlers to be installed.
// They also require writable JIT code: reprotecting in patchIonBackedges
// would be expensive and using AutoWritableJitCode in the signal handler
// is complicated because there could be another AutoWritableJitCode on the
// stack.
LInstructionHelper<0, 0, 0>* lir;
if (GetJitContext()->runtime->canUseSignalHandlers())
if (GetJitContext()->runtime->canUseSignalHandlers() &&
!ExecutableAllocator::nonWritableJitCode)
{
lir = new(alloc()) LInterruptCheckImplicit();
else
} else {
lir = new(alloc()) LInterruptCheck();
}
add(lir, ins);
assignSafepoint(lir, ins);
}

View File

@ -531,7 +531,7 @@ Imm16::Imm16()
{ }
void
jit::PatchJump(CodeLocationJump& jump_, CodeLocationLabel label)
jit::PatchJump(CodeLocationJump& jump_, CodeLocationLabel label, ReprotectCode reprotect)
{
// We need to determine if this jump can fit into the standard 24+2 bit
// address or if we need a larger branch (or just need to use our pool
@ -545,11 +545,18 @@ jit::PatchJump(CodeLocationJump& jump_, CodeLocationLabel label)
int jumpOffset = label.raw() - jump_.raw();
if (BOffImm::IsInRange(jumpOffset)) {
// This instruction started off as a branch, and will remain one.
MaybeAutoWritableJitCode awjc(jump, sizeof(Instruction), reprotect);
Assembler::RetargetNearBranch(jump, jumpOffset, c);
} else {
// This instruction started off as a branch, but now needs to be demoted
// to an ldr.
uint8_t** slot = reinterpret_cast<uint8_t**>(jump_.jumpTableEntry());
// Ensure both the branch and the slot are writable.
MOZ_ASSERT(uintptr_t(slot) > uintptr_t(jump));
size_t size = uintptr_t(slot) - uintptr_t(jump) + sizeof(void*);
MaybeAutoWritableJitCode awjc(jump, size, reprotect);
Assembler::RetargetFarBranch(jump, slot, label.raw(), c);
}
}

View File

@ -1101,7 +1101,8 @@ class Operand
};
void
PatchJump(CodeLocationJump& jump_, CodeLocationLabel label);
PatchJump(CodeLocationJump& jump_, CodeLocationLabel label,
ReprotectCode reprotect = DontReprotect);
static inline void
PatchBackedge(CodeLocationJump& jump_, CodeLocationLabel label, JitRuntime::BackedgeTarget target)

View File

@ -493,8 +493,14 @@ class ABIArgGenerator
static const Register NonReturn_VolatileReg1;
};
static inline void PatchJump(CodeLocationJump&, CodeLocationLabel) { MOZ_CRASH(); }
static inline void
PatchJump(CodeLocationJump&, CodeLocationLabel, ReprotectCode reprotect = DontReprotect)
{
MOZ_CRASH();
}
static inline bool GetTempRegForIntArg(uint32_t, uint32_t, Register*) { MOZ_CRASH(); }
static inline
void PatchBackedge(CodeLocationJump& jump_, CodeLocationLabel label, JitRuntime::BackedgeTarget target)
{

View File

@ -175,9 +175,10 @@ Assembler::PatchableJumpAddress(JitCode* code, size_t index)
/* static */
void
Assembler::PatchJumpEntry(uint8_t* entry, uint8_t* target)
Assembler::PatchJumpEntry(uint8_t* entry, uint8_t* target, ReprotectCode reprotect)
{
uint8_t** index = (uint8_t**) (entry + SizeOfExtendedJump - sizeof(void*));
MaybeAutoWritableJitCode awjc(index, sizeof(void*), reprotect);
*index = target;
}

View File

@ -265,7 +265,7 @@ class Assembler : public AssemblerX86Shared
using AssemblerX86Shared::vmovq;
static uint8_t* PatchableJumpAddress(JitCode* code, size_t index);
static void PatchJumpEntry(uint8_t* entry, uint8_t* target);
static void PatchJumpEntry(uint8_t* entry, uint8_t* target, ReprotectCode reprotect);
Assembler()
: extendedJumpTable_(0)
@ -794,15 +794,20 @@ class Assembler : public AssemblerX86Shared
};
static inline void
PatchJump(CodeLocationJump jump, CodeLocationLabel label)
PatchJump(CodeLocationJump jump, CodeLocationLabel label, ReprotectCode reprotect = DontReprotect)
{
if (X86Encoding::CanRelinkJump(jump.raw(), label.raw())) {
MaybeAutoWritableJitCode awjc(jump.raw() - 8, 8, reprotect);
X86Encoding::SetRel32(jump.raw(), label.raw());
} else {
X86Encoding::SetRel32(jump.raw(), jump.jumpTableEntry());
Assembler::PatchJumpEntry(jump.jumpTableEntry(), label.raw());
{
MaybeAutoWritableJitCode awjc(jump.raw() - 8, 8, reprotect);
X86Encoding::SetRel32(jump.raw(), jump.jumpTableEntry());
}
Assembler::PatchJumpEntry(jump.jumpTableEntry(), label.raw(), reprotect);
}
}
static inline void
PatchBackedge(CodeLocationJump& jump_, CodeLocationLabel label, JitRuntime::BackedgeTarget target)
{

View File

@ -62,7 +62,8 @@ TraceDataRelocations(JSTracer* trc, uint8_t* buffer, CompactBufferReader& reader
layout.asBits = *word;
Value v = IMPL_TO_JSVAL(layout);
TraceManuallyBarrieredEdge(trc, &v, "ion-masm-value");
*word = JSVAL_TO_IMPL(v).asBits;
if (*word != JSVAL_TO_IMPL(v).asBits)
*word = JSVAL_TO_IMPL(v).asBits;
continue;
}
#endif

View File

@ -159,7 +159,7 @@ namespace js {
namespace jit {
static inline void
PatchJump(CodeLocationJump jump, CodeLocationLabel label)
PatchJump(CodeLocationJump jump, CodeLocationLabel label, ReprotectCode reprotect = DontReprotect)
{
#ifdef DEBUG
// Assert that we're overwriting a jump instruction, either:
@ -169,6 +169,7 @@ PatchJump(CodeLocationJump jump, CodeLocationLabel label)
MOZ_ASSERT(((*x >= 0x80 && *x <= 0x8F) && *(x - 1) == 0x0F) ||
(*x == 0xE9));
#endif
MaybeAutoWritableJitCode awjc(jump.raw() - 8, 8, reprotect);
X86Encoding::SetRel32(jump.raw(), label.raw());
}
static inline void

View File

@ -3590,13 +3590,11 @@ EscapeForShell(AutoCStringVector& argv)
static Vector<const char*, 4, js::SystemAllocPolicy> sPropagatedFlags;
#if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
// Records |flag| in sPropagatedFlags so it is passed along to nested shell
// invocations. Returns false on append failure (OOM).
static bool
PropagateFlagToNestedShells(const char* flag)
{
return sPropagatedFlags.append(flag);
}
#endif
static bool
NestedShell(JSContext* cx, unsigned argc, jsval* vp)
@ -6306,6 +6304,7 @@ main(int argc, char** argv, char** envp)
|| !op.addIntOption('\0', "baseline-warmup-threshold", "COUNT",
"Wait for COUNT calls or iterations before baseline-compiling "
"(default: 10)", -1)
|| !op.addBoolOption('\0', "non-writable-jitcode", "Allocate JIT code as non-writable memory.")
|| !op.addBoolOption('\0', "no-fpu", "Pretend CPU does not support floating-point operations "
"to test JIT codegen (no-op on platforms other than x86).")
|| !op.addBoolOption('\0', "no-sse3", "Pretend CPU does not support SSE3 instructions and above "
@ -6381,6 +6380,11 @@ main(int argc, char** argv, char** envp)
OOM_printAllocationCount = op.getBoolOption('O');
#endif
if (op.getBoolOption("non-writable-jitcode")) {
js::jit::ExecutableAllocator::nonWritableJitCode = true;
PropagateFlagToNestedShells("--non-writable-jitcode");
}
#ifdef JS_CODEGEN_X86
if (op.getBoolOption("no-fpu"))
js::jit::CPUInfo::SetFloatingPointDisabled();

View File

@ -15,7 +15,7 @@ JITFLAGS = {
'all': [
[], # no flags, normal baseline and ion
['--ion-eager', '--ion-offthread-compile=off'], # implies --baseline-eager
['--ion-eager', '--ion-offthread-compile=off',
['--ion-eager', '--ion-offthread-compile=off', '--non-writable-jitcode',
'--ion-check-range-analysis', '--ion-extra-checks', '--no-sse3', '--no-threads'],
['--baseline-eager'],
['--baseline-eager', '--no-fpu'],

View File

@ -212,6 +212,7 @@ JSRuntime::JSRuntime(JSRuntime* parentRuntime)
ctypesActivityCallback(nullptr),
offthreadIonCompilationEnabled_(true),
parallelParsingEnabled_(true),
autoWritableJitCodeActive_(false),
#ifdef DEBUG
enteredPolicy(nullptr),
#endif

View File

@ -1381,6 +1381,8 @@ struct JSRuntime : public JS::shadow::Runtime,
bool offthreadIonCompilationEnabled_;
bool parallelParsingEnabled_;
bool autoWritableJitCodeActive_;
public:
// Note: these values may be toggled dynamically (in response to about:config
@ -1398,6 +1400,12 @@ struct JSRuntime : public JS::shadow::Runtime,
return parallelParsingEnabled_;
}
// Tracks whether an AutoWritableJitCode scope is currently active. Asserts
// that such scopes are never nested and are only toggled from a thread that
// may access this runtime.
void toggleAutoWritableJitCodeActive(bool b) {
MOZ_ASSERT(autoWritableJitCodeActive_ != b, "AutoWritableJitCode should not be nested.");
MOZ_ASSERT(CurrentThreadCanAccessRuntime(this));
autoWritableJitCodeActive_ = b;
}
const JS::RuntimeOptions& options() const {
return options_;
}