Mirror of https://gitlab.winehq.org/wine/wine-gecko.git (synced 2024-09-13 09:24:08 -07:00)
Bug 852802 - Add incremental needsBarrier to the runtime and check it first; r=billm
--HG-- extra : rebase_source : b5af1fbd8f587e152e115f9c74222f195d0d97b5
commit 6295cc7946
parent a23a508328
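
The patch gives every write barrier a runtime-level fast path: JSRuntime now carries a needsBarrier_ flag, kept equal to the OR of all zones' flags (see AssertNeedsBarrierFlagsConsistent below), and the barrier entry points test it before consulting the zone. A minimal sketch of that check order, using simplified stand-in types rather than the real SpiderMonkey classes:

struct Zone { bool needsBarrier_ = false; };
struct Runtime { bool needsBarrier_ = false; };   // kept equal to the OR of all zones' flags

// Barrier guard in the style of IsIncrementalBarrierNeededOnGCThing below.
bool NeedsBarrier(const Runtime *rt, const Zone *zone)
{
    if (!rt->needsBarrier_)      // common case: one load and one branch, no zone lookup
        return false;
    return zone->needsBarrier_;  // an incremental GC is active somewhere: ask the zone
}

int main()
{
    Runtime rt;
    Zone zone;
    bool before = NeedsBarrier(&rt, &zone);        // false: no barriers requested
    rt.needsBarrier_ = zone.needsBarrier_ = true;  // as done by AutoGCSlice / StartVerifyPreBarriers
    bool during = NeedsBarrier(&rt, &zone);        // true once both flags are set
    return (before || !during) ? 1 : 0;
}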
@@ -7,6 +7,8 @@
 #ifndef js_heap_api_h___
 #define js_heap_api_h___
 
+#include "jspubtd.h"
+
 /* These values are private to the JS engine. */
 namespace js {
 namespace gc {
@@ -43,6 +45,7 @@ const size_t CellMask = CellSize - 1;
 /* These are magic constants derived from actual offsets in gc/Heap.h. */
 const size_t ChunkMarkBitmapOffset = 1032368;
 const size_t ChunkMarkBitmapBits = 129024;
+const size_t ChunkRuntimeOffset = ChunkSize - sizeof(void*);
 
 /*
  * Live objects are marked black. How many other additional colors are available
@@ -89,6 +92,15 @@ GetGCThingMarkBitmap(const void *thing)
     return reinterpret_cast<uintptr_t *>(addr);
 }
 
+static JS_ALWAYS_INLINE JS::shadow::Runtime *
+GetGCThingRuntime(const void *thing)
+{
+    uintptr_t addr = uintptr_t(thing);
+    addr &= ~js::gc::ChunkMask;
+    addr |= js::gc::ChunkRuntimeOffset;
+    return *reinterpret_cast<JS::shadow::Runtime **>(addr);
+}
+
 static JS_ALWAYS_INLINE void
 GetGCThingMarkWordAndMask(const void *thing, uint32_t color,
                           uintptr_t **wordp, uintptr_t *maskp)
@@ -139,6 +151,9 @@ GCThingIsMarkedGray(void *thing)
 static JS_ALWAYS_INLINE bool
 IsIncrementalBarrierNeededOnGCThing(void *thing, JSGCTraceKind kind)
 {
+    shadow::Runtime *rt = js::gc::GetGCThingRuntime(thing);
+    if (!rt->needsBarrier_)
+        return false;
     js::Zone *zone = GetGCThingZone(thing);
     return reinterpret_cast<shadow::Zone *>(zone)->needsBarrier_;
 }
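For context: GC chunks are naturally aligned, so GetGCThingRuntime above can reach the owning runtime with two bit operations: clear the low bits to get the chunk base, then read the runtime pointer stored in the chunk trailer at ChunkRuntimeOffset. A standalone sketch of that arithmetic, assuming the 1 MiB chunk size used by gc/Heap.h at the time (the example address is made up):

#include <cstdint>
#include <cstdio>

int main()
{
    // Assumed layout: 1 MiB, naturally aligned chunks with the JSRuntime*
    // stored in the last word (ChunkRuntimeOffset = ChunkSize - sizeof(void*)).
    const uintptr_t ChunkSize = uintptr_t(1) << 20;
    const uintptr_t ChunkMask = ChunkSize - 1;
    const uintptr_t ChunkRuntimeOffset = ChunkSize - sizeof(void *);

    uintptr_t thing = 0x7f3a2c1d4f60;                        // hypothetical GC thing address
    uintptr_t chunkBase = thing & ~ChunkMask;                // round down to the chunk start
    uintptr_t runtimeSlot = chunkBase | ChunkRuntimeOffset;  // address holding the JSRuntime*

    std::printf("chunk base   = %#lx\n", (unsigned long)chunkBase);
    std::printf("runtime slot = %#lx\n", (unsigned long)runtimeSlot);
    return 0;
}
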
@@ -71,7 +71,7 @@ inline void
 EncapsulatedValue::writeBarrierPre(const Value &value)
 {
 #ifdef JSGC_INCREMENTAL
-    if (value.isMarkable())
+    if (value.isMarkable() && runtime(value)->needsBarrier())
         writeBarrierPre(ZoneOfValue(value), value);
 #endif
 }
@@ -81,6 +81,7 @@ EncapsulatedValue::writeBarrierPre(Zone *zone, const Value &value)
 {
 #ifdef JSGC_INCREMENTAL
     if (zone->needsBarrier()) {
+        JS_ASSERT_IF(value.isMarkable(), runtime(value)->needsBarrier());
         Value tmp(value);
         js::gc::MarkValueUnbarriered(zone->barrierTracer(), &tmp, "write barrier");
         JS_ASSERT(tmp == value);
@@ -825,6 +825,7 @@ struct Chunk
 
 JS_STATIC_ASSERT(sizeof(Chunk) == ChunkSize);
 JS_STATIC_ASSERT(js::gc::ChunkMarkBitmapOffset == offsetof(Chunk, bitmap));
+JS_STATIC_ASSERT(js::gc::ChunkRuntimeOffset == offsetof(Chunk, info) + offsetof(ChunkInfo, runtime));
 
 inline uintptr_t
 ArenaHeader::address() const
@@ -534,6 +534,7 @@ gc::StartVerifyPreBarriers(JSRuntime *rt)
     rt->gcIncrementalState = MARK;
     rt->gcMarker.start();
 
+    rt->setNeedsBarrier(true);
     for (ZonesIter zone(rt); !zone.done(); zone.next()) {
         PurgeJITCaches(zone);
         zone->setNeedsBarrier(true, Zone::UpdateIon);
@@ -617,6 +618,7 @@ gc::EndVerifyPreBarriers(JSRuntime *rt)
         zone->setNeedsBarrier(false, Zone::UpdateIon);
         PurgeJITCaches(zone);
     }
+    rt->setNeedsBarrier(false);
 
     /*
      * We need to bump gcNumber so that the methodjit knows that jitcode has
@@ -456,7 +456,7 @@ void
 IonCode::writeBarrierPre(IonCode *code)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!code)
+    if (!code || !code->runtime()->needsBarrier())
         return;
 
     Zone *zone = code->zone();
@@ -632,7 +632,7 @@ typedef Vector<JS::Zone *, 1, SystemAllocPolicy> ZoneVector;
 
 } // namespace js
 
-struct JSRuntime : js::RuntimeFriendFields,
+struct JSRuntime : private JS::shadow::Runtime,
                    public js::MallocProvider<JSRuntime>
 {
     /*
@@ -642,11 +642,17 @@ struct JSRuntime : js::RuntimeFriendFields,
      * above for more details.
      *
      * NB: This field is statically asserted to be at offset
-     * sizeof(RuntimeFriendFields). See
+     * sizeof(js::shadow::Runtime). See
      * PerThreadDataFriendFields::getMainThread.
      */
     js::PerThreadData mainThread;
 
+    /*
+     * If non-zero, we were been asked to call the operation callback as soon
+     * as possible.
+     */
+    volatile int32_t interrupt;
+
     /* Default compartment. */
     JSCompartment *atomsCompartment;
 
@@ -1083,6 +1089,14 @@ struct JSRuntime : js::RuntimeFriendFields,
     volatile ptrdiff_t gcMallocBytes;
 
   public:
+    void setNeedsBarrier(bool needs) {
+        needsBarrier_ = needs;
+    }
+
+    bool needsBarrier() const {
+        return needsBarrier_;
+    }
+
     /*
      * The trace operations to trace embedding-specific GC roots. One is for
      * tracing through black roots and the other is for tracing through gray
@@ -3198,6 +3198,17 @@ FinishMarkingValidation(JSRuntime *rt)
 #endif
 }
 
+static void
+AssertNeedsBarrierFlagsConsistent(JSRuntime *rt)
+{
+#ifdef DEBUG
+    bool anyNeedsBarrier = false;
+    for (ZonesIter zone(rt); !zone.done(); zone.next())
+        anyNeedsBarrier |= zone->needsBarrier();
+    JS_ASSERT(rt->needsBarrier() == anyNeedsBarrier);
+#endif
+}
+
 static void
 DropStringWrappers(JSRuntime *rt)
 {
@@ -3320,6 +3331,8 @@ GetNextZoneGroup(JSRuntime *rt)
         zone->setGCState(Zone::NoGC);
         zone->gcGrayRoots.clearAndFree();
     }
+    rt->setNeedsBarrier(false);
+    AssertNeedsBarrierFlagsConsistent(rt);
 
     for (GCCompartmentGroupIter comp(rt); !comp.done(); comp.next()) {
         ArrayBufferObject::resetArrayBufferList(comp);
@@ -4085,6 +4098,8 @@ ResetIncrementalGC(JSRuntime *rt, const char *reason)
         zone->setNeedsBarrier(false, Zone::UpdateIon);
         zone->setGCState(Zone::NoGC);
     }
+    rt->setNeedsBarrier(false);
+    AssertNeedsBarrierFlagsConsistent(rt);
 
     rt->gcIncrementalState = NO_INCREMENTAL;
 
@@ -4161,19 +4176,25 @@ AutoGCSlice::AutoGCSlice(JSRuntime *rt)
             JS_ASSERT(!zone->needsBarrier());
         }
     }
+    rt->setNeedsBarrier(false);
+    AssertNeedsBarrierFlagsConsistent(rt);
 }
 
 AutoGCSlice::~AutoGCSlice()
 {
     /* We can't use GCZonesIter if this is the end of the last slice. */
+    bool haveBarriers = false;
     for (ZonesIter zone(runtime); !zone.done(); zone.next()) {
         if (zone->isGCMarking()) {
             zone->setNeedsBarrier(true, Zone::UpdateIon);
             zone->allocator.arenas.prepareForIncrementalGC(runtime);
+            haveBarriers = true;
         } else {
             zone->setNeedsBarrier(false, Zone::UpdateIon);
         }
     }
+    runtime->setNeedsBarrier(haveBarriers);
+    AssertNeedsBarrierFlagsConsistent(runtime);
 }
 
 static void
@@ -1664,7 +1664,7 @@ inline void
 TypeObject::writeBarrierPre(TypeObject *type)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!type)
+    if (!type || !type->runtime()->needsBarrier())
         return;
 
     JS::Zone *zone = type->zone();
@@ -1698,7 +1698,7 @@ inline void
 TypeNewScript::writeBarrierPre(TypeNewScript *newScript)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!newScript)
+    if (!newScript || !newScript->fun->runtime()->needsBarrier())
         return;
 
     JS::Zone *zone = newScript->fun->zone();
@@ -214,6 +214,20 @@ typedef JSBool (*JSInitCallback)(void);
 
 #ifdef __cplusplus
 
+namespace JS {
+namespace shadow {
+
+struct Runtime
+{
+    /* Restrict zone access during Minor GC. */
+    bool needsBarrier_;
+
+    Runtime() : needsBarrier_(false) {}
+};
+
+} /* namespace shadow */
+} /* namespace JS */
+
 namespace js {
 
 class Allocator;
@@ -300,21 +314,6 @@ struct ContextFriendFields {
 #endif
 };
 
-struct RuntimeFriendFields {
-    /*
-     * If non-zero, we were been asked to call the operation callback as soon
-     * as possible.
-     */
-    volatile int32_t interrupt;
-
-    RuntimeFriendFields()
-      : interrupt(0) { }
-
-    static const RuntimeFriendFields *get(const JSRuntime *rt) {
-        return reinterpret_cast<const RuntimeFriendFields *>(rt);
-    }
-};
-
 class PerThreadData;
 
 struct PerThreadDataFriendFields
@@ -323,7 +322,7 @@ struct PerThreadDataFriendFields
     // Note: this type only exists to permit us to derive the offset of
     // the perThread data within the real JSRuntime* type in a portable
    // way.
-    struct RuntimeDummy : RuntimeFriendFields
+    struct RuntimeDummy : JS::shadow::Runtime
     {
         struct PerThreadDummy {
             void *field1;
@@ -368,14 +367,14 @@ struct PerThreadDataFriendFields
     }
 
     static inline PerThreadDataFriendFields *getMainThread(JSRuntime *rt) {
-        // mainThread must always appear directly after |RuntimeFriendFields|.
+        // mainThread must always appear directly after |JS::shadow::Runtime|.
         // Tested by a JS_STATIC_ASSERT in |jsfriendapi.cpp|
         return reinterpret_cast<PerThreadDataFriendFields *>(
             reinterpret_cast<char*>(rt) + RuntimeMainThreadOffset);
     }
 
     static inline const PerThreadDataFriendFields *getMainThread(const JSRuntime *rt) {
-        // mainThread must always appear directly after |RuntimeFriendFields|.
+        // mainThread must always appear directly after |JS::shadow::Runtime|.
         // Tested by a JS_STATIC_ASSERT in |jsfriendapi.cpp|
         return reinterpret_cast<const PerThreadDataFriendFields *>(
             reinterpret_cast<const char*>(rt) + RuntimeMainThreadOffset);
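A note on the layout trick these hunks (together with the JSRuntime base-class change above) rely on: JSRuntime now derives privately from JS::shadow::Runtime as its first base, so code that only sees the small shadow definition can read needsBarrier_ by reinterpreting a runtime pointer, and the getMainThread assert keeps the offsets honest. A self-contained sketch of the pattern, with made-up names (FullRuntime, shadow_cast) rather than the real SpiderMonkey types:

#include <cassert>

namespace shadow {
struct Runtime {
    bool needsBarrier_;
    Runtime() : needsBarrier_(false) {}
};
} // namespace shadow

// The real runtime derives from the shadow first, so both share an address in practice.
struct FullRuntime : private shadow::Runtime {
    void setNeedsBarrier(bool b) { needsBarrier_ = b; }
    // ... many more fields the embedder never sees ...
};

static const shadow::Runtime *shadow_cast(const FullRuntime *rt) {
    return reinterpret_cast<const shadow::Runtime *>(rt);
}

int main() {
    FullRuntime rt;
    assert(!shadow_cast(&rt)->needsBarrier_);   // flag starts clear
    rt.setNeedsBarrier(true);
    assert(shadow_cast(&rt)->needsBarrier_);    // visible through the shadow view
    return 0;
}
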
@@ -180,7 +180,7 @@ inline void
 JSScript::writeBarrierPre(js::RawScript script)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!script)
+    if (!script || !script->runtime()->needsBarrier())
         return;
 
     JS::Zone *zone = script->zone();
@@ -391,7 +391,7 @@ js::ObjectImpl::writeBarrierPre(ObjectImpl *obj)
      * This would normally be a null test, but TypeScript::global uses 0x1 as a
      * special value.
      */
-    if (IsNullTaggedPointer(obj))
+    if (IsNullTaggedPointer(obj) || !obj->runtime()->needsBarrier())
         return;
 
     Zone *zone = obj->zone();
@@ -406,7 +406,7 @@ inline void
 Shape::writeBarrierPre(RawShape shape)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!shape)
+    if (!shape || !shape->runtime()->needsBarrier())
         return;
 
     JS::Zone *zone = shape->zone();
@@ -449,7 +449,7 @@ inline void
 BaseShape::writeBarrierPre(RawBaseShape base)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!base)
+    if (!base || !base->runtime()->needsBarrier())
         return;
 
     JS::Zone *zone = base->zone();
@@ -107,7 +107,7 @@ inline void
 JSString::writeBarrierPre(JSString *str)
 {
 #ifdef JSGC_INCREMENTAL
-    if (!str)
+    if (!str || !str->runtime()->needsBarrier())
         return;
 
     JS::Zone *zone = str->zone();