#ifndef gc_GCRuntime_h
#define gc_GCRuntime_h
#include "mozilla/Atomics.h"
#include "mozilla/EnumSet.h"
#include "mozilla/Maybe.h"
#include "mozilla/TimeStamp.h"
#include "gc/ArenaList.h"
#include "gc/AtomMarking.h"
#include "gc/GCMarker.h"
#include "gc/GCParallelTask.h"
#include "gc/Nursery.h"
#include "gc/Scheduling.h"
#include "gc/Statistics.h"
#include "gc/StoreBuffer.h"
#include "js/GCAnnotations.h"
#include "js/UniquePtr.h"
#include "vm/AtomsTable.h"
namespace js {
class AutoAccessAtomsZone;
class AutoLockGC;
class AutoLockGCBgAlloc;
class AutoLockHelperThreadState;
class VerifyPreTracer;
namespace gc {
using BlackGrayEdgeVector = Vector<TenuredCell*, 0, SystemAllocPolicy>;
using ZoneVector = Vector<JS::Zone*, 4, SystemAllocPolicy>;
class AutoCallGCCallbacks;
class AutoGCSession;
class AutoRunParallelTask;
class AutoTraceSession;
class MarkingValidator;
struct MovingTracer;
enum class ShouldCheckThresholds;
class SweepGroupsIter;
class WeakCacheSweepIterator;
// Result of one slice of an incremental operation: either the budget ran out
// before the work completed (NotFinished) or the work is done (Finished).
enum IncrementalProgress { NotFinished = 0, Finished };

// Interface for a unit of incremental sweeping work.
//
// run() performs work until it either finishes or exhausts its budget and
// reports which via IncrementalProgress. assertFinished() lets an action
// verify that it has no work outstanding. shouldSkip() allows an action to
// opt out entirely; the default is to never skip.
template <typename... Args>
struct SweepAction {
  // Defaulted virtual destructor so derived actions are destroyed correctly
  // when deleted through a SweepAction pointer.
  virtual ~SweepAction() = default;

  virtual IncrementalProgress run(Args... args) = 0;
  virtual void assertFinished() const = 0;
  virtual bool shouldSkip() { return false; }
};
// A pool of chunks kept as a linked list with a cached element count.
// The list links themselves are maintained by pop/push/remove, which are
// defined out of line.
class ChunkPool {
  Chunk* head_ = nullptr;
  size_t count_ = 0;

 public:
  ChunkPool() = default;
  ~ChunkPool() {
  }

  // True when the pool holds no chunks.
  bool empty() const { return head_ == nullptr; }

  // Number of chunks currently in the pool.
  size_t count() const { return count_; }

  // First chunk in the pool; must not be called on an empty pool.
  Chunk* head() {
    MOZ_ASSERT(head_);
    return head_;
  }

  Chunk* pop();
  void push(Chunk* chunk);
  Chunk* remove(Chunk* chunk);

#ifdef DEBUG
  bool contains(Chunk* chunk) const;
  bool verify() const;
#endif

  // Forward iterator over the chunks of a pool.
  class Iter {
    Chunk* current_;

   public:
    explicit Iter(ChunkPool& pool) : current_(pool.head_) {}
    bool done() const { return current_ == nullptr; }
    void next();
    Chunk* get() const { return current_; }
    operator Chunk*() const { return get(); }
    Chunk* operator->() const { return get(); }
  };
};
// Parallel task that performs sweeping work off the main thread; the work
// itself lives in run(), defined out of line.
class BackgroundSweepTask : public GCParallelTaskHelper<BackgroundSweepTask> {
 public:
  explicit BackgroundSweepTask(JSRuntime* rt) : GCParallelTaskHelper(rt) {}

  void run();
};
// Parallel task that frees queued memory off the main thread; the work itself
// lives in run(), defined out of line.
class BackgroundFreeTask : public GCParallelTaskHelper<BackgroundFreeTask> {
 public:
  explicit BackgroundFreeTask(JSRuntime* rt) : GCParallelTaskHelper(rt) {}

  void run();
};
// Parallel task that allocates chunks into the given pool in the background.
// The pool reference is guarded by the GC lock (GCLockData); enabled_ is
// fixed at construction time.
class BackgroundAllocTask : public GCParallelTaskHelper<BackgroundAllocTask> {
  // Guarded by the GC lock.
  GCLockData<ChunkPool&> chunkPool_;

  const bool enabled_;

 public:
  BackgroundAllocTask(JSRuntime* rt, ChunkPool& pool);
  bool enabled() const { return enabled_; }

  void run();
};
// Parallel task that decommits a set of chunks in the background. Callers
// hand over the chunks via setChunksToScan() before the task is started.
class BackgroundDecommitTask
    : public GCParallelTaskHelper<BackgroundDecommitTask> {
 public:
  using ChunkVector = mozilla::Vector<Chunk*>;

  explicit BackgroundDecommitTask(JSRuntime* rt) : GCParallelTaskHelper(rt) {}
  void setChunksToScan(ChunkVector& chunks);

  void run();

 private:
  MainThreadOrGCTaskData<ChunkVector> toDecommit;
};
// A callback function pointer paired with the opaque user data to pass it.
// Both fields use the MainThreadOrGCTaskData access-checking wrapper.
template <typename F>
struct Callback {
  MainThreadOrGCTaskData<F> op;
  MainThreadOrGCTaskData<void*> data;

  Callback() : op(nullptr), data(nullptr) {}
  Callback(F op, void* data) : op(op), data(data) {}
};

// A main-thread-only vector of registered callbacks of a given type.
template <typename F>
using CallbackVector =
    MainThreadData<Vector<Callback<F>, 4, SystemAllocPolicy>>;
// Presents two underlying iterators as one sequence: every element of the
// first iterator, followed by every element of the second. Both iterator
// types must provide done(), next() and get().
template <typename T, typename Iter0, typename Iter1>
class ChainedIter {
  Iter0 first_;
  Iter1 second_;

 public:
  ChainedIter(const Iter0& iter0, const Iter1& iter1)
      : first_(iter0), second_(iter1) {}

  // Exhausted only once both underlying iterators are.
  bool done() const { return first_.done() && second_.done(); }

  void next() {
    MOZ_ASSERT(!done());
    if (first_.done()) {
      // The first iterator is spent; advance the second.
      MOZ_ASSERT(!second_.done());
      second_.next();
      return;
    }
    first_.next();
  }

  T get() const {
    MOZ_ASSERT(!done());
    if (first_.done()) {
      MOZ_ASSERT(!second_.done());
      return second_.get();
    }
    return first_.get();
  }

  operator T() const { return get(); }
  T operator->() const { return get(); }
};
// Map from a rooted Value's location to the name it was registered with.
typedef HashMap<Value*, const char*, DefaultHasher<Value*>, SystemAllocPolicy>
    RootedValueMap;

// A set of allocation kinds, stored as a bitmask in a uint32_t.
using AllocKinds = mozilla::EnumSet<AllocKind, uint32_t>;

// A non-copyable singly linked list of zones, supporting append, front
// removal, and wholesale transfer from another list. The list operations are
// defined out of line.
class ZoneList {
  // Sentinel value used to mark list termination (see check()).
  static Zone* const End;

  Zone* head;
  Zone* tail;

 public:
  ZoneList();
  ~ZoneList();

  bool isEmpty() const;
  Zone* front() const;

  void append(Zone* zone);
  void transferFrom(ZoneList& other);
  Zone* removeFront();
  void clear();

 private:
  explicit ZoneList(Zone* singleZone);
  // Debug validation of the list's internal invariants.
  void check() const;

  // Not copyable: lists own their link structure.
  ZoneList(const ZoneList& other) = delete;
  ZoneList& operator=(const ZoneList& other) = delete;
};
// The per-JSRuntime object that owns and coordinates all garbage collection
// state: the zones, chunk pools, the nursery and store buffer, marking and
// sweeping state, scheduling data, registered callbacks, and the background
// GC tasks.
class GCRuntime {
 public:
  explicit GCRuntime(JSRuntime* rt);
  MOZ_MUST_USE bool init(uint32_t maxbytes, uint32_t maxNurseryBytes);
  void finishRoots();
  void finish();

  // Zeal-mode queries. Defined inline at the end of this file; they are
  // constant-false/no-op stubs when JS_GC_ZEAL is not defined.
  inline bool hasZealMode(ZealMode mode);
  inline void clearZealMode(ZealMode mode);
  inline bool upcomingZealousGC();
  inline bool needZealousGC();
  inline bool hasIncrementalTwoSliceZealMode();

  // Root registration (stored in rootsHash below).
  MOZ_MUST_USE bool addRoot(Value* vp, const char* name);
  void removeRoot(Value* vp);
  void setMarkStackLimit(size_t limit, AutoLockGC& lock);

  // GC parameter access; requires the GC lock.
  MOZ_MUST_USE bool setParameter(JSGCParamKey key, uint32_t value,
                                 AutoLockGC& lock);
  void resetParameter(JSGCParamKey key, AutoLockGC& lock);
  uint32_t getParameter(JSGCParamKey key, const AutoLockGC& lock);

  // Requesting and running collections.
  MOZ_MUST_USE bool triggerGC(JS::GCReason reason);
  void maybeAllocTriggerZoneGC(Zone* zone, const AutoLockGC& lock);
  bool triggerZoneGC(Zone* zone, JS::GCReason reason, size_t usedBytes,
                     size_t thresholdBytes);
  void maybeGC(Zone* zone);
  bool gcIfRequested();
  void gc(JSGCInvocationKind gckind, JS::GCReason reason);
  void startGC(JSGCInvocationKind gckind, JS::GCReason reason,
               int64_t millis = 0);
  void gcSlice(JS::GCReason reason, int64_t millis = 0);
  void finishGC(JS::GCReason reason);
  void abortGC();
  void startDebugGC(JSGCInvocationKind gckind, SliceBudget& budget);
  void debugGCSlice(SliceBudget& budget);
  void triggerFullGCForAtoms(JSContext* cx);
  void runDebugGC();
  void notifyRootsRemoved();

  // Whether root tracing is for a moving GC (MarkRuntime) or a plain
  // traversal (TraceRuntime).
  enum TraceOrMarkRuntime { TraceRuntime, MarkRuntime };
  void traceRuntime(JSTracer* trc, AutoTraceSession& session);
  void traceRuntimeForMinorGC(JSTracer* trc, AutoGCSession& session);

  void purgeRuntimeForMinorGC();

  void shrinkBuffers();
  void onOutOfMallocMemory();
  void onOutOfMallocMemory(const AutoLockGC& lock);

#ifdef JS_GC_ZEAL
  // Zeal-mode control surface, used by testing functions and the shell.
  const uint32_t* addressOfZealModeBits() { return &zealModeBits.refNoCheck(); }
  void getZealBits(uint32_t* zealBits, uint32_t* frequency,
                   uint32_t* nextScheduled);
  void setZeal(uint8_t zeal, uint32_t frequency);
  void unsetZeal(uint8_t zeal);
  bool parseAndSetZeal(const char* str);
  void setNextScheduled(uint32_t count);
  void verifyPreBarriers();
  void maybeVerifyPreBarriers(bool always);
  bool selectForMarking(JSObject* object);
  void clearSelectedForMarking();
  void setDeterministic(bool enable);
#endif

  // Hand out the next unique cell id. Ids start above zero and strictly
  // increase; the counter is atomic (see nextCellUniqueId_ below).
  uint64_t nextCellUniqueId() {
    MOZ_ASSERT(nextCellUniqueId_ > 0);
    uint64_t uid = ++nextCellUniqueId_;
    return uid;
  }

#ifdef DEBUG
  bool shutdownCollectedEverything() const { return arenasEmptyAtShutdown; }
#endif

 public:
  // Current incremental collection phase and phase-derived predicates.
  State state() const { return incrementalState; }
  bool isHeapCompacting() const { return state() == State::Compact; }
  bool isForegroundSweeping() const { return state() == State::Sweep; }
  bool isBackgroundSweeping() { return sweepTask.isRunning(); }
  void waitBackgroundSweepEnd();
  void waitBackgroundSweepOrAllocEnd() {
    waitBackgroundSweepEnd();
    allocTask.cancelAndWait();
  }
  void waitBackgroundFreeEnd();

  void lockGC() { lock.lock(); }
  void unlockGC() { lock.unlock(); }

#ifdef DEBUG
  bool currentThreadHasLockedGC() const { return lock.ownedByCurrentThread(); }
#endif

  void setAlwaysPreserveCode() { alwaysPreserveCode = true; }

  // Incremental GC enablement. An incremental GC runs only when the mode is
  // JSGC_MODE_INCREMENTAL and it has not been disallowed.
  bool isIncrementalGCAllowed() const { return incrementalAllowed; }
  void disallowIncrementalGC() { incrementalAllowed = false; }

  bool isIncrementalGCEnabled() const {
    return mode == JSGC_MODE_INCREMENTAL && incrementalAllowed;
  }
  bool isIncrementalGCInProgress() const { return state() != State::NotActive; }

  bool isCompactingGCEnabled() const;
  bool isShrinkingGC() const { return invocationKind == GC_SHRINK; }

  bool initSweepActions();

  // Registration of embedder-supplied root tracers.
  void setGrayRootsTracer(JSTraceDataOp traceOp, void* data);
  MOZ_MUST_USE bool addBlackRootsTracer(JSTraceDataOp traceOp, void* data);
  void removeBlackRootsTracer(JSTraceDataOp traceOp, void* data);

  int32_t getMallocBytes() const { return mallocCounter.bytes(); }
  size_t maxMallocBytesAllocated() const { return mallocCounter.maxBytes(); }
  void setMaxMallocBytes(size_t value, const AutoLockGC& lock);

  // Account for nbytes of malloc memory. If this pushes the counter over a
  // threshold that has not already fired, request a TOO_MUCH_MALLOC GC and
  // record the trigger. Returns true only when a GC was actually requested.
  bool updateMallocCounter(size_t nbytes) {
    mallocCounter.update(nbytes);
    TriggerKind trigger = mallocCounter.shouldTriggerGC(tunables);
    if (MOZ_LIKELY(trigger == NoTrigger) ||
        trigger <= mallocCounter.triggered()) {
      return false;
    }
    if (!triggerGC(JS::GCReason::TOO_MUCH_MALLOC)) {
      return false;
    }
    stats().recordTrigger(mallocCounter.bytes(), mallocCounter.maxBytes());
    mallocCounter.recordTrigger(trigger);
    return true;
  }
  void updateMallocCountersOnGCStart();

  // Registration and invocation of embedder callbacks.
  void setGCCallback(JSGCCallback callback, void* data);
  void callGCCallback(JSGCStatus status) const;
  void setObjectsTenuredCallback(JSObjectsTenuredCallback callback, void* data);
  void callObjectsTenuredCallback();
  MOZ_MUST_USE bool addFinalizeCallback(JSFinalizeCallback callback,
                                        void* data);
  void removeFinalizeCallback(JSFinalizeCallback func);
  MOZ_MUST_USE bool addWeakPointerZonesCallback(
      JSWeakPointerZonesCallback callback, void* data);
  void removeWeakPointerZonesCallback(JSWeakPointerZonesCallback callback);
  MOZ_MUST_USE bool addWeakPointerCompartmentCallback(
      JSWeakPointerCompartmentCallback callback, void* data);
  void removeWeakPointerCompartmentCallback(
      JSWeakPointerCompartmentCallback callback);
  JS::GCSliceCallback setSliceCallback(JS::GCSliceCallback callback);
  JS::GCNurseryCollectionCallback setNurseryCollectionCallback(
      JS::GCNurseryCollectionCallback callback);
  JS::DoCycleCollectionCallback setDoCycleCollectionCallback(
      JS::DoCycleCollectionCallback callback);
  void callDoCycleCollectionCallback(JSContext* cx);

  void setFullCompartmentChecks(bool enable);

  JS::Zone* getCurrentSweepGroup() { return currentSweepGroup; }

  // Collection counters. Note that incMinorGcNumber also bumps the overall
  // GC number; incMajorGcNumber does not.
  uint64_t gcNumber() const { return number; }

  uint64_t minorGCCount() const { return minorGCNumber; }
  void incMinorGcNumber() {
    ++minorGCNumber;
    ++number;
  }

  uint64_t majorGCCount() const { return majorGCNumber; }
  void incMajorGcNumber() { ++majorGCNumber; }

  int64_t defaultSliceBudget() const { return defaultTimeBudget_; }

  // Properties of the current (or most recent) collection.
  bool isIncrementalGc() const { return isIncremental; }
  bool isFullGc() const { return isFull; }
  bool isCompactingGc() const { return isCompacting; }

  bool areGrayBitsValid() const { return grayBitsValid; }
  void setGrayBitsInvalid() { grayBitsValid = false; }

  bool majorGCRequested() const {
    return majorGCTriggerReason != JS::GCReason::NO_REASON;
  }

  bool fullGCForAtomsRequested() const { return fullGCForAtomsRequested_; }

  double computeHeapGrowthFactor(size_t lastBytes);
  size_t computeTriggerBytes(double growthFactor, size_t lastBytes);

  // The GC mode; setting it also propagates the mode to the marker.
  JSGCMode gcMode() const { return mode; }
  void setGCMode(JSGCMode m) {
    mode = m;
    marker.setGCMode(mode);
  }

  inline void updateOnFreeArenaAlloc(const ChunkInfo& info);
  inline void updateOnArenaFree();

  // Chunk pool accessors; all require the GC lock, witnessed by the
  // AutoLockGC argument.
  ChunkPool& fullChunks(const AutoLockGC& lock) { return fullChunks_.ref(); }
  ChunkPool& availableChunks(const AutoLockGC& lock) {
    return availableChunks_.ref();
  }
  ChunkPool& emptyChunks(const AutoLockGC& lock) { return emptyChunks_.ref(); }
  const ChunkPool& fullChunks(const AutoLockGC& lock) const {
    return fullChunks_.ref();
  }
  const ChunkPool& availableChunks(const AutoLockGC& lock) const {
    return availableChunks_.ref();
  }
  const ChunkPool& emptyChunks(const AutoLockGC& lock) const {
    return emptyChunks_.ref();
  }
  typedef ChainedIter<Chunk*, ChunkPool::Iter, ChunkPool::Iter>
      NonEmptyChunksIter;
  // Iterates the available chunks followed by the full chunks.
  NonEmptyChunksIter allNonEmptyChunks(const AutoLockGC& lock) {
    return NonEmptyChunksIter(ChunkPool::Iter(availableChunks(lock)),
                              ChunkPool::Iter(fullChunks(lock)));
  }
  Chunk* getOrAllocChunk(AutoLockGCBgAlloc& lock);
  void recycleChunk(Chunk* chunk, const AutoLockGC& lock);

#ifdef JS_GC_ZEAL
  void startVerifyPreBarriers();
  void endVerifyPreBarriers();
  void finishVerifier();
  bool isVerifyPreBarriersEnabled() const { return !!verifyPreData; }
  bool shouldYieldForZeal(ZealMode mode);
#else
  bool isVerifyPreBarriersEnabled() const { return false; }
#endif

  // Queue LifoAlloc blocks and nursery buffers to be freed, either on a
  // background thread or after the next minor GC.
  void queueUnusedLifoBlocksForFree(LifoAlloc* lifo);
  void queueAllLifoBlocksForFree(LifoAlloc* lifo);
  void queueAllLifoBlocksForFreeAfterMinorGC(LifoAlloc* lifo);
  void queueBuffersForFreeAfterMinorGC(Nursery::BufferSet& buffers);

  void releaseArena(Arena* arena, const AutoLockGC& lock);

  void releaseHeldRelocatedArenas();
  void releaseHeldRelocatedArenasWithoutUnlocking(const AutoLockGC& lock);

  // Allocator entry points, templated on whether allocation may GC.
  template <AllowGC allowGC>
  MOZ_MUST_USE bool checkAllocatorState(JSContext* cx, AllocKind kind);
  template <AllowGC allowGC>
  JSObject* tryNewNurseryObject(JSContext* cx, size_t thingSize,
                                size_t nDynamicSlots, const Class* clasp);
  template <AllowGC allowGC>
  static JSObject* tryNewTenuredObject(JSContext* cx, AllocKind kind,
                                       size_t thingSize, size_t nDynamicSlots);
  template <typename T, AllowGC allowGC>
  static T* tryNewTenuredThing(JSContext* cx, AllocKind kind, size_t thingSize);
  template <AllowGC allowGC>
  JSString* tryNewNurseryString(JSContext* cx, size_t thingSize,
                                AllocKind kind);
  static TenuredCell* refillFreeListInGC(Zone* zone, AllocKind thingKind);

  void setParallelAtomsAllocEnabled(bool enabled);

  void bufferGrayRoots();

  // Start/join a parallel task under the helper-thread lock, attributing its
  // time to the given stats phase.
  void startTask(GCParallelTask& task, gcstats::PhaseKind phase,
                 AutoLockHelperThreadState& locked);
  void joinTask(GCParallelTask& task, gcstats::PhaseKind phase,
                AutoLockHelperThreadState& locked);

  void mergeRealms(JS::Realm* source, JS::Realm* target);

 private:
  // Whether an incremental slice completed normally or had to reset the
  // ongoing incremental collection.
  enum IncrementalResult { ResetIncremental = 0, Ok };

  void deleteEmptyZone(Zone* zone);

  // Arena and chunk allocation internals.
  friend class ArenaLists;
  Chunk* pickChunk(AutoLockGCBgAlloc& lock);
  Arena* allocateArena(Chunk* chunk, Zone* zone, AllocKind kind,
                       ShouldCheckThresholds checkThresholds,
                       const AutoLockGC& lock);

  MOZ_MUST_USE bool gcIfNeededAtAllocation(JSContext* cx);
  template <typename T>
  static void checkIncrementalZoneState(JSContext* cx, T* t);
  static TenuredCell* refillFreeListFromAnyThread(JSContext* cx,
                                                  AllocKind thingKind);
  static TenuredCell* refillFreeListFromMainThread(JSContext* cx,
                                                   AllocKind thingKind);
  static TenuredCell* refillFreeListFromHelperThread(JSContext* cx,
                                                     AllocKind thingKind);

  friend class BackgroundDecommitTask;
  ChunkPool expireEmptyChunkPool(const AutoLockGC& lock);
  void freeEmptyChunks(const AutoLockGC& lock);
  void prepareToFreeChunk(ChunkInfo& info);

  friend class BackgroundAllocTask;
  bool wantBackgroundAllocation(const AutoLockGC& lock) const;
  bool startBackgroundAllocTaskIfIdle();

  void requestMajorGC(JS::GCReason reason);
  SliceBudget defaultBudget(JS::GCReason reason, int64_t millis);
  IncrementalResult budgetIncrementalGC(bool nonincrementalByAPI,
                                        JS::GCReason reason,
                                        SliceBudget& budget);
  IncrementalResult resetIncrementalGC(AbortReason reason);

  void checkCanCallAPI();
  MOZ_MUST_USE bool checkIfGCAllowedInCurrentState(JS::GCReason reason);

  gcstats::ZoneGCStats scanZonesBeforeGC();

  // Top-level collection driver and the slice machinery it runs.
  void collect(bool nonincrementalByAPI, SliceBudget budget,
               JS::GCReason reason) JS_HAZ_GC_CALL;
  MOZ_MUST_USE IncrementalResult gcCycle(bool nonincrementalByAPI,
                                         SliceBudget budget,
                                         JS::GCReason reason);
  bool shouldRepeatForDeadZone(JS::GCReason reason);
  void incrementalSlice(SliceBudget& budget, JS::GCReason reason,
                        AutoGCSession& session);
  MOZ_MUST_USE bool shouldCollectNurseryForSlice(bool nonincrementalByAPI,
                                                 SliceBudget& budget);

  friend class AutoCallGCCallbacks;
  void maybeCallGCCallback(JSGCStatus status);

  // Marking.
  void pushZealSelectedObjects();
  void purgeRuntime();
  MOZ_MUST_USE bool beginMarkPhase(JS::GCReason reason, AutoGCSession& session);
  bool prepareZonesForCollection(JS::GCReason reason, bool* isFullOut);
  bool shouldPreserveJITCode(JS::Realm* realm,
                             const mozilla::TimeStamp& currentTime,
                             JS::GCReason reason, bool canAllocateMoreCode);
  void startBackgroundFreeAfterMinorGC();
  void traceRuntimeForMajorGC(JSTracer* trc, AutoGCSession& session);
  void traceRuntimeAtoms(JSTracer* trc, const AutoAccessAtomsZone& atomsAccess);
  void traceKeptAtoms(JSTracer* trc);
  void traceRuntimeCommon(JSTracer* trc, TraceOrMarkRuntime traceOrMark);
  void maybeDoCycleCollection();
  void markCompartments();
  IncrementalProgress markUntilBudgetExhausted(SliceBudget& sliceBudget,
                                               gcstats::PhaseKind phase);
  void drainMarkStack();
  template <class ZoneIterT>
  void markWeakReferences(gcstats::PhaseKind phase);
  void markWeakReferencesInCurrentGroup(gcstats::PhaseKind phase);
  template <class ZoneIterT>
  void markGrayRoots(gcstats::PhaseKind phase);
  void markBufferedGrayRoots(JS::Zone* zone);
  void markAllWeakReferences(gcstats::PhaseKind phase);
  void markAllGrayReferences(gcstats::PhaseKind phase);

  // Sweeping; zones are swept in groups (see SweepGroupsIter).
  void beginSweepPhase(JS::GCReason reason, AutoGCSession& session);
  void groupZonesForSweeping(JS::GCReason reason);
  MOZ_MUST_USE bool findSweepGroupEdges();
  void getNextSweepGroup();
  IncrementalProgress markGrayReferencesInCurrentGroup(FreeOp* fop,
                                                       SliceBudget& budget);
  IncrementalProgress endMarkingSweepGroup(FreeOp* fop, SliceBudget& budget);
  void markIncomingCrossCompartmentPointers(MarkColor color);
  IncrementalProgress beginSweepingSweepGroup(FreeOp* fop, SliceBudget& budget);
  void sweepDebuggerOnMainThread(FreeOp* fop);
  void sweepJitDataOnMainThread(FreeOp* fop);
  IncrementalProgress endSweepingSweepGroup(FreeOp* fop, SliceBudget& budget);
  IncrementalProgress performSweepActions(SliceBudget& sliceBudget);
  IncrementalProgress sweepTypeInformation(FreeOp* fop, SliceBudget& budget,
                                           Zone* zone);
  IncrementalProgress releaseSweptEmptyArenas(FreeOp* fop, SliceBudget& budget,
                                              Zone* zone);
  void startSweepingAtomsTable();
  IncrementalProgress sweepAtomsTable(FreeOp* fop, SliceBudget& budget);
  IncrementalProgress sweepWeakCaches(FreeOp* fop, SliceBudget& budget);
  IncrementalProgress finalizeAllocKind(FreeOp* fop, SliceBudget& budget,
                                        Zone* zone, AllocKind kind);
  IncrementalProgress sweepShapeTree(FreeOp* fop, SliceBudget& budget,
                                     Zone* zone);
  void endSweepPhase(bool lastGC);
  bool allCCVisibleZonesWereCollected() const;
  void sweepZones(FreeOp* fop, bool destroyingRuntime);
  void decommitAllWithoutUnlocking(const AutoLockGC& lock);
  void startDecommit();
  void queueZonesAndStartBackgroundSweep(ZoneList& zones);
  void sweepFromBackgroundThread(AutoLockHelperThreadState& lock);
  void startBackgroundFree();
  void freeFromBackgroundThread(AutoLockHelperThreadState& lock);
  void sweepBackgroundThings(ZoneList& zones, LifoAlloc& freeBlocks);
  void assertBackgroundSweepingFinished();

  // Compacting.
  bool shouldCompact();
  void beginCompactPhase();
  IncrementalProgress compactPhase(JS::GCReason reason,
                                   SliceBudget& sliceBudget,
                                   AutoGCSession& session);
  void endCompactPhase();
  void sweepTypesAfterCompacting(Zone* zone);
  void sweepZoneAfterCompacting(Zone* zone);
  MOZ_MUST_USE bool relocateArenas(Zone* zone, JS::GCReason reason,
                                   Arena*& relocatedListOut,
                                   SliceBudget& sliceBudget);
  void updateTypeDescrObjects(MovingTracer* trc, Zone* zone);
  void updateCellPointers(Zone* zone, AllocKinds kinds, size_t bgTaskCount);
  void updateAllCellPointers(MovingTracer* trc, Zone* zone);
  void updateZonePointersToRelocatedCells(Zone* zone);
  void updateRuntimePointersToRelocatedCells(AutoGCSession& session);
  void protectAndHoldArenas(Arena* arenaList);
  void unprotectHeldRelocatedArenas();
  void releaseRelocatedArenas(Arena* arenaList);
  void releaseRelocatedArenasWithoutUnlocking(Arena* arenaList,
                                              const AutoLockGC& lock);
  void finishCollection();

  // Incremental marking validation (see MarkingValidator).
  void computeNonIncrementalMarkingForValidation(AutoGCSession& session);
  void validateIncrementalMarking();
  void finishMarkingValidation();

#ifdef DEBUG
  void checkForCompartmentMismatches();
#endif

  void callFinalizeCallbacks(FreeOp* fop, JSFinalizeStatus status) const;
  void callWeakPointerZonesCallbacks() const;
  void callWeakPointerCompartmentCallbacks(JS::Compartment* comp) const;

 public:
  JSRuntime* const rt;

  UnprotectedData<JS::Zone*> systemZone;

 private:
  MainThreadOrGCTaskData<ZoneVector> zones_;

 public:
  ZoneVector& zones() { return zones_.ref(); }

  // The zone containing the runtime's atoms; write-once at initialization.
  WriteOnceData<Zone*> atomsZone;

 private:
  UnprotectedData<gcstats::Statistics> stats_;

 public:
  gcstats::Statistics& stats() { return stats_.ref(); }

  GCMarker marker;

  Vector<JS::GCCellPtr, 0, SystemAllocPolicy> unmarkGrayStack;

  // Heap accounting and scheduling state.
  HeapSize heapSize;
  GCSchedulingTunables tunables;
  GCSchedulingState schedulingState;
  AtomMarkingRuntime atomMarking;

 private:
  // Chunk pools; all guarded by the GC lock.
  GCLockData<ChunkPool> emptyChunks_;
  GCLockData<ChunkPool> availableChunks_;
  GCLockData<ChunkPool> fullChunks_;

  MainThreadData<RootedValueMap> rootsHash;

  // Atomic counter backing nextCellUniqueId().
  mozilla::Atomic<uint64_t, mozilla::ReleaseAcquire,
                  mozilla::recordreplay::Behavior::DontPreserve>
      nextCellUniqueId_;

  mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire,
                  mozilla::recordreplay::Behavior::DontPreserve>
      numArenasFreeCommitted;
  MainThreadData<VerifyPreTracer*> verifyPreData;

 private:
  UnprotectedData<bool> chunkAllocationSinceLastGC;
  MainThreadData<mozilla::TimeStamp> lastGCTime;

  MainThreadData<JSGCMode> mode;

  // Count of live AutoEnterIteration guards (see class below).
  mozilla::Atomic<size_t, mozilla::ReleaseAcquire,
                  mozilla::recordreplay::Behavior::DontPreserve>
      numActiveZoneIters;

  MainThreadData<bool> cleanUpEverything;

  // State of the buffered gray roots (filled by bufferGrayRoots()): not yet
  // buffered, buffered successfully, or buffering failed.
  enum class GrayBufferState { Unused, Okay, Failed };
  MainThreadOrGCTaskData<GrayBufferState> grayBufferState;
  bool hasValidGrayRootsBuffer() const {
    return grayBufferState == GrayBufferState::Okay;
  }

  void resetBufferedGrayRoots() const;
  void clearBufferedGrayRoots() {
    grayBufferState = GrayBufferState::Unused;
    resetBufferedGrayRoots();
  }

  UnprotectedData<bool> grayBitsValid;

  // Reason for a pending major GC, or NO_REASON (see majorGCRequested()).
  mozilla::Atomic<JS::GCReason, mozilla::Relaxed,
                  mozilla::recordreplay::Behavior::DontPreserve>
      majorGCTriggerReason;

 private:
  MainThreadData<bool> fullGCForAtomsRequested_;

  // Collection counters, exposed via minorGCCount()/majorGCCount()/gcNumber().
  MainThreadData<uint64_t> minorGCNumber;
  MainThreadData<uint64_t> majorGCNumber;
  MainThreadData<uint64_t> number;

  // Properties of the current (or most recent) collection.
  MainThreadData<bool> isIncremental;
  MainThreadData<bool> isFull;
  MainThreadData<bool> isCompacting;
  MainThreadData<JSGCInvocationKind> invocationKind;
  MainThreadData<JS::GCReason> initialReason;

  // Incremental phase tracking: the current state and the state the
  // collection started in.
  MainThreadOrGCTaskData<State> incrementalState;
  MainThreadData<State> initialState;

#ifdef JS_GC_ZEAL
  MainThreadData<bool> useZeal;
#endif
  MainThreadData<bool> lastMarkSlice;

  MainThreadData<bool> safeToYield;

  MainThreadData<bool> sweepOnBackgroundThread;

  // Work queued for background processing; guarded by the helper-thread lock
  // where declared HelperThreadLockData.
  HelperThreadLockData<ZoneList> backgroundSweepZones;
  HelperThreadLockData<LifoAlloc> lifoBlocksToFree;
  MainThreadData<LifoAlloc> lifoBlocksToFreeAfterMinorGC;
  HelperThreadLockData<Nursery::BufferSet> buffersToFreeAfterMinorGC;

  // Incremental sweeping state: the current sweep group, the configured
  // sweep actions, and per-action iteration positions.
  MainThreadData<unsigned> sweepGroupIndex;
  MainThreadData<JS::Zone*> sweepGroups;
  MainThreadOrGCTaskData<JS::Zone*> currentSweepGroup;
  MainThreadData<UniquePtr<SweepAction<GCRuntime*, FreeOp*, SliceBudget&>>>
      sweepActions;
  MainThreadOrGCTaskData<JS::Zone*> sweepZone;
  MainThreadData<mozilla::Maybe<AtomsTable::SweepIterator>> maybeAtomsToSweep;
  MainThreadOrGCTaskData<JS::detail::WeakCacheBase*> sweepCache;
  MainThreadData<bool> hasMarkedGrayRoots;
  MainThreadData<bool> abortSweepAfterCurrentGroup;

#ifdef DEBUG
  // Cells to be checked non-gray once gray bits become valid again (see
  // AssertCellIsNotGray).
  MainThreadData<Vector<const Cell*, 0, SystemAllocPolicy>>
      cellsToAssertNotGray;
  friend void js::gc::detail::AssertCellIsNotGray(const Cell*);
#endif

  friend class SweepGroupsIter;
  friend class WeakCacheSweepIterator;

  // Compacting state.
  MainThreadData<bool> startedCompacting;
  MainThreadData<ZoneList> zonesToMaybeCompact;
  MainThreadData<Arena*> relocatedArenasToRelease;

#ifdef JS_GC_ZEAL
  MainThreadData<MarkingValidator*> markingValidator;
#endif

  // Default slice time budget, returned by defaultSliceBudget().
  MainThreadData<int64_t> defaultTimeBudget_;

  MainThreadData<bool> incrementalAllowed;
  MainThreadData<bool> compactingEnabled;
  MainThreadData<bool> rootsRemoved;

#ifdef JS_GC_ZEAL
  // Zeal-mode state: active modes are a bitmask over ZealMode values.
  static_assert(size_t(ZealMode::Count) <= 32,
                "Too many zeal modes to store in a uint32_t");
  MainThreadData<uint32_t> zealModeBits;
  MainThreadData<int> zealFrequency;
  MainThreadData<int> nextScheduled;
  MainThreadData<bool> deterministicOnly;
  MainThreadData<int> incrementalLimit;
  MainThreadData<Vector<JSObject*, 0, SystemAllocPolicy>> selectedForMarking;
#endif

  MainThreadData<bool> fullCompartmentChecks;

  MainThreadData<uint32_t> gcCallbackDepth;

  // Registered embedder callbacks.
  Callback<JSGCCallback> gcCallback;
  Callback<JS::DoCycleCollectionCallback> gcDoCycleCollectionCallback;
  Callback<JSObjectsTenuredCallback> tenuredCallback;
  CallbackVector<JSFinalizeCallback> finalizeCallbacks;
  CallbackVector<JSWeakPointerZonesCallback> updateWeakPointerZonesCallbacks;
  CallbackVector<JSWeakPointerCompartmentCallback>
      updateWeakPointerCompartmentCallbacks;

  MemoryCounter mallocCounter;

  // Embedder-registered root tracers.
  CallbackVector<JSTraceDataOp> blackRootTracers;
  Callback<JSTraceDataOp> grayRootTracer;

  MainThreadData<bool> alwaysPreserveCode;

#ifdef DEBUG
  MainThreadData<bool> arenasEmptyAtShutdown;
#endif

  friend class js::AutoLockGC;
  friend class js::AutoLockGCBgAlloc;
  // The GC lock, taken via lockGC()/unlockGC() or the AutoLockGC helpers.
  js::Mutex lock;

  friend class BackgroundSweepTask;
  friend class BackgroundFreeTask;

  // Background tasks owned by this runtime.
  BackgroundAllocTask allocTask;
  BackgroundSweepTask sweepTask;
  BackgroundFreeTask freeTask;
  BackgroundDecommitTask decommitTask;

  MainThreadData<SortedArenaList> incrementalSweepList;

 private:
  MainThreadData<Nursery> nursery_;
  MainThreadData<gc::StoreBuffer> storeBuffer_;

 public:
  Nursery& nursery() { return nursery_.ref(); }
  gc::StoreBuffer& storeBuffer() { return storeBuffer_.ref(); }

  // Raw addresses of nursery allocation state; these bypass the thread
  // access checks via refNoCheck(). Presumably consumed by generated jit
  // code -- confirm against callers.
  void* addressOfNurseryPosition() {
    return nursery_.refNoCheck().addressOfPosition();
  }
  const void* addressOfNurseryCurrentEnd() {
    return nursery_.refNoCheck().addressOfCurrentEnd();
  }
  const void* addressOfStringNurseryCurrentEnd() {
    return nursery_.refNoCheck().addressOfCurrentStringEnd();
  }
  uint32_t* addressOfNurseryAllocCount() {
    return stats().addressOfAllocsSinceMinorGCNursery();
  }

  void minorGC(JS::GCReason reason,
               gcstats::PhaseKind phase = gcstats::PhaseKind::MINOR_GC)
      JS_HAZ_GC_CALL;

  // Evict the nursery by running a minor GC under the EVICT_NURSERY phase.
  void evictNursery(JS::GCReason reason = JS::GCReason::EVICT_NURSERY) {
    minorGC(reason, gcstats::PhaseKind::EVICT_NURSERY);
  }

  friend class MarkingValidator;
  friend class AutoEnterIteration;
};
class MOZ_RAII AutoEnterIteration {
GCRuntime* gc;
public:
explicit AutoEnterIteration(GCRuntime* gc_) : gc(gc_) {
++gc->numActiveZoneIters;
}
~AutoEnterIteration() {
MOZ_ASSERT(gc->numActiveZoneIters);
--gc->numActiveZoneIters;
}
};
#ifdef JS_GC_ZEAL

// True if the given zeal mode is currently active (bit set in zealModeBits).
inline bool GCRuntime::hasZealMode(ZealMode mode) {
  static_assert(size_t(ZealMode::Limit) < sizeof(zealModeBits) * 8,
                "Zeal modes must fit in zealModeBits");
  return zealModeBits & (1 << uint32_t(mode));
}

inline void GCRuntime::clearZealMode(ZealMode mode) {
  zealModeBits &= ~(1 << uint32_t(mode));
  MOZ_ASSERT(!hasZealMode(mode));
}

// True when the very next allocation countdown step would schedule a GC.
inline bool GCRuntime::upcomingZealousGC() { return nextScheduled == 1; }

// Decrement the scheduled-GC countdown and report whether it just hit zero.
// For modes that repeat on a fixed cadence, re-arm the countdown from
// zealFrequency.
inline bool GCRuntime::needZealousGC() {
  if (nextScheduled > 0 && --nextScheduled == 0) {
    if (hasZealMode(ZealMode::Alloc) || hasZealMode(ZealMode::GenerationalGC) ||
        hasZealMode(ZealMode::IncrementalMultipleSlices) ||
        hasZealMode(ZealMode::Compact) || hasIncrementalTwoSliceZealMode()) {
      nextScheduled = zealFrequency;
    }
    return true;
  }
  return false;
}

// True if any of the "yield at a specific point" zeal modes is active.
inline bool GCRuntime::hasIncrementalTwoSliceZealMode() {
  return hasZealMode(ZealMode::YieldBeforeMarking) ||
         hasZealMode(ZealMode::YieldBeforeSweeping) ||
         hasZealMode(ZealMode::YieldBeforeSweepingAtoms) ||
         hasZealMode(ZealMode::YieldBeforeSweepingCaches) ||
         hasZealMode(ZealMode::YieldBeforeSweepingTypes) ||
         hasZealMode(ZealMode::YieldBeforeSweepingObjects) ||
         hasZealMode(ZealMode::YieldBeforeSweepingNonObjects) ||
         hasZealMode(ZealMode::YieldBeforeSweepingShapeTrees) ||
         hasZealMode(ZealMode::YieldWhileGrayMarking);
}

#else

// Without JS_GC_ZEAL, all zeal queries are constant-false no-ops.
inline bool GCRuntime::hasZealMode(ZealMode mode) { return false; }
inline void GCRuntime::clearZealMode(ZealMode mode) {}
inline bool GCRuntime::upcomingZealousGC() { return false; }
inline bool GCRuntime::needZealousGC() { return false; }
inline bool GCRuntime::hasIncrementalTwoSliceZealMode() { return false; }

#endif
}
}
#endif