/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef js_HeapAPI_h
#define js_HeapAPI_h

#include "mozilla/Atomics.h"
#include "mozilla/BitSet.h"

#include <type_traits>

#include "js/AllocPolicy.h"
#include "js/GCAnnotations.h"
#include "js/HashTable.h"
#include "js/shadow/String.h"  // JS::shadow::String
#include "js/shadow/Symbol.h"  // JS::shadow::Symbol
#include "js/shadow/Zone.h"    // JS::shadow::Zone
#include "js/TraceKind.h"
#include "js/TypeDecls.h"
/* These values are private to the JS engine. */

JS_PUBLIC_API bool CurrentThreadCanAccessZone(JS::Zone* zone);
// To prevent false sharing, some data structures are aligned to a typical
// cache line size.
static constexpr size_t TypicalCacheLineSize = 64;
const size_t ArenaShift = 12;
const size_t ArenaSize = size_t(1) << ArenaShift;
const size_t ArenaMask = ArenaSize - 1;
#if defined(XP_DARWIN) && defined(__aarch64__)
const size_t PageShift = 14;
#else
const size_t PageShift = 12;
#endif
// Expected page size, so that we can initialize ArenasPerPage at compile time.
// The actual system page size should be queried by SystemPageSize().
const size_t PageSize = size_t(1) << PageShift;
const size_t PageMask = PageSize - 1;
constexpr size_t ArenasPerPage = PageSize / ArenaSize;
const size_t ChunkShift = 20;
const size_t ChunkSize = size_t(1) << ChunkShift;
const size_t ChunkMask = ChunkSize - 1;
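
// Illustrative sketch (not part of the engine API): with the constants above,
// ArenaSize is 4 KiB and ChunkSize is 1 MiB, so for any GC-controlled address
// |addr| the enclosing arena and chunk start at:
//
//   uintptr_t arenaStart = addr & ~ArenaMask;  // round down to 4 KiB
//   uintptr_t chunkStart = addr & ~ChunkMask;  // round down to 1 MiB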
const size_t CellAlignShift = 3;
const size_t CellAlignBytes = size_t(1) << CellAlignShift;
const size_t CellAlignMask = CellAlignBytes - 1;

const size_t CellBytesPerMarkBit = CellAlignBytes;
const size_t MarkBitsPerCell = 2;
/*
 * The minimum cell size ends up as twice the cell alignment because the mark
 * bitmap contains one bit per CellBytesPerMarkBit bytes (which is equal to
 * CellAlignBytes) and we need two mark bits per cell.
 */
const size_t MinCellSize = CellBytesPerMarkBit * MarkBitsPerCell;
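
// Worked example (illustrative): CellAlignBytes is 1 << 3 == 8 bytes, so
// CellBytesPerMarkBit is 8 and MinCellSize is 8 * 2 == 16 bytes.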
/*
 * The mark bitmap has one bit for each possible cell start position. This
 * wastes some space for larger GC things but allows us to avoid division by
 * the cell's size when accessing the bitmap.
 */
const size_t ArenaBitmapBits = ArenaSize / CellBytesPerMarkBit;
const size_t ArenaBitmapBytes = HowMany(ArenaBitmapBits, 8);
const size_t ArenaBitmapWords = HowMany(ArenaBitmapBits, JS_BITS_PER_WORD);
enum class ChunkKind : uint8_t {
// The base class for all GC chunks, either in the nursery or in the tenured
// heap memory. This structure is locatable from any GC pointer by aligning it
// down to the chunk size.
class ChunkBase {
  // Initialize a tenured heap chunk.
  explicit ChunkBase(JSRuntime* rt) {
    MOZ_ASSERT((uintptr_t(this) & ChunkMask) == 0);
    initBaseForArenaChunk(rt);
  }

  void initBaseForArenaChunk(JSRuntime* rt) {
    runtime = rt;
    storeBuffer = nullptr;
    kind = ChunkKind::TenuredArenas;
    nurseryChunkIndex = UINT8_MAX;
  }
  // Initialize a nursery chunk.
  ChunkBase(JSRuntime* rt, StoreBuffer* sb, ChunkKind kind, uint8_t chunkIndex)
      : storeBuffer(sb),
        runtime(rt),
        kind(kind),
        nurseryChunkIndex(chunkIndex) {
    MOZ_ASSERT(isNurseryChunk());
    MOZ_ASSERT((uintptr_t(this) & ChunkMask) == 0);
    MOZ_ASSERT(storeBuffer);
  }

  ChunkBase(JSRuntime* rt, ChunkKind kind)
      : storeBuffer(nullptr),
        runtime(rt),
        kind(kind),
        nurseryChunkIndex(UINT8_MAX) {}
  ChunkKind getKind() const {
    MOZ_ASSERT_IF(storeBuffer, isNurseryChunk());
    MOZ_ASSERT_IF(!storeBuffer, isTenuredChunk());
    return kind;
  }

  bool isNurseryChunk() const {
    return kind == ChunkKind::NurseryToSpace ||
           kind == ChunkKind::NurseryFromSpace;
  }

  bool isTenuredChunk() const {
    return kind == ChunkKind::TenuredArenas ||
           kind == ChunkKind::MediumBuffers || kind == ChunkKind::LargeBuffer;
  }

  // The store buffer for pointers from tenured things to things in this
  // chunk. Will be non-null if and only if this is a nursery chunk.
  StoreBuffer* storeBuffer;

  // Provide quick access to the runtime from absolutely anywhere.
  JSRuntime* runtime;

  uint8_t nurseryChunkIndex;
};
// Information about tenured heap chunks containing arenas.
struct ArenaChunkInfo {
  friend class ChunkPool;
  ArenaChunk* next = nullptr;
  ArenaChunk* prev = nullptr;

  /* Number of free arenas, either committed or decommitted. */
  uint32_t numArenasFree;

  /* Number of free, committed arenas. */
  uint32_t numArenasFreeCommitted;
/*
 * Calculating ArenasPerChunk:
 *
 * To figure out how many Arenas will fit in a chunk we need to know how much
 * extra space is available after we allocate the header data. This is a
 * problem because the header size depends on the number of arenas in the
 * chunk.
 *
 * The dependent fields are markBits, decommittedPages and
 * freeCommittedArenas. markBits needs ArenaBitmapBytes bytes per arena,
 * decommittedPages needs one bit per page and freeCommittedArenas needs one
 * bit per arena.
 *
 * We can calculate an approximate value by dividing the number of bits of
 * free space in the chunk by the number of bits needed per arena. This is an
 * approximation because it doesn't take into account the fact that the
 * variable sized fields must be rounded up to a whole number of words, or any
 * padding the compiler adds between fields.
 *
 * Fortunately, for the chunk and arena size parameters we use this
 * approximation turns out to be correct. If it were not we might need to
 * adjust the arena count down by one to allow more space for the padding.
 */
const size_t BitsPerPageWithHeaders =
    (ArenaSize + ArenaBitmapBytes) * ArenasPerPage * CHAR_BIT + ArenasPerPage +
    1;
const size_t ChunkBitsAvailable =
    (ChunkSize - sizeof(ChunkBase) - sizeof(ArenaChunkInfo)) * CHAR_BIT;
const size_t PagesPerChunk = ChunkBitsAvailable / BitsPerPageWithHeaders;
const size_t ArenasPerChunk = PagesPerChunk * ArenasPerPage;
const size_t FreeCommittedBits = ArenasPerChunk;
const size_t DecommitBits = PagesPerChunk;
const size_t BitsPerArenaWithHeaders =
    (ArenaSize + ArenaBitmapBytes) * CHAR_BIT +
    (DecommitBits / ArenasPerChunk) + 1;
const size_t CalculatedChunkSizeRequired =
    sizeof(ChunkBase) + sizeof(ArenaChunkInfo) +
    RoundUp(ArenasPerChunk * ArenaBitmapBytes, sizeof(uintptr_t)) +
    RoundUp(FreeCommittedBits, sizeof(uint32_t) * CHAR_BIT) / CHAR_BIT +
    RoundUp(DecommitBits, sizeof(uint32_t) * CHAR_BIT) / CHAR_BIT +
    ArenasPerChunk * ArenaSize;
static_assert(CalculatedChunkSizeRequired <= ChunkSize,
              "Calculated ArenasPerChunk is too large");
const size_t CalculatedChunkPadSize = ChunkSize - CalculatedChunkSizeRequired;
static_assert(CalculatedChunkPadSize * CHAR_BIT < BitsPerArenaWithHeaders,
              "Calculated ArenasPerChunk is too small");

static_assert(ArenasPerChunk == 252,
              "Do not accidentally change our heap's density.");
const size_t FirstArenaOffset = ChunkSize - ArenasPerChunk * ArenaSize;
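
// Worked example (illustrative sketch only, assuming 4 KiB pages and roughly
// 48 bytes of ChunkBase + ArenaChunkInfo headers):
//
//   BitsPerPageWithHeaders  = (4096 + 64) * 1 * 8 + 1 + 1 = 33282
//   ChunkBitsAvailable     ~= (1048576 - 48) * 8           = 8388224
//   PagesPerChunk           = 8388224 / 33282              = 252
//   ArenasPerChunk          = 252 * 1                      = 252
//   FirstArenaOffset        = 1048576 - 252 * 4096         = 16384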
// Mark bitmaps are atomic because they can be written by gray unmarking on the
// main thread while read by sweeping on a background thread. The former does
// not affect the result of the latter.
using MarkBitmapWord = mozilla::Atomic<uintptr_t, mozilla::Relaxed>;
static constexpr size_t MarkBitmapWordBits = sizeof(MarkBitmapWord) * CHAR_BIT;
/*
 * Live objects are marked black or gray. Everything reachable from a JS root
 * is marked black. Objects marked gray are eligible for cycle collection.
 *
 *    BlackBit:     GrayOrBlackBit:  Color:
 *        0               0          white
 *        0               1          gray
 *        1               0          black
 *        1               1          black
 */
enum class ColorBit : uint32_t { BlackBit = 0, GrayOrBlackBit = 1 };

// Mark colors. Order is important here: the greater the value, the 'more
// marked' a cell is.
enum class MarkColor : uint8_t { Gray = 1, Black = 2 };
// Mark bitmap for a tenured heap chunk.
template <size_t BytesPerMarkBit, size_t FirstThingOffset>
class alignas(TypicalCacheLineSize) MarkBitmap {
  static constexpr size_t ByteCount =
      (ChunkSize - FirstThingOffset) / BytesPerMarkBit;
  static constexpr size_t WordCount = HowMany(ByteCount, MarkBitmapWordBits);
  MarkBitmapWord bitmap[WordCount];

  static constexpr size_t FirstThingAdjustmentBits =
      FirstThingOffset / BytesPerMarkBit;

  static constexpr size_t FirstThingAdjustmentWords =
      FirstThingAdjustmentBits / MarkBitmapWordBits;
  MOZ_ALWAYS_INLINE void getMarkWordAndMask(const void* cell, ColorBit colorBit,
                                            MarkBitmapWord** wordp,
                                            uintptr_t* maskp) {
    // Note: the JIT pre-barrier trampolines inline this code. Update
    // MacroAssembler::emitPreBarrierFastPath code too when making changes here!

    MOZ_ASSERT(size_t(colorBit) < MarkBitsPerCell);

    size_t offset = uintptr_t(cell) & ChunkMask;
    MOZ_ASSERT(offset >= FirstThingOffset);

    const size_t bit = offset / BytesPerMarkBit + size_t(colorBit);
    size_t word = bit / MarkBitmapWordBits - FirstThingAdjustmentWords;
    MOZ_ASSERT(word < WordCount);
    *wordp = &bitmap[word];
    *maskp = uintptr_t(1) << (bit % MarkBitmapWordBits);
  }
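
  // Worked example (illustrative, assuming the ChunkMarkBitmap instantiation
  // below and 64-bit mark bitmap words): for the first cell in the first arena
  // of a chunk (offset == FirstThingOffset == 16384) and ColorBit::BlackBit,
  // bit == 16384 / 8 + 0 == 2048, word == 2048 / 64 - 32 == 0, and the mask is
  // uintptr_t(1) << (2048 % 64) == 1.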
  // The following are not exported and are defined in gc/Heap.h:
  MOZ_ALWAYS_INLINE bool markBit(const void* cell, ColorBit colorBit) {
    MarkBitmapWord* word;
    uintptr_t mask;
    getMarkWordAndMask(cell, colorBit, &word, &mask);
    return *word & mask;
  }
  MOZ_ALWAYS_INLINE bool isMarkedAny(const void* cell) {
    return markBit(cell, ColorBit::BlackBit) ||
           markBit(cell, ColorBit::GrayOrBlackBit);
  }

  MOZ_ALWAYS_INLINE bool isMarkedBlack(const void* cell) {
    // Return true if BlackBit is set.
    return markBit(cell, ColorBit::BlackBit);
  }

  MOZ_ALWAYS_INLINE bool isMarkedGray(const void* cell) {
    // Return true if GrayOrBlackBit is set and BlackBit is not set.
    return !markBit(cell, ColorBit::BlackBit) &&
           markBit(cell, ColorBit::GrayOrBlackBit);
  }
  inline bool markIfUnmarked(const void* cell, MarkColor color);
  inline bool markIfUnmarkedAtomic(const void* cell, MarkColor color);
  inline void markBlack(const void* cell);
  inline void markBlackAtomic(const void* cell);
  inline void copyMarkBit(TenuredCell* dst, const TenuredCell* src,
                          ColorBit colorBit);
  inline void unmark(const void* cell);
  inline void unmarkOneBit(const void* cell, ColorBit colorBit);
  inline MarkBitmapWord* arenaBits(Arena* arena);

  inline void copyFrom(const MarkBitmap& other);
};

using ChunkMarkBitmap = MarkBitmap<CellBytesPerMarkBit, FirstArenaOffset>;
// Bitmap with one bit per page used for decommitted page set.
using ChunkPageBitmap = mozilla::BitSet<PagesPerChunk, uint32_t>;

// Bitmap with one bit per arena used for free committed arena set.
using ChunkArenaBitmap = mozilla::BitSet<ArenasPerChunk, uint32_t>;
// Base class for a tenured heap chunk containing fixed size arenas.
class ArenaChunkBase : public ChunkBase {
  ArenaChunkInfo info;
  ChunkMarkBitmap markBits;
  ChunkArenaBitmap freeCommittedArenas;
  ChunkPageBitmap decommittedPages;

  explicit ArenaChunkBase(JSRuntime* runtime) : ChunkBase(runtime) {
    static_assert(sizeof(markBits) == ArenaBitmapBytes * ArenasPerChunk,
                  "Ensure our MarkBitmap actually covers all arenas.");
    info.numArenasFree = ArenasPerChunk;
  }

  void initAsCommitted();
  void initAsDecommitted();
};
static_assert(FirstArenaOffset ==
              RoundUp(sizeof(gc::ArenaChunkBase), ArenaSize));
/*
 * We sometimes use an index to refer to a cell in an arena. The index for a
 * cell is found by dividing by the cell alignment so not all indices refer to
 * valid cells.
 */
const size_t ArenaCellIndexBytes = CellAlignBytes;
const size_t MaxArenaCellIndex = ArenaSize / CellAlignBytes;
const size_t ChunkStoreBufferOffset = offsetof(ChunkBase, storeBuffer);
const size_t ChunkMarkBitmapOffset = offsetof(ArenaChunkBase, markBits);
// Hardcoded offsets into Arena class.
const size_t ArenaZoneOffset = 2 * sizeof(uint32_t);
const size_t ArenaHeaderSize = ArenaZoneOffset + 2 * sizeof(uintptr_t) +
                               sizeof(size_t) + sizeof(uintptr_t);
// The first word of a GC thing has certain requirements from the GC and is
// used to store flags in the low bits.
const size_t CellFlagBitsReservedForGC = 3;

// The first word can be used to store JSClass pointers for some thing kinds,
// so these must be suitably aligned.
const size_t JSClassAlignBytes = size_t(1) << CellFlagBitsReservedForGC;
/* When downcasting, ensure we are actually the right type. */
#ifdef DEBUG
extern JS_PUBLIC_API void AssertGCThingHasType(js::gc::Cell* cell,
                                               JS::TraceKind kind);
#else
inline void AssertGCThingHasType(js::gc::Cell* cell, JS::TraceKind kind) {}
#endif
MOZ_ALWAYS_INLINE bool IsInsideNursery(const js::gc::Cell* cell);
MOZ_ALWAYS_INLINE bool IsInsideNursery(const js::gc::TenuredCell* cell);
enum class HeapState {
  Idle,             // doing nothing with the GC heap
  Tracing,          // tracing the GC heap without collecting, e.g.
                    // IterateCompartments()
  MajorCollecting,  // doing a GC of the major heap
  MinorCollecting,  // doing a GC of the minor heap (nursery)
  CycleCollecting   // in the "Unlink" phase of cycle collection
};

JS_PUBLIC_API HeapState RuntimeHeapState();
static inline bool RuntimeHeapIsBusy() {
  return RuntimeHeapState() != HeapState::Idle;
}

static inline bool RuntimeHeapIsTracing() {
  return RuntimeHeapState() == HeapState::Tracing;
}

static inline bool RuntimeHeapIsMajorCollecting() {
  return RuntimeHeapState() == HeapState::MajorCollecting;
}

static inline bool RuntimeHeapIsMinorCollecting() {
  return RuntimeHeapState() == HeapState::MinorCollecting;
}

static inline bool RuntimeHeapIsCollecting(HeapState state) {
  return state == HeapState::MajorCollecting ||
         state == HeapState::MinorCollecting;
}

static inline bool RuntimeHeapIsCollecting() {
  return RuntimeHeapIsCollecting(RuntimeHeapState());
}

static inline bool RuntimeHeapIsCycleCollecting() {
  return RuntimeHeapState() == HeapState::CycleCollecting;
}
/*
 * This list enumerates the different types of conceptual stacks we have in
 * SpiderMonkey. In reality, they all share the C stack, but we allow different
 * stack limits depending on the type of code running.
 */
  StackForSystemCode,       // C++, such as the GC, running on behalf of the VM.
  StackForTrustedScript,    // Script running with trusted principals.
  StackForUntrustedScript,  // Script running with untrusted principals.
/*
 * Default maximum size for the generational nursery in bytes. This is the
 * initial value. In the browser this is configured by the
 * javascript.options.mem.nursery.max_kb pref.
 */
const uint32_t DefaultNurseryMaxBytes = 64 * js::gc::ChunkSize;

/* Default maximum heap size in bytes to pass to JS_NewContext(). */
const uint32_t DefaultHeapMaxBytes = 32 * 1024 * 1024;
/*
 * A GC pointer, tagged with the trace kind.
 *
 * In general, a GC pointer should be stored with an exact type. This class
 * is for use when that is not possible because a single pointer must point
 * to several kinds of GC thing.
 */
class JS_PUBLIC_API GCCellPtr {
  GCCellPtr() : GCCellPtr(nullptr) {}

  // Construction from a void* and trace kind.
  GCCellPtr(void* gcthing, JS::TraceKind traceKind)
      : ptr(checkedCast(gcthing, traceKind)) {}

  // Automatically construct a null GCCellPtr from nullptr.
  MOZ_IMPLICIT GCCellPtr(decltype(nullptr))
      : ptr(checkedCast(nullptr, JS::TraceKind::Null)) {}

  // Construction from an explicit type.
  template <typename T>
  explicit GCCellPtr(T* p)
      : ptr(checkedCast(p, JS::MapTypeToTraceKind<T>::kind)) {}
  explicit GCCellPtr(JSFunction* p)
      : ptr(checkedCast(p, JS::TraceKind::Object)) {}
  explicit GCCellPtr(JSScript* p)
      : ptr(checkedCast(p, JS::TraceKind::Script)) {}
  explicit GCCellPtr(const Value& v);
  JS::TraceKind kind() const {
    uintptr_t kindBits = ptr & OutOfLineTraceKindMask;
    if (kindBits != OutOfLineTraceKindMask) {
      return JS::TraceKind(kindBits);
    }
    return outOfLineKind();
  }
  // Allow GCCellPtr to be used in a boolean context.
  explicit operator bool() const {
    MOZ_ASSERT(bool(asCell()) == (kind() != JS::TraceKind::Null));
    return asCell();
  }
  // Simplify checks to the kind.
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  bool is() const {
    return kind() == JS::MapTypeToTraceKind<T>::kind;
  }
  // Conversions to more specific types must match the kind. Access to
  // further refined types is not allowed directly from a GCCellPtr.
  template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
  T& as() const {
    MOZ_ASSERT(kind() == JS::MapTypeToTraceKind<T>::kind);
    // We can't use static_cast here, because the fact that JSObject
    // inherits from js::gc::Cell is not part of the public API.
    return *reinterpret_cast<T*>(asCell());
  }
  // Return a pointer to the cell this |GCCellPtr| refers to, or |nullptr|.
  // (It would be more symmetrical with |to| for this to return a |Cell&|, but
  // the result can be |nullptr|, and null references are undefined behavior.)
  js::gc::Cell* asCell() const {
    return reinterpret_cast<js::gc::Cell*>(ptr & ~OutOfLineTraceKindMask);
  }

  // The CC's trace logger needs an identity that is XPIDL serializable.
  uint64_t unsafeAsInteger() const {
    return static_cast<uint64_t>(unsafeAsUIntPtr());
  }

  // Inline mark bitmap access requires direct pointer arithmetic.
  uintptr_t unsafeAsUIntPtr() const {
    MOZ_ASSERT(asCell());
    MOZ_ASSERT(!js::gc::IsInsideNursery(asCell()));
    return reinterpret_cast<uintptr_t>(asCell());
  }
  MOZ_ALWAYS_INLINE bool mayBeOwnedByOtherRuntime() const {
    if (!is<JSString>() && !is<JS::Symbol>()) {
      return false;
    }
    if (is<JSString>()) {
      return JS::shadow::String::isPermanentAtom(asCell());
    }
    MOZ_ASSERT(is<JS::Symbol>());
    return JS::shadow::Symbol::isWellKnownSymbol(asCell());
  }
  static uintptr_t checkedCast(void* p, JS::TraceKind traceKind) {
    auto* cell = static_cast<js::gc::Cell*>(p);
    MOZ_ASSERT((uintptr_t(p) & OutOfLineTraceKindMask) == 0);
    AssertGCThingHasType(cell, traceKind);
    // Store the trace kind in the bottom bits of the pointer for common kinds.
    uintptr_t kindBits = uintptr_t(traceKind);
    if (kindBits >= OutOfLineTraceKindMask) {
      kindBits = OutOfLineTraceKindMask;
    }
    return uintptr_t(p) | kindBits;
  }

  JS::TraceKind outOfLineKind() const;

  uintptr_t ptr;
};
// Unwraps the given GCCellPtr, calls the functor |f| with a template argument
// of the actual type of the pointer, and returns the result.
template <typename F>
auto MapGCThingTyped(GCCellPtr thing, F&& f) {
  switch (thing.kind()) {
#define JS_EXPAND_DEF(name, type, _, _1) \
  case JS::TraceKind::name:              \
    return f(&thing.as<type>());
    JS_FOR_EACH_TRACEKIND(JS_EXPAND_DEF);
#undef JS_EXPAND_DEF
    default:
      MOZ_CRASH("Invalid trace kind in MapGCThingTyped for GCCellPtr.");
  }
}
// Unwraps the given GCCellPtr and calls the functor |f| with a template
// argument of the actual type of the pointer. Doesn't return anything.
template <typename F>
void ApplyGCThingTyped(GCCellPtr thing, F&& f) {
  // This function doesn't do anything but is supplied for symmetry with other
  // MapGCThingTyped/ApplyGCThingTyped implementations that have to wrap the
  // functor to return a dummy value that is ignored.
  MapGCThingTyped(thing, f);
}
// These are defined in the toplevel namespace instead of within JS so that
// they won't shadow other operator== overloads (see bug 1456512).

inline bool operator==(JS::GCCellPtr ptr1, JS::GCCellPtr ptr2) {
  return ptr1.asCell() == ptr2.asCell();
}

inline bool operator!=(JS::GCCellPtr ptr1, JS::GCCellPtr ptr2) {
  return !(ptr1 == ptr2);
}
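
// Example (illustrative only, not part of this header): a GCCellPtr can hold
// any GC thing and be inspected via its trace kind, e.g. given a hypothetical
// JSObject* |someObject|:
//
//   JS::GCCellPtr cellPtr(someObject);
//   if (cellPtr.is<JSObject>()) {
//     JSObject& obj = cellPtr.as<JSObject>();
//     // ... use |obj| ...
//   }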
// `addr` must be an address within GC-controlled memory. Note that it cannot
// point just past GC-controlled memory.
static MOZ_ALWAYS_INLINE ChunkBase* GetGCAddressChunkBase(const void* addr) {
  auto* chunk = reinterpret_cast<ChunkBase*>(uintptr_t(addr) & ~ChunkMask);
  MOZ_ASSERT(chunk->runtime);
  MOZ_ASSERT(chunk->kind != ChunkKind::Invalid);
  return chunk;
}
static MOZ_ALWAYS_INLINE ChunkBase* GetCellChunkBase(const Cell* cell) {
  return GetGCAddressChunkBase(cell);
}
static MOZ_ALWAYS_INLINE ArenaChunkBase* GetCellChunkBase(
    const TenuredCell* cell) {
  auto* chunk = reinterpret_cast<ArenaChunkBase*>(uintptr_t(cell) & ~ChunkMask);
  MOZ_ASSERT(chunk->runtime);
  MOZ_ASSERT(chunk->kind == ChunkKind::TenuredArenas);
  return chunk;
}
static MOZ_ALWAYS_INLINE JS::Zone* GetTenuredGCThingZone(const void* ptr) {
  // This takes a void* because the compiler can't see type relationships in
  // this header. |ptr| must be a pointer to a tenured GC thing.
  const uintptr_t zone_addr = (uintptr_t(ptr) & ~ArenaMask) | ArenaZoneOffset;
  return *reinterpret_cast<JS::Zone**>(zone_addr);
}
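
// Illustrative arithmetic (assuming the constants above): ArenaZoneOffset is
// 2 * sizeof(uint32_t) == 8, so for a tenured cell at address |p| the zone
// pointer is read from (p & ~ArenaMask) + 8, i.e. 8 bytes into the arena
// header that contains the cell.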
static MOZ_ALWAYS_INLINE bool TenuredCellIsMarkedBlack(
    const TenuredCell* cell) {
  MOZ_ASSERT(!js::gc::IsInsideNursery(cell));

  ArenaChunkBase* chunk = GetCellChunkBase(cell);
  return chunk->markBits.isMarkedBlack(cell);
}
static MOZ_ALWAYS_INLINE bool NonBlackCellIsMarkedGray(
    const TenuredCell* cell) {
  // Return true if GrayOrBlackBit is set. Callers should check BlackBit first.
  MOZ_ASSERT(!js::gc::IsInsideNursery(cell));
  MOZ_ASSERT(!TenuredCellIsMarkedBlack(cell));

  ArenaChunkBase* chunk = GetCellChunkBase(cell);
  return chunk->markBits.markBit(cell, ColorBit::GrayOrBlackBit);
}
static MOZ_ALWAYS_INLINE bool TenuredCellIsMarkedGray(const TenuredCell* cell) {
  MOZ_ASSERT(!js::gc::IsInsideNursery(cell));
  ArenaChunkBase* chunk = GetCellChunkBase(cell);
  return chunk->markBits.isMarkedGray(cell);
}
static MOZ_ALWAYS_INLINE bool CellIsMarkedGray(const Cell* cell) {
  if (js::gc::IsInsideNursery(cell)) {
    return false;
  }
  return TenuredCellIsMarkedGray(reinterpret_cast<const TenuredCell*>(cell));
}
extern JS_PUBLIC_API bool CanCheckGrayBits(const TenuredCell* cell);

extern JS_PUBLIC_API bool CellIsMarkedGrayIfKnown(const TenuredCell* cell);

extern JS_PUBLIC_API void AssertCellIsNotGray(const Cell* cell);

extern JS_PUBLIC_API bool ObjectIsMarkedBlack(const JSObject* obj);
MOZ_ALWAYS_INLINE bool ChunkPtrHasStoreBuffer(const void* ptr) {
  return GetGCAddressChunkBase(ptr)->storeBuffer;
}

} /* namespace detail */
MOZ_ALWAYS_INLINE bool IsInsideNursery(const Cell* cell) {
  return detail::ChunkPtrHasStoreBuffer(cell);
}

MOZ_ALWAYS_INLINE bool IsInsideNursery(const TenuredCell* cell) {
  MOZ_ASSERT(!IsInsideNursery(reinterpret_cast<const Cell*>(cell)));
  return false;
}
// Return whether |cell| is in the region of the nursery currently being
// collected.
MOZ_ALWAYS_INLINE bool InCollectedNurseryRegion(const Cell* cell) {
  return detail::GetCellChunkBase(cell)->getKind() ==
         ChunkKind::NurseryFromSpace;
}
// Allow use before the compiler knows the derivation of JSObject, JSString,
// and JS::BigInt.
MOZ_ALWAYS_INLINE bool IsInsideNursery(const JSObject* obj) {
  return IsInsideNursery(reinterpret_cast<const Cell*>(obj));
}
MOZ_ALWAYS_INLINE bool IsInsideNursery(const JSString* str) {
  return IsInsideNursery(reinterpret_cast<const Cell*>(str));
}
MOZ_ALWAYS_INLINE bool IsInsideNursery(const JS::BigInt* bi) {
  return IsInsideNursery(reinterpret_cast<const Cell*>(bi));
}
MOZ_ALWAYS_INLINE bool InCollectedNurseryRegion(const JSObject* obj) {
  return InCollectedNurseryRegion(reinterpret_cast<const Cell*>(obj));
}
MOZ_ALWAYS_INLINE bool IsCellPointerValid(const void* ptr) {
  auto addr = uintptr_t(ptr);
  if (addr < ChunkSize || addr % CellAlignBytes != 0) {
    return false;
  }

  auto* cell = reinterpret_cast<const Cell*>(ptr);
  if (!IsInsideNursery(cell)) {
    return detail::GetTenuredGCThingZone(cell) != nullptr;
  }
  return true;
}

MOZ_ALWAYS_INLINE bool IsCellPointerValidOrNull(const void* cell) {
  if (!cell) {
    return true;
  }
  return IsCellPointerValid(cell);
}
extern JS_PUBLIC_API Zone* GetTenuredGCThingZone(GCCellPtr thing);

extern JS_PUBLIC_API Zone* GetNurseryCellZone(js::gc::Cell* cell);
static MOZ_ALWAYS_INLINE Zone* GetGCThingZone(GCCellPtr thing) {
  if (!js::gc::IsInsideNursery(thing.asCell())) {
    return js::gc::detail::GetTenuredGCThingZone(thing.asCell());
  }

  return GetNurseryCellZone(thing.asCell());
}

static MOZ_ALWAYS_INLINE Zone* GetStringZone(JSString* str) {
  if (!js::gc::IsInsideNursery(str)) {
    return js::gc::detail::GetTenuredGCThingZone(str);
  }

  return GetNurseryCellZone(reinterpret_cast<js::gc::Cell*>(str));
}
extern JS_PUBLIC_API Zone* GetObjectZone(JSObject* obj);
static MOZ_ALWAYS_INLINE bool GCThingIsMarkedGray(GCCellPtr thing) {
  js::gc::Cell* cell = thing.asCell();
  if (IsInsideNursery(cell)) {
    return false;
  }

  auto* tenuredCell = reinterpret_cast<js::gc::TenuredCell*>(cell);
  return js::gc::detail::CellIsMarkedGrayIfKnown(tenuredCell);
}
// Specialised gray marking check for use by the cycle collector. This is not
// called during incremental GC or when the gray bits are invalid.
static MOZ_ALWAYS_INLINE bool GCThingIsMarkedGrayInCC(GCCellPtr thing) {
  js::gc::Cell* cell = thing.asCell();
  if (IsInsideNursery(cell)) {
    return false;
  }

  auto* tenuredCell = reinterpret_cast<js::gc::TenuredCell*>(cell);
  if (!js::gc::detail::TenuredCellIsMarkedGray(tenuredCell)) {
    return false;
  }

  MOZ_ASSERT(js::gc::detail::CanCheckGrayBits(tenuredCell));
  return true;
}
extern JS_PUBLIC_API JS::TraceKind GCThingTraceKind(void* thing);
/*
 * Returns true when writes to GC thing pointers (and reads from weak pointers)
 * must call an incremental barrier. This is generally only true when running
 * mutator code in-between GC slices. At other times, the barrier may be elided
 * for performance.
 */
extern JS_PUBLIC_API bool IsIncrementalBarrierNeeded(JSContext* cx);
/*
 * Notify the GC that a reference to a JSObject is about to be overwritten.
 * This method must be called if IsIncrementalBarrierNeeded.
 */
extern JS_PUBLIC_API void IncrementalPreWriteBarrier(JSObject* obj);
/*
 * Notify the GC that a reference to a tenured GC cell is about to be
 * overwritten. This method must be called if IsIncrementalBarrierNeeded.
 */
extern JS_PUBLIC_API void IncrementalPreWriteBarrier(GCCellPtr thing);
/*
 * Unsets the gray bit for anything reachable from |thing|. |kind| should not
 * be JS::TraceKind::Shape. |thing| should be non-null. The return value
 * indicates if anything was unmarked.
 */
extern JS_PUBLIC_API bool UnmarkGrayGCThingRecursively(GCCellPtr thing);
extern JS_PUBLIC_API void PerformIncrementalReadBarrier(JS::GCCellPtr thing);
static MOZ_ALWAYS_INLINE void ExposeGCThingToActiveJS(JS::GCCellPtr thing) {
  // TODO: I'd like to assert !RuntimeHeapIsBusy() here but this gets
  // called while we are tracing the heap, e.g. during memory reporting
  // (see bug 1313318).
  MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());

  // GC things residing in the nursery cannot be gray: they have no mark bits.
  // All live objects in the nursery are moved to tenured at the beginning of
  // each GC slice, so the gray marker never sees nursery things.
  if (IsInsideNursery(thing.asCell())) {
    return;
  }

  auto* cell = reinterpret_cast<TenuredCell*>(thing.asCell());
  if (detail::TenuredCellIsMarkedBlack(cell)) {
    return;
  }

  // GC things owned by other runtimes are always black.
  MOZ_ASSERT(!thing.mayBeOwnedByOtherRuntime());

  auto* zone = JS::shadow::Zone::from(detail::GetTenuredGCThingZone(cell));
  if (zone->needsIncrementalBarrier()) {
    PerformIncrementalReadBarrier(thing);
  } else if (!zone->isGCPreparing() && detail::NonBlackCellIsMarkedGray(cell)) {
    MOZ_ALWAYS_TRUE(JS::UnmarkGrayGCThingRecursively(thing));
  }

  MOZ_ASSERT_IF(!zone->isGCPreparing(), !detail::TenuredCellIsMarkedGray(cell));
}
static MOZ_ALWAYS_INLINE void IncrementalReadBarrier(JS::GCCellPtr thing) {
  // This is a lighter version of ExposeGCThingToActiveJS that doesn't do gray
  // unmarking.

  if (IsInsideNursery(thing.asCell())) {
    return;
  }

  auto* cell = reinterpret_cast<TenuredCell*>(thing.asCell());
  auto* zone = JS::shadow::Zone::from(detail::GetTenuredGCThingZone(cell));
  if (zone->needsIncrementalBarrier() &&
      !detail::TenuredCellIsMarkedBlack(cell)) {
    // GC things owned by other runtimes are always black.
    MOZ_ASSERT(!thing.mayBeOwnedByOtherRuntime());
    PerformIncrementalReadBarrier(thing);
  }
}
template <typename T>
extern JS_PUBLIC_API bool EdgeNeedsSweepUnbarrieredSlow(T* thingp);
static MOZ_ALWAYS_INLINE bool EdgeNeedsSweepUnbarriered(JSObject** objp) {
  // This function does not handle updating nursery pointers. Raw JSObject
  // pointers should be updated separately or replaced with
  // JS::Heap<JSObject*> which handles this automatically.
  MOZ_ASSERT(!JS::RuntimeHeapIsMinorCollecting());
  if (IsInsideNursery(*objp)) {
    return false;
  }

  auto zone = JS::shadow::Zone::from(detail::GetTenuredGCThingZone(*objp));
  if (!zone->isGCSweepingOrCompacting()) {
    return false;
  }

  return EdgeNeedsSweepUnbarrieredSlow(objp);
}
/*
 * This should be called when an object that is marked gray is exposed to the
 * JS engine (by handing it to running JS code or writing it into live JS
 * data). During incremental GC, since the gray bits haven't been computed yet,
 * we conservatively mark the object black.
 */
static MOZ_ALWAYS_INLINE void ExposeObjectToActiveJS(JSObject* obj) {
  MOZ_ASSERT(!js::gc::EdgeNeedsSweepUnbarrieredSlow(&obj));
  js::gc::ExposeGCThingToActiveJS(GCCellPtr(obj));
}
#endif /* js_HeapAPI_h */