UDocumentation UE5.7 10.02.2026 (Source)
API documentation for Unreal Engine 5.7
VVMHeap.h
Go to the documentation of this file.
1// Copyright Epic Games, Inc. All Rights Reserved.
2
3#pragma once
4
5#if WITH_VERSE_VM || defined(__INTELLISENSE__)
6
7#include "Containers/Array.h"
8#include "HAL/Platform.h"
12#include "VVMLog.h"
13#include "verse_heap_config_ue.h"
14#include <atomic>
15#include <cstddef>
16
17class FThread;
18
19namespace UE
20{
21class FConditionVariable;
22class FMutex;
23} // namespace UE
24
25extern "C" struct pas_heap;
26
27namespace Verse
28{
31struct FGlobalHeapRoot;
33struct FHeapPageHeader;
34struct FIOContext;
35struct FMarkStack;
36class FSubspace;
37struct VEmergentType;
38
39template <typename T>
40struct alignas(alignof(T)) TNeverDestroyed;
41
// Barrier state consulted when reading weak references during GC.
// NOTE(review): the enumerator list (original lines 44-47) was dropped by this
// extraction -- consult the real VVMHeap.h for the actual values before use.
enum class EWeakBarrierState
{
};
49
// Static-only manager of the Verse VM garbage-collected heap: the constructor
// is deleted below and every member is static. It owns the heap spaces, the
// per-chunk mark bitmaps, the collector thread and its state machine, native
// byte accounting, and the hooks that let an external GC drive this one.
//
// NOTE(review): this is a lossy extraction. The embedded original line numbers
// jump (e.g. 70 -> 72, 150 -> 152), so many declarations are missing even though
// the comments describing them survive. Hedged notes mark the obvious gaps;
// verify against the real VVMHeap.h before relying on any of them.
50class FHeap final
51{
52public:
// Phases of a collection cycle, in the order they run. Idle means no cycle
// is in progress (see IsCollecting() below).
53 enum EState
54 {
55 Idle,
56 Marking, // Finding roots and traversing the heap
57 PostMarking, // Successfully completed a marking pass
58 Census, // Cleaning up weakly referenced objects
59 Destroying, // Running destructors on unreferenced objects
60 Sweeping, // Reclaiming memory
61 Finishing, // Ending the collection process, returning to idle
62 };
63
64 // Must call this before doing anything with the heap.
65 COREUOBJECT_API static void Initialize();
66
67 COREUOBJECT_API static void Deinitialize();
68
69 // Space for objects that are fast to GC: they have no destructors and do not require census. Ideally most
70 // objects we allocate dynamically are fast.
// NOTE(review): the member declaration (original line 71, presumably FastSpace
// given the next comment's "FastSpace above") was elided by this extraction.
72
73 // Same as FastSpace above, except allocations in this space are NOT put onto the mark stack.
74 // It is up to the callers who allocated into this space to handle marking anything those allocations contain.
// (member declaration elided in extraction)
76
77 // Space for objects that require destructors. It's fine for objects to have destructors so long as those objects
78 // are relatively infrequently allocated (they have low churn rate). It's fine for this space to get large so long
79 // as churn rate stays low.
80 //
81 // Destructors will be called from any thread. It's possible for destructors to be called on the slow path of
82 // allocation. It's possible for them to be called from some GC worker thread.
// (member declaration elided in extraction)
84
85 // Space for objects that require census. Census is a heap iteration that happens after marking but before
86 // sweeping, where every live object in the census space has some callback called. This is mostly for weak handles.
87 // It's ideal for performance if this space stays small, but it's fine for the churn rate to be high.
// (member declaration elided in extraction)
89
90 // Space for objects that require both destruction and census.
// (member declaration elided in extraction)
92
93 // Special space for emergent types. Its size is limited so that a 32 bit offset is enough.
// (member declaration elided in extraction; see EmergentReservationSize below)
95
96 // Use this set to iterate destructors. In the future, if we have spaces that require destruction other than the
97 // destructor space (like if we need a DestructorAndCensusSpace), then this set will contain all of them.
98 // That's important because there is some constant overhead to iterating any iteration set, but iteration sets
99 // can contain any number of spaces.
// (member declaration elided in extraction)
101
102 // Census set, for now just containing the CensusSpace. If we have multiple spaces that require Census (like if
103 // we need a DestructorAndCensusSpace), then this will contain all of them.
// (member declaration elided in extraction)
105
// True while the collector is in its marking phase.
106 static bool IsMarking()
107 {
108 return State == EState::Marking;
109 }
110
// True while the collector is running destructors.
111 static bool IsDestroying()
112 {
113 return State == EState::Destroying;
114 }
115
// True whenever any collection phase (anything but Idle) is in progress.
116 static bool IsCollecting()
117 {
118 return State != EState::Idle;
119 }
120
// NOTE(review): the signature (original line 121) was elided; from the body this
// is the accessor for WeakBarrierState -- presumably
// "static EWeakBarrierState GetWeakBarrierState()". Confirm against the real header.
122 {
123 return WeakBarrierState;
124 }
125
// Returns a pointer to the 32-bit mark-bit word covering Ptr. The math below
// implies the mark bitmap lives at the base of each VERSE_HEAP_CHUNK_SIZE-aligned
// chunk: the chunk-relative offset is scaled down by the minimum allocation
// alignment to a bit index, and bits are packed 32 per word (>> 5).
126 static std::atomic<uint32>* GetMarkBitWord(const void* Ptr)
127 {
128 uintptr_t Address = reinterpret_cast<uintptr_t>(Ptr);
129 uintptr_t ChunkBase = Address & ~(static_cast<uintptr_t>(VERSE_HEAP_CHUNK_SIZE) - 1);
130 uintptr_t ChunkOffset = Address & (static_cast<uintptr_t>(VERSE_HEAP_CHUNK_SIZE) - 1);
131 uintptr_t BitIndex = ChunkOffset >> VERSE_HEAP_MIN_ALIGN_SHIFT;
132 uintptr_t WordIndex = BitIndex >> 5;
133 return reinterpret_cast<std::atomic<uint32>*>(ChunkBase) + WordIndex;
134 }
135
// Global bit index for Ptr (truncated to 32 bits; only the low 5 bits matter to
// callers like GetMarkBitMask). Asserts Ptr is at least VERSE_HEAP_MIN_ALIGN aligned.
136 static uint32 GetMarkBitIndex(const void* Ptr)
137 {
138 uintptr_t Address = reinterpret_cast<uintptr_t>(Ptr);
139 checkSlow(!(Address & (static_cast<uintptr_t>(VERSE_HEAP_MIN_ALIGN) - 1)));
140 return static_cast<uint32>(Address >> VERSE_HEAP_MIN_ALIGN_SHIFT);
141 }
142
// Single-bit mask selecting Ptr's bit within its 32-bit mark word.
143 static uint32 GetMarkBitMask(const void* Ptr)
144 {
145 return static_cast<uint32>(1) << (GetMarkBitIndex(Ptr) & 31);
146 }
147
// Tests Ptr's mark bit with a relaxed atomic load.
148 static bool IsMarked(const void* Ptr)
149 {
150 std::atomic<uint32>* Word = GetMarkBitWord(Ptr);
// NOTE(review): original line 151 (computing Mask, presumably
// "uint32 Mask = GetMarkBitMask(Ptr);") was elided by this extraction.
152 return Word->load(std::memory_order_relaxed) & Mask;
153 }
154
// NOTE(review): the signature (original line 155) was elided; from the body this
// converts a VEmergentType pointer into its EmergentAlignment-scaled offset from
// EmergentTypeBase, with a checkf that the round trip is lossless (lines 158/160/162
// of the checkf argument list are also elided here).
156 {
157 uint32 Offset = (BitCast<uint8*>(EmergentType) - BitCast<uint8*>(FHeap::EmergentTypeBase)) / FHeap::EmergentAlignment;
159 TEXT("EmergentType could not be translated to an offset (pointer 0x%p, offset 0x%x => 0x%p)."),
161 Offset,
163 return Offset;
164 }
165
// NOTE(review): signature elided (original line 166); inverse of the translation
// above -- reconstructs the VEmergentType* from a 32-bit offset.
167 {
168 return BitCast<VEmergentType*>(BitCast<uint8*>(FHeap::EmergentTypeBase) + static_cast<size_t>(Offset) * FHeap::EmergentAlignment);
169 }
170
// NOTE(review): signature elided (original line 171); by symmetry with
// ReportDeallocatedNativeBytes below and the body's use of LiveNativeBytes,
// presumably "static void ReportAllocatedNativeBytes(size_t Bytes)".
172 {
173 if (Bytes)
174 {
// Accounting only; dies if the live counter ever goes negative.
175 LiveNativeBytes += static_cast<size_t>(Bytes);
176 V_DIE_IF(static_cast<ptrdiff_t>(LiveNativeBytes) < 0);
177 }
178 }
179
// Subtracts Bytes from the live-native-bytes counter by adding its two's-complement
// negation (the counter is unsigned).
180 static void ReportDeallocatedNativeBytes(size_t Bytes)
181 {
182 if (Bytes)
183 {
184 size_t AllocatedBytes = static_cast<size_t>(-static_cast<ptrdiff_t>(Bytes));
// NOTE(review): original lines 185-186 elided; presumably they add AllocatedBytes
// to LiveNativeBytes and V_DIE_IF on a negative result, mirroring the allocate path.
187 }
188 }
189
// Current native-byte accounting total (non-atomic read of an atomic counter's value).
190 static size_t GetLiveNativeBytes()
191 {
192 return LiveNativeBytes;
193 }
194
195 // If a collection cycle is going right now, then do nothing except return a request object that
196 // allows us to wait for when it's done.
197 //
198 // If a collection cycle is not going right now, then start one, and return a request object that
199 // allows us to wait for when it's done.
200 //
201 // The fact that this won't start a GC cycle if one is already going is quite significant. GC
202 // cycles float garbage: anything allocated in this cycle will not be freed in this cycle. So, if
203 // you create some garbage, call this function, and then wait, then you're not guaranteed that the
204 // garbage you created will be deleted. It might float!
205 //
206 // This function forms the basis of GC triggers. If you want to trigger GC when there is too much
207 // of something (like live bytes in the heap), then use this.
// (declaration elided in extraction -- returns an FCollectionCycleRequest per the
// friend declaration below; confirm against the real header)
209
210 // If a collection cycle is going right now, then request another collection cycle to start
211 // immediately after this one. Note that this cycle might be unique to us, since if multiple calls
212 // happen to this function within a cycle, then they'll all request the same fresh cycle.
213 //
214 // If a collection cycle is not going right now, then start one.
215 //
216 // Either way, return a request object that allows us to wait for the cycle we started.
217 //
218 // The point of this function is that it allows you to handle floating garbage. If you allocate
219 // some garbage, call this function, and then wait, then you're guaranteed that the garbage you
220 // created will get collected. It may float in the *current* cycle (if there was one), but it
221 // cannot possibly float in the fresh cycle that this requested.
222 //
223 // Hence this function forms the basis of STW "GC now" equivalents. Like, if you wanted to
224 // "synchronously GC" in a concurrent GC, then that means calling this function and then waiting
225 // for the request.
// (declaration elided in extraction)
227
// NOTE(review): signature (original line 228) and body (line 230) elided.
229 {
231 }
232
233 // Request that the collector allow external control. This will crash if the GC is already externally
234 // controlled.
// (declaration elided in extraction)
236
237 // Disable external control.
// (declaration elided in extraction)
239
// NOTE(review): the return statement (original line 242, presumably
// "return bIsExternallyControlled;") was elided by this extraction.
240 static bool IsExternallyControlled()
241 {
243 }
244
// (declaration elided in extraction -- original lines 245-247)
247
248 // To be called from an external control system: Check if the GC wants to terminate. This means that the
249 // Verse GC has no more objects to mark. If the Verse GC has no more objects to mark, and other GCs in
250 // the system also have no more objects to mark, then it's safe to permit GC termination.
251 //
252 // This can only be called if the GC is marking. Otherwise it crashes.
// (declaration elided in extraction)
254
255 // To be called from an external control system: Permit the GC to terminate. This crashes if the Verse
256 // GC is not pending termination.
257 //
258 // NOTE: It's only safe to use this if there is no concurrent mutator activity while you're trying to do
259 // this.
// (declaration elided in extraction)
261
262 // To be called from an external control system: Permit the GC to terminate. This returns true if the Verse
263 // GC was pending termination and was able to terminate, false otherwise.
// (declaration elided in extraction)
265
266 // External GCs are expected to create their own FMarkStack instance(s) and mark VCells using that API.
267 // Then, eventually, passing those MarkStacks to Verse using this AddExternalMarkStack() function. This
268 // ensures that the Verse GC is aware of external marking activity in its heap.
269 //
270 // Only use this function with EnableExternalControl().
271 //
272 // Never call this function before ExternallySignalGCStartAndWaitUntilItDoes().
273 //
274 // Never pass a MarkStack that had objects from a previous collection (i.e. from before
275 // ExternallySignalGCStartAndWaitUntilItDoes()).
276 //
277 // Never call ExternallySynchronouslyTerminateGC() if you have any MarkStacks that you haven't yet passed to
278 // the Verse GC using this AddExternalMarkStack() function.
279 //
280 // This function clears the contents of the MarkStack you give it.
// (AddExternalMarkStack declaration elided in extraction)
282
283 // It doesn't mean it's actually a valid cell, but it does mean
284 // that libpas owns this address.
285 COREUOBJECT_API static bool OwnsAddress(void*);
286
// NOTE(review): the return statement (original line 289) was elided by this extraction.
287 static double GetTotalTimeSpentCollecting()
288 {
290 }
291
292private:
293 friend struct FCollectionCycleRequest;
294 friend struct FContextImpl; // Usually, FContextImpl just uses FHeap API, but sometimes it's not practical.
295 friend struct FGlobalHeapRoot;
296 friend struct FGlobalHeapCensusRoot;
297 friend struct FMarkStack;
298 friend struct FWeakKeyMapGuard;
299
// Static-only class: no instances can ever be constructed.
300 FHeap() = delete;
301
// (declaration elided in extraction -- original lines 302-303)
303
304 // If we have threading, this returns false. If we don't have threading, it asserts we aren't GCing right now and checks if we should
305 // turn on threading.
// (declaration elided in extraction)
307
308 static void CollectorThreadMain();
// (declaration elided in extraction -- original line 309)
310
311 static void WaitForTrigger(FIOContext Context);
312
// (declaration elided in extraction -- original line 313)
314 static void RunPreMarking(FIOContext Context);
315 static void RunPostMarking(FIOContext Context);
// (declaration elided in extraction -- original line 316)
317
318 static void Terminate();
319 static void CancelTermination();
320
// Collection-phase steps, invoked in roughly EState order.
// (declaration elided in extraction -- original line 321)
322 static void MarkRoots(FIOContext Context);
323 static void Mark(FIOContext Context);
324 static bool AttemptToTerminate(FIOContext Context); // Returns true if we did terminate.
325 static void ConductCensus(FIOContext Context);
326 static void RunDestructors(FIOContext Context);
327 static void Sweep(FIOContext Context);
328 static void EndCollection(FIOContext Context);
329
330 static void LiveBytesTriggerCallback();
331 static void CensusCallback(void* Object, void* Arg);
332 static void DestructorCallback(void* Object, void* Arg);
333
334 static void CheckCycleTriggerInvariants();
// (declaration elided in extraction -- original line 335)
336
// (declaration elided in extraction -- original line 337)
338
// NOTE(review): the accumulation statement (original line 341, presumably
// "MarkedNativeBytes += Bytes;") was elided by this extraction.
339 static void ReportMarkedNativeBytes(size_t Bytes)
340 {
342 V_DIE_IF(static_cast<ptrdiff_t>(MarkedNativeBytes) < 0);
343 }
344
345 static bool bWithoutThreading;
346
347 static FThread* CollectorThread;
348
// (declarations elided in extraction -- original lines 349-356)
353
356
357 // Controls all of the fields below.
// NOTE(review): the Mutex declaration (original line 358) was elided; the MarkStack
// comment below ("Must hold Mutex") refers to it.
359 COREUOBJECT_API static UE::FConditionVariable ConditionVariable;
360
361 // Invariant: RequestedCycleVersion >= CompletedCycleVersion
362 //
363 // If RequestedCycleVersion > CompletedCycleVersion, then we should run a collection, and increment
364 // CompletedCycleVersion once finished.
365 //
366 // If RequestedCycleVersion == CompletedCycleVersion, then we should not run a collection.
367 //
368 // To request a collection, increment RequestedCycleVersion.
369 //
370 // To wait for our requested collection to finish, wait for CompletedCycleVersion to catch up to
371 // the value we incremented RequestedCycleVersion to.
// (RequestedCycleVersion/CompletedCycleVersion declarations elided -- original lines 372-375)
375
376 COREUOBJECT_API static EState State;
// (declaration elided in extraction -- original line 377, likely WeakBarrierState
// given the accessor body above; confirm)
378
379 static size_t LiveCellBytesAtStart;
380
381 static std::atomic<size_t> MarkedNativeBytes;
382 COREUOBJECT_API static std::atomic<size_t> LiveNativeBytes;
383
384 static TNeverDestroyed<FMarkStack> MarkStack; // Must hold Mutex to access safely.
385 static unsigned NumThreadsToScanStackManually;
386
387 static bool bIsExternallyControlled;
// (declarations elided in extraction -- original lines 388-391)
391 static bool bIsGCTerminatingExternally;
392
393 static bool bIsTerminated;
394
395 static bool bIsInitialized;
396
// (declaration elided in extraction -- original line 397)
398 static double TimeOfPreMarking;
399
400 // Cached value of the base of the EmergentSpace (i.e. EmergentSpace->GetBase()).
401 COREUOBJECT_API static std::byte* EmergentTypeBase;
402
// Emergent types are packed on 16-byte boundaries within a 16 MiB reservation,
// so every emergent type is addressable by a 32 bit offset (see the translation
// helpers above).
403 static constexpr size_t EmergentAlignment = 16;
404 static constexpr size_t EmergentReservationSize = 16 * 1024 * 1024;
405};
406
407} // namespace Verse
408#endif // WITH_VERSE_VM
#define checkSlow(expr)
Definition AssertionMacros.h:332
#define checkf(expr, format,...)
Definition AssertionMacros.h:315
#define TEXT(x)
Definition Platform.h:1272
FPlatformTypes::uint64 uint64
A 64-bit unsigned integer.
Definition Platform.h:1117
UE_FORCEINLINE_HINT TSharedRef< CastToType, Mode > StaticCastSharedRef(TSharedRef< CastFromType, Mode > const &InSharedRef)
Definition SharedPointer.h:127
uint32 Offset
Definition VulkanMemory.cpp:4033
uint32_t uint32
Definition binka_ue_file_header.h:6
Definition Thread.h:24
Definition ConditionVariable.h:14
Definition Mutex.h:18
UE::FRecursiveMutex Mutex
Definition MeshPaintVirtualTexture.cpp:164
State
Definition PacketHandler.h:88
Definition AdvancedWidgetsModule.cpp:13
Definition Archive.h:36