// NOTE: repository metadata unavailable ("this repo has no description").
1// Copyright (c) Facebook, Inc. and its affiliates. (http://www.facebook.com)
2#include "scavenger.h"
3
4#include <cstring>
5#include <memory>
6
7#include "capi.h"
8#include "runtime.h"
9
10namespace py {
11
// Copying ("scavenging") garbage collector. It visits all roots, transports
// reachable objects out of the current mortal space (leaving forwarding
// pointers behind), and patches every visited slot to the new location.
class Scavenger : public PointerVisitor {
 public:
  explicit Scavenger(Runtime* runtime);

  // Returns true if `object` has not been transported (forwarded) yet in this
  // cycle, i.e. it has not yet been proven reachable.
  bool isWhiteObject(RawHeapObject object);

  // Collects into a brand-new space; returns the queued weakref callbacks.
  RawObject scavenge();

  // Collects directly into the immortal partition; returns the queued weakref
  // callbacks.
  RawObject scavengeIntoImmortal();

  // PointerVisitor hook; forwards every root slot to scavengePointer().
  void visitPointer(RawObject* pointer, PointerKind kind) override;

 private:
  // Destination for transported non-code objects during a cycle.
  enum class SaveLocation { kImmortalHeap, kNewSpace };

  // Runs one full collection cycle into the given destination.
  void collect(SaveLocation);

  // Updates one slot, transporting its referent if it is still in from-space.
  void scavengePointer(RawObject* pointer);

  // Copies `old_object` to its destination space, installs a forwarding
  // pointer, and returns the relocated object.
  RawObject transport(RawObject old_object);

  // Clears or fixes up weakrefs whose referents were white at scan time.
  void processDelayedReferences();

  // Drains the gray regions of both spaces until no gray objects remain.
  void processGrayObjects();

  // Scans `space` from the given address to its fill mark; returns the new
  // gray line.
  uword processGrayObjectsIn(Space*, uword);

  // Treats non-builtin entries of the layouts table as weak roots.
  void processLayouts();

  // Compacts the layout type transition table after dead edges are cleared.
  void compactLayoutTypeTransitions();

  Runtime* runtime_;
  Heap* heap_;
  Space* immortal_;  // Immortal partition; objects here are never moved.
  Space* from_;      // Space being evacuated.
  Space* to_;        // Destination space (aliases immortal_ when immortalizing).
  uword to_gray_line_;        // Start of the gray region in to_.
  uword immortal_gray_line_;  // Start of the gray region in immortal_.
  RawMutableTuple layouts_;
  RawMutableTuple layout_type_transitions_;
  RawObject delayed_references_;  // Queue of weakrefs with white referents.
  RawObject delayed_callbacks_;   // Queue of weakref callbacks to run later.
  SaveLocation save_location_;
};
56
// Caches the heap spaces and the runtime's layout tables for the duration of
// the collection. NOTE(review): layouts_ and layout_type_transitions_ are raw
// handles held across a moving GC; this is safe only because processLayouts()
// re-transports both tables itself.
Scavenger::Scavenger(Runtime* runtime)
    : runtime_(runtime),
      heap_(runtime->heap()),
      immortal_(heap_->immortal()),
      from_(heap_->space()),
      to_(nullptr),
      layouts_(MutableTuple::cast(runtime->layouts())),
      layout_type_transitions_(
          MutableTuple::cast(runtime->layoutTypeTransitions())),
      delayed_references_(NoneType::object()),
      delayed_callbacks_(NoneType::object()),
      save_location_(SaveLocation::kNewSpace) {}
69
// Runs one collection cycle: visits all roots, transports reachable objects
// (code objects always to the immortal partition, everything else to the
// destination selected by `copy_into`), and scans gray objects to fixpoint.
void Scavenger::collect(SaveLocation copy_into) {
  save_location_ = copy_into;

  // As we find objects, they become "gray" since we still
  // need to search their sub-objects. The gray area extends
  // from the gray line to the fill mark (invariant). The
  // black area extends from the start to the gray line.
  to_gray_line_ = to_->start();
  immortal_gray_line_ = immortal_->start();

  // We touch all roots. If we find code objects we will
  // move them into the immortal partition. Objects already living in the
  // immortal partition are scanned first since they may hold pointers into
  // from-space.
  immortal_gray_line_ = processGrayObjectsIn(immortal_, immortal_gray_line_);
  runtime_->visitRootsWithoutApiHandles(this);
  visitIncrementedApiHandles(runtime_, this);

  // Gray objects in the immortal space are code objects.
  // All objects in the to_ space are gray. We first want to follow
  // down the sub-objects of gray code objects and make them
  // immortal as well. Then we get rid of gray objects in the to_
  // space (which may make more immortal code objects). Repeat
  // until we've caught everything.
  processGrayObjects();

  // We have some other objects to track down: extension objects and borrowed
  // API handles can each keep more objects alive, so drain the gray set after
  // every wave.
  visitExtensionObjects(runtime_, this, this);
  processGrayObjects();
  visitNotIncrementedBorrowedApiHandles(runtime_, this, this);
  processGrayObjects();
  // Clear/fix up weakrefs, then treat the layout tables as weak roots.
  processDelayedReferences();
  processLayouts();

  // One last cleanup: processLayouts() transports the layout tables, which
  // can gray additional objects.
  processGrayObjects();
}
105
// Standard scavenge: copies survivors into a brand-new space of the same size
// and installs it as the heap's active space. Returns the list of weakref
// callbacks that the caller must run after the collection.
RawObject Scavenger::scavenge() {
  DCHECK(heap_->verify(), "Heap failed to verify before GC");

  // Nothing else should be allocating during a GC.
  heap_->setSpace(nullptr);

  // Set up a new space for reachable, non-immortal objects.
  to_ = new Space(from_->size());

  // Collect and copy objects into to_.
  collect(SaveLocation::kNewSpace);

  // Swap in the new space and delete the old mortal heap.
  heap_->setSpace(to_);
  DCHECK(heap_->verify(), "Heap failed to verify after GC");
  delete from_;
  return delayed_callbacks_;
}
124
// Scavenges all live objects directly into the immortal partition, leaving a
// fresh, empty mortal heap afterwards. Returns the weakref callbacks to run.
RawObject Scavenger::scavengeIntoImmortal() {
  // Make sure we have enough room.
  // TODO(T89880293) We can try compacting first if there isn't enough room
  uword immortal_available = immortal_->end() - immortal_->fill();
  uword heap_used = from_->fill() - from_->start();
  // NOTE(review): this is a DCHECK, so release builds proceed unchecked; the
  // CHECK inside transport() is the backstop if the partition overflows.
  DCHECK(heap_used < immortal_available,
         "Immortal heap partition may not be big enough");

  DCHECK(heap_->verify(), "Heap failed to verify before GC");
  // Nothing should be allocating during a GC.
  heap_->setSpace(nullptr);
  // The destination space aliases the immortal partition for this cycle.
  to_ = immortal_;

  // Collect and copy objects into immortal partition.
  collect(SaveLocation::kImmortalHeap);

  // Start with a fresh, empty heap.
  heap_->setSpace(new Space(from_->size()));
  DCHECK(heap_->verify(), "Heap failed to verify after GC");
  delete from_;
  return delayed_callbacks_;
}
147
148void Scavenger::visitPointer(RawObject* pointer, PointerKind) {
149 scavengePointer(pointer);
150}
151
152void Scavenger::scavengePointer(RawObject* pointer) {
153 if (!(*pointer).isHeapObject()) {
154 return;
155 }
156 RawHeapObject object = HeapObject::cast(*pointer);
157 if (!from_->contains(object.address())) {
158 DCHECK(object.header().isHeader(), "object must have a header");
159 DCHECK(
160 to_->contains(object.address()) || heap_->isImmortal(object.address()),
161 "object must be in 'from' or 'to' or 'immortal'space");
162 } else if (object.isForwarding()) {
163 DCHECK(to_->contains(HeapObject::cast(object.forward()).address()) ||
164 heap_->isImmortal(HeapObject::cast(object.forward()).address()),
165 "transported object must be located in 'to' or 'immortal' space");
166 *pointer = object.forward();
167 } else {
168 *pointer = transport(object);
169 }
170}
171
172bool Scavenger::isWhiteObject(RawHeapObject object) {
173 DCHECK(to_ == immortal_ || !to_->contains(object.address()),
174 "must not test objects that have already been visited");
175 return !object.isForwarding();
176}
177
// Scans the gray regions of both spaces to fixpoint. Scanning can append new
// objects (extending a space's fill mark), so we loop until both gray lines
// catch up with their fill marks.
void Scavenger::processGrayObjects() {
  SaveLocation saved = save_location_;
  while (immortal_gray_line_ < immortal_->fill() ||
         to_gray_line_ < to_->fill()) {
    // Gray immortal code objects and all reachables. Anything discovered
    // while scanning the immortal space is itself made immortal, so force the
    // save location for the duration of that scan.
    save_location_ = SaveLocation::kImmortalHeap;
    immortal_gray_line_ = processGrayObjectsIn(immortal_, immortal_gray_line_);
    save_location_ = saved;

    // Objects reachable from gray objects become gray as well. When
    // collecting into the immortal partition the two spaces are the same
    // object, so mirror the gray line instead of rescanning.
    to_gray_line_ = (to_ == immortal_)
                        ? immortal_gray_line_
                        : processGrayObjectsIn(to_, to_gray_line_);
  }
}
193
// Scans `space` word by word from `scan` up to the space's current fill mark,
// scavenging every pointer field of every object that has pointers to scan.
// Objects transported into `space` during the scan extend the fill mark and
// are picked up by the same loop. Returns the final scan position, which
// becomes the new gray line.
uword Scavenger::processGrayObjectsIn(Space* space, uword scan) {
  while (scan < space->fill()) {
    if (!(*reinterpret_cast<RawObject*>(scan)).isHeader()) {
      // Skip immediate values for alignment padding or header overflow.
      scan += kPointerSize;
    } else {
      RawHeapObject object = HeapObject::fromAddress(scan + RawHeader::kSize);
      uword end = object.baseAddress() + object.size();
      // Scan pointers that follow the header word, if any.
      if (!object.isRoot()) {
        // isRoot() == false: nothing inside needs scanning; skip the body.
        scan = end;
        continue;
      }
      scan += RawHeader::kSize;
      if (object.isWeakRef()) {
        RawWeakRef weakref = WeakRef::cast(object);
        RawObject referent = weakref.referent();
        if (!referent.isNoneType() &&
            isWhiteObject(HeapObject::cast(referent))) {
          // Delay the reference object for later processing.
          WeakRef::enqueue(object, &delayed_references_);
          // Skip over the referent field and continue scavenging.
          // NOTE(review): assumes the referent is the first field after the
          // header -- confirm against RawWeakRef's layout.
          scan += kPointerSize;
        }
      }
      for (; scan < end; scan += kPointerSize) {
        scavengePointer(reinterpret_cast<RawObject*>(scan));
      }
    }
  }
  return scan;
}
226
// Do a final pass through the Layouts Tuple, treating all non-builtin entries
// as weak roots: entries whose layout died are cleared to SmallInt 0, entries
// whose layout survived are patched to the forwarded location. Then the
// layout type transition table gets the same weak treatment and is compacted.
void Scavenger::processLayouts() {
  for (word i = static_cast<word>(LayoutId::kLastBuiltinId) + 1,
            end = layouts_.length();
       i < end; ++i) {
    RawObject layout = layouts_.at(i);
    // SmallInt 0 marks an empty slot.
    if (layout == SmallInt::fromWord(0)) continue;
    RawHeapObject heap_obj = HeapObject::cast(layout);
    // Entries already in to-space or the immortal partition were reached via
    // a strong root; leave them alone.
    if (to_->contains(heap_obj.address())) continue;
    if (heap_->isImmortal(heap_obj.address())) continue;

    if (heap_obj.isForwarding()) {
      // Layout survived via some other strong reference; follow the forward.
      DCHECK(heap_obj.forward().isLayout(), "Bad Layout forwarded value");
      layouts_.atPut(i, heap_obj.forward());
    } else {
      // Layout is dead; clear the weak slot.
      layouts_.atPut(i, SmallInt::fromWord(0));
    }
  }

  // TODO(T59281894): We can skip this step if the Layouts table doesn't live
  // in the managed heap.
  runtime_->setLayouts(transport(layouts_));

  // Remove dead empty entries (triples (A, B, C) where either A or C is dead).
  // Post-condition: all entries in the tuple will either be references to
  // to-space or None.
  word length = layout_type_transitions_.length();
  DCHECK(!to_->contains(layout_type_transitions_.address()) ||
             immortal_->contains(layout_type_transitions_.address()),
         "object should not have been moved");
  for (word i = 0; i < length; i += LayoutTypeTransition::kTransitionSize) {
    RawObject from_obj =
        layout_type_transitions_.at(i + LayoutTypeTransition::kFrom);
    // SmallInt 0 in the kFrom slot marks a cleared triple.
    if (from_obj == SmallInt::fromWord(0)) continue;

    RawHeapObject from = HeapObject::cast(from_obj);
    RawHeapObject to = HeapObject::cast(
        layout_type_transitions_.at(i + LayoutTypeTransition::kTo));
    RawHeapObject result = HeapObject::cast(
        layout_type_transitions_.at(i + LayoutTypeTransition::kResult));
    DCHECK(!to_->contains(to.address()),
           "reference should not have been moved");
    DCHECK(!to_->contains(from.address()),
           "reference should not have been moved");
    DCHECK(!to_->contains(result.address()),
           "reference should not have been moved");
    if (from.isForwarding() && result.isForwarding()) {
      // Both endpoints survived: patch the whole triple to the new locations.
      layout_type_transitions_.atPut(i + LayoutTypeTransition::kFrom,
                                     from.forward());
      layout_type_transitions_.atPut(i + LayoutTypeTransition::kTo,
                                     to.forward());
      layout_type_transitions_.atPut(i + LayoutTypeTransition::kResult,
                                     result.forward());
    } else {
      // Remove the transition edge if the from or result layouts have been
      // collected.
      layout_type_transitions_.atPut(i + LayoutTypeTransition::kFrom,
                                     SmallInt::fromWord(0));
      layout_type_transitions_.atPut(i + LayoutTypeTransition::kTo,
                                     SmallInt::fromWord(0));
      layout_type_transitions_.atPut(i + LayoutTypeTransition::kResult,
                                     SmallInt::fromWord(0));
    }
  }

  compactLayoutTypeTransitions();
  runtime_->setLayoutTypeTransitions(transport(layout_type_transitions_));
}
296
297static inline word getLeftMostNoneObjectIndex(RawTuple layout_type_transitions,
298 word left, word right) {
299 while (left < right &&
300 layout_type_transitions.at(left + LayoutTypeTransition::kFrom) !=
301 SmallInt::fromWord(0)) {
302 left += LayoutTypeTransition::kTransitionSize;
303 }
304 return left;
305}
306
307static inline word getRightMostValidObjectIndex(
308 RawTuple layout_type_transitions, word left, word right) {
309 while (left < right &&
310 layout_type_transitions.at(right + LayoutTypeTransition::kFrom) ==
311 SmallInt::fromWord(0)) {
312 right -= LayoutTypeTransition::kTransitionSize;
313 }
314 return right;
315}
316
317static inline void swapTriplesInLayoutTypeTransitions(
318 RawMutableTuple layout_type_transitions, word left, word right) {
319 layout_type_transitions.swap(left + LayoutTypeTransition::kFrom,
320 right + LayoutTypeTransition::kFrom);
321 layout_type_transitions.swap(left + LayoutTypeTransition::kTo,
322 right + LayoutTypeTransition::kTo);
323 layout_type_transitions.swap(left + LayoutTypeTransition::kResult,
324 right + LayoutTypeTransition::kResult);
325}
326
327// applies compaction to layout_type_transitions
328// moves all valid triplets to front, and all None-Type triplets to end
329void Scavenger::compactLayoutTypeTransitions() {
330 word length = layout_type_transitions_.length();
331 length = (length / LayoutTypeTransition::kTransitionSize) *
332 LayoutTypeTransition::kTransitionSize;
333
334 word left = 0;
335 word right = length - LayoutTypeTransition::kTransitionSize;
336
337 do {
338 left = getLeftMostNoneObjectIndex(layout_type_transitions_, left, right);
339 right = getRightMostValidObjectIndex(layout_type_transitions_, left, right);
340
341 swapTriplesInLayoutTypeTransitions(layout_type_transitions_, left, right);
342 } while (left < right);
343}
344
345// Process the list of weakrefs that had otherwise-unreachable referents during
346// processGrayObjects().
347//
348// If the referent had one or more strong references after all, the weakref is
349// updated to point to the relocated object. Otherwise, the weakref's referent
350// field is set to None and its callback (if present) is enqueued for running
351// later.
352void Scavenger::processDelayedReferences() {
353 while (delayed_references_ != NoneType::object()) {
354 RawWeakRef weak = WeakRef::cast(WeakRef::dequeue(&delayed_references_));
355 if (!weak.referent().isHeapObject()) {
356 continue;
357 }
358 RawHeapObject referent = HeapObject::cast(weak.referent());
359 if (referent.isForwarding()) {
360 weak.setReferent(referent.forward());
361 } else {
362 weak.setReferent(NoneType::object());
363 if (!weak.callback().isNoneType()) {
364 WeakRef::enqueue(weak, &delayed_callbacks_);
365 }
366 }
367 }
368}
369
// Copies `old_object` into its destination space, installs a forwarding
// pointer in the old location, and returns the relocated object. Objects
// already in the immortal partition are returned unchanged; code objects
// always go to the immortal partition regardless of save_location_.
RawObject Scavenger::transport(RawObject old_object) {
  RawHeapObject from_object = HeapObject::cast(old_object);
  if (heap_->isImmortal(from_object.address())) {
    // Immortal objects never move.
    return old_object;
  }
  DCHECK(from_->contains(from_object.address()),
         "objects must be transported from 'from' space");
  DCHECK(from_object.header().isHeader(),
         "object must have a header and must not forward");

  // Code objects get transported to the immortal heap when
  // they are first discovered during a GC cycle. Then
  // objects reachable from those code objects that have not
  // been processed will also be made immortal.
  word size = from_object.size();
  uword address = 0;
  if (from_object.isCode() || save_location_ == SaveLocation::kImmortalHeap) {
    // Allocate these from the immortal partition. This CHECK fires even in
    // release builds -- the immortal partition has a fixed size.
    bool success = immortal_->allocate(size, &address);
    CHECK(success, "out of memory in immortal space");
  } else {
    // Otherwise allocate from the standard partition. Presumably this cannot
    // fail because to_ was created with from_'s size -- hence only a DCHECK.
    bool success = to_->allocate(size, &address);
    DCHECK(success, "GC transport allocation failed in new heap partition");
  }

  // Copy the raw bytes (header included) and leave a forwarding pointer in
  // the old object's header slot.
  auto dst = reinterpret_cast<void*>(address);
  auto src = reinterpret_cast<void*>(from_object.baseAddress());
  std::memcpy(dst, src, size);
  word offset = from_object.address() - from_object.baseAddress();
  RawHeapObject to_object = HeapObject::fromAddress(address + offset);
  from_object.forwardTo(to_object);

  // Keep the object's layout alive too: scavenge its slot in the layouts
  // table directly.
  LayoutId layout_id = to_object.layoutId();
  auto layout_ptr = reinterpret_cast<RawObject*>(
      layouts_.address() + static_cast<word>(layout_id) * kPointerSize);
  scavengePointer(layout_ptr);

  return to_object;
}
410
411bool isWhiteObject(Scavenger* scavenger, RawHeapObject object) {
412 return scavenger->isWhiteObject(object);
413}
414
415RawObject scavenge(Runtime* runtime) { return Scavenger(runtime).scavenge(); }
416
417RawObject scavengeImmortalize(Runtime* runtime) {
418 return Scavenger(runtime).scavengeIntoImmortal();
419}
420
421} // namespace py