Skip to content

Commit 1a2b08e

Browse files
Anton BikineevV8 LUCI CQ
authored and committed
heap,sandbox: Update EPT's evacuation entries in Scavenger.
If Scavenger interleaves MarkCompact that performs compaction on EPT, there may be some evacuation entries allocated in the young EPT that would back-point to the Scavenger's from-space. Add a new phase that updates all the evacuation entries in the young EPT up until `start_of_evacation_area`. Bug: 358485426 Change-Id: Ic23e57ff38279d4e93964cf21ee62eb01ebe8e61 Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/5805957 Reviewed-by: Samuel Groß <[email protected]> Reviewed-by: Michael Lippautz <[email protected]> Commit-Queue: Anton Bikineev <[email protected]> Cr-Commit-Position: refs/heads/main@{#95827}
1 parent 0f09350 commit 1a2b08e

File tree

6 files changed

+112
-3
lines changed

6 files changed

+112
-3
lines changed

src/heap/incremental-marking.cc

Lines changed: 62 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -498,6 +498,68 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
498498
weak_objects_->UpdateAfterScavenge();
499499
}
500500

501+
void IncrementalMarking::UpdateExternalPointerTableAfterScavenge() {
502+
#ifdef V8_COMPRESS_POINTERS
503+
if (!IsMajorMarking()) return;
504+
DCHECK(!v8_flags.separate_gc_phases);
505+
506+
heap_->isolate()->external_pointer_table().UpdateAllEvacuationEntries(
507+
heap_->young_external_pointer_space(), [](Address old_handle_location) {
508+
// 1) Resolve object start from the marking bitmap. Note that it's safe
509+
// since there is no black allocation for the young space (and hence
510+
// no range or page marking).
511+
// 2) Get a relocated object from the forwarding reference stored in the
512+
// map.
513+
// 3) Compute offset from the original object start to the handle
514+
// location.
515+
// 4) Compute and return the new handle location.
516+
//
517+
// Please note that instead of updating the evacuation entries, we
518+
// could simply clobber them all, which would still work, but limit
519+
// compaction to some extent. We can reconsider this in the future, if
520+
// relying on the marking bitmap becomes an issue (e.g. with inlined
521+
// mark-bits).
522+
const MemoryChunk* chunk =
523+
MemoryChunk::FromAddress(old_handle_location);
524+
if (!chunk->InYoungGeneration()) {
525+
return old_handle_location;
526+
}
527+
// TODO(358485426): Check that the page is not black.
528+
529+
Address base = MarkingBitmap::FindPreviousValidObject(
530+
static_cast<const PageMetadata*>(chunk->Metadata()),
531+
old_handle_location);
532+
Tagged<HeapObject> object(HeapObject::FromAddress(base));
533+
534+
MapWord map_word = object->map_word(kRelaxedLoad);
535+
if (!map_word.IsForwardingAddress()) {
536+
// There may be objects in the EPT that do not exist anymore. If these
537+
// objects are dead at scavenging time, their marking deque entries will
538+
// not point to forwarding addresses. Hence, we can discard them.
539+
#if DEBUG
540+
// Check that the handle did reside inside the original dead object.
541+
const int object_size = object->Size();
542+
// Map slots can never contain external pointers.
543+
DCHECK_LT(object.address(), old_handle_location);
544+
DCHECK_LT(old_handle_location, object.address() + object_size);
545+
#endif // DEBUG
546+
return kNullAddress;
547+
}
548+
549+
Tagged<HeapObject> moved_object = map_word.ToForwardingAddress(object);
550+
#if DEBUG
551+
const int object_size = moved_object->Size();
552+
// Map slots can never contain external pointers.
553+
DCHECK_LT(object.address(), old_handle_location);
554+
DCHECK_LT(old_handle_location, object.address() + object_size);
555+
#endif // DEBUG
556+
557+
const ptrdiff_t handle_offset = old_handle_location - base;
558+
return moved_object.address() + handle_offset;
559+
});
560+
#endif // V8_COMPRESS_POINTERS
561+
}
562+
501563
void IncrementalMarking::UpdateMarkedBytesAfterScavenge(
502564
size_t dead_bytes_in_new_space) {
503565
if (!IsMajorMarking()) return;

src/heap/incremental-marking.h

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -102,6 +102,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
102102
bool Stop();
103103

104104
void UpdateMarkingWorklistAfterScavenge();
105+
void UpdateExternalPointerTableAfterScavenge();
105106
void UpdateMarkedBytesAfterScavenge(size_t dead_bytes_in_new_space);
106107

107108
// Performs incremental marking step and finalizes marking if complete.

src/heap/scavenger.cc

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -506,6 +506,7 @@ void ScavengerCollector::CollectGarbage() {
506506
&Heap::UpdateYoungReferenceInExternalStringTableEntry);
507507

508508
heap_->incremental_marking()->UpdateMarkingWorklistAfterScavenge();
509+
heap_->incremental_marking()->UpdateExternalPointerTableAfterScavenge();
509510

510511
if (V8_UNLIKELY(v8_flags.always_use_string_forwarding_table)) {
511512
isolate_->string_forwarding_table()->UpdateAfterYoungEvacuation();

src/sandbox/compactible-external-entity-table.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ enum class ExternalEntityTableCompactionOutcome {
4242
* compacted. This decision is mostly based on the absolute and relative
4343
* size of the freelist.
4444
* - If compaction is needed, this algorithm determines by how many segments
45-
* it would like to shrink the space (N). It will then attempts to move all
45+
* it would like to shrink the space (N). It will then attempt to move all
4646
* live entries out of these segments so that they can be deallocated
4747
* afterwards during sweeping.
4848
* - The algorithm then simply selects the last N segments for evacuation, and

src/sandbox/external-pointer-table.cc

Lines changed: 43 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ class SegmentsIterator {
4242
using const_iterator = typename std::set<Segment>::const_reverse_iterator;
4343

4444
public:
45-
SegmentsIterator() {}
45+
SegmentsIterator() = default;
4646

4747
void AddSegments(const std::set<Segment>& segments, Data data) {
4848
streams_.emplace_back(segments.rbegin(), segments.rend(), data);
@@ -126,7 +126,7 @@ uint32_t ExternalPointerTable::EvacuateAndSweepAndCompact(Space* space,
126126
segments_iter.AddSegments(from_space_segments, from_space_compaction);
127127

128128
FreelistHead empty_freelist;
129-
from_space->freelist_head_.store(empty_freelist, std::memory_order_release);
129+
from_space->freelist_head_.store(empty_freelist, std::memory_order_relaxed);
130130

131131
for (Address field : from_space->invalidated_fields_)
132132
space->invalidated_fields_.push_back(field);
@@ -176,6 +176,13 @@ uint32_t ExternalPointerTable::EvacuateAndSweepAndCompact(Space* space,
176176
Address handle_location =
177177
payload.ExtractEvacuationEntryHandleLocation();
178178

179+
// The evacuation entry may be invalidated by the Scavenger that has
180+
// freed the object.
181+
if (handle_location == kNullAddress) {
182+
AddToFreelist(i);
183+
continue;
184+
}
185+
179186
// The external pointer field may have been invalidated in the meantime
180187
// (for example if the host object has been in-place converted to a
181188
// different type of object). In that case, the field no longer
@@ -295,6 +302,40 @@ void ExternalPointerTable::ResolveEvacuationEntryDuringSweeping(
295302
}
296303
}
297304

305+
void ExternalPointerTable::UpdateAllEvacuationEntries(
306+
Space* space, std::function<Address(Address)> function) {
307+
DCHECK(space->BelongsTo(this));
308+
DCHECK(!space->is_internal_read_only_space());
309+
310+
if (!space->IsCompacting()) return;
311+
312+
// Lock the space. Technically this is not necessary since no other thread can
313+
// allocate entries at this point, but some of the methods we call on the
314+
// space assert that the lock is held.
315+
base::MutexGuard guard(&space->mutex_);
316+
// Same for the invalidated fields mutex.
317+
base::MutexGuard invalidated_fields_guard(&space->invalidated_fields_mutex_);
318+
319+
const uint32_t start_of_evacuation_area =
320+
space->start_of_evacuation_area_.load(std::memory_order_relaxed);
321+
322+
// Iterate until the start of evacuation area.
323+
for (auto& segment : space->segments_) {
324+
if (segment.first_entry() == start_of_evacuation_area) return;
325+
for (uint32_t i = segment.first_entry(); i < segment.last_entry() + 1;
326+
++i) {
327+
ExternalPointerTableEntry& entry = at(i);
328+
ExternalPointerTableEntry::Payload payload = entry.GetRawPayload();
329+
if (!payload.ContainsEvacuationEntry()) {
330+
continue;
331+
}
332+
Address new_location =
333+
function(payload.ExtractEvacuationEntryHandleLocation());
334+
entry.MakeEvacuationEntry(new_location);
335+
}
336+
}
337+
}
338+
298339
} // namespace internal
299340
} // namespace v8
300341

src/sandbox/external-pointer-table.h

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -351,6 +351,10 @@ class V8_EXPORT_PRIVATE ExternalPointerTable
351351
uint32_t SweepAndCompact(Space* space, Counters* counters);
352352
uint32_t Sweep(Space* space, Counters* counters);
353353

354+
// Updates all evacuation entries with new handle locations. The function
355+
// takes the old handle location and returns the new one.
356+
void UpdateAllEvacuationEntries(Space*, std::function<Address(Address)>);
357+
354358
inline bool Contains(Space* space, ExternalPointerHandle handle) const;
355359

356360
// A resource outside of the V8 heap whose lifetime is tied to something

0 commit comments

Comments
 (0)