Skip to content

Commit

Permalink
[heap][sandbox] Add young generation for ExternalPointerTable
Browse files Browse the repository at this point in the history
When the sandbox is enabled, instead of allocating external pointer table
entries in one big space that is only collected as part of a major GC,
add a spatially partitioned young generation.  After a minor collection,
eagerly sweep that new space, so it is ready for fresh allocations.
When objects are promoted, their EPT entries get evacuated to the old
space.

Design doc:
https://docs.google.com/document/d/1Pvr4RbG_ZiaYZ-VkE22bSyCOiOntpbSBNJIhv45JTKk

Bug: chromium:40643874
Change-Id: I08978c087c809328fd614325c9b238e4e5a0994a
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/5185345
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Andy Wingo <wingo@igalia.com>
Cr-Commit-Position: refs/heads/main@{#93653}
  • Loading branch information
wingo authored and V8 LUCI CQ committed Apr 30, 2024
1 parent c42bda3 commit 6219bc7
Show file tree
Hide file tree
Showing 25 changed files with 489 additions and 104 deletions.
8 changes: 4 additions & 4 deletions src/codegen/external-reference.cc
Original file line number Diff line number Diff line change
Expand Up @@ -425,19 +425,19 @@ FUNCTION_REFERENCE(delete_handle_scope_extensions,
FUNCTION_REFERENCE(ephemeron_key_write_barrier_function,
Heap::EphemeronKeyWriteBarrierFromCode)

ExternalPointerHandle AllocateAndInitializeExternalPointerTableEntry(
ExternalPointerHandle AllocateAndInitializeYoungExternalPointerTableEntry(
Isolate* isolate, Address pointer) {
#ifdef V8_ENABLE_SANDBOX
return isolate->external_pointer_table().AllocateAndInitializeEntry(
isolate->heap()->external_pointer_space(), pointer,
isolate->heap()->young_external_pointer_space(), pointer,
kExternalObjectValueTag);
#else
return 0;
#endif // V8_ENABLE_SANDBOX
}

FUNCTION_REFERENCE(allocate_and_initialize_external_pointer_table_entry,
AllocateAndInitializeExternalPointerTableEntry)
FUNCTION_REFERENCE(allocate_and_initialize_young_external_pointer_table_entry,
AllocateAndInitializeYoungExternalPointerTableEntry)

FUNCTION_REFERENCE(get_date_field_function, JSDate::GetField)

Expand Down
4 changes: 2 additions & 2 deletions src/codegen/external-reference.h
Original file line number Diff line number Diff line change
Expand Up @@ -122,8 +122,8 @@ class StatsCounter;
V(address_of_shared_string_table_flag, "v8_flags.shared_string_table") \
V(address_of_the_hole_nan, "the_hole_nan") \
V(address_of_uint32_bias, "uint32_bias") \
V(allocate_and_initialize_external_pointer_table_entry, \
"AllocateAndInitializeExternalPointerTableEntry") \
V(allocate_and_initialize_young_external_pointer_table_entry, \
"AllocateAndInitializeYoungExternalPointerTableEntry") \
V(baseline_pc_for_bytecode_offset, "BaselinePCForBytecodeOffset") \
V(baseline_pc_for_next_executed_bytecode, \
"BaselinePCForNextExecutedBytecode") \
Expand Down
6 changes: 3 additions & 3 deletions src/compiler/turboshaft/fast-api-call-lowering-reducer.h
Original file line number Diff line number Diff line change
Expand Up @@ -585,14 +585,14 @@ class FastApiCallLoweringReducer : public Next {
builder.AddReturn(MachineType::Uint32());
builder.AddParam(MachineType::Pointer());
builder.AddParam(MachineType::Pointer());
OpIndex allocate_and_initialize_external_pointer_table_entry =
OpIndex allocate_and_initialize_young_external_pointer_table_entry =
__ ExternalConstant(
ExternalReference::
allocate_and_initialize_external_pointer_table_entry());
allocate_and_initialize_young_external_pointer_table_entry());
auto call_descriptor =
Linkage::GetSimplifiedCDescriptor(__ graph_zone(), builder.Build());
OpIndex handle =
__ Call(allocate_and_initialize_external_pointer_table_entry,
__ Call(allocate_and_initialize_young_external_pointer_table_entry,
{isolate_ptr, pointer},
TSCallDescriptor::Create(call_descriptor, CanThrow::kNo,
__ graph_zone()));
Expand Down
9 changes: 7 additions & 2 deletions src/execution/isolate.cc
Original file line number Diff line number Diff line change
Expand Up @@ -4259,7 +4259,9 @@ void Isolate::Deinit() {
}

#ifdef V8_COMPRESS_POINTERS
external_pointer_table().TearDownSpace(heap()->external_pointer_space());
external_pointer_table().TearDownSpace(
heap()->young_external_pointer_space());
external_pointer_table().TearDownSpace(heap()->old_external_pointer_space());
external_pointer_table().DetachSpaceFromReadOnlySegment(
heap()->read_only_external_pointer_space());
external_pointer_table().TearDownSpace(
Expand Down Expand Up @@ -5028,7 +5030,10 @@ bool Isolate::Init(SnapshotData* startup_snapshot_data,
heap()->read_only_external_pointer_space());
external_pointer_table().AttachSpaceToReadOnlySegment(
heap()->read_only_external_pointer_space());
external_pointer_table().InitializeSpace(heap()->external_pointer_space());
external_pointer_table().InitializeSpace(
heap()->young_external_pointer_space());
external_pointer_table().InitializeSpace(
heap()->old_external_pointer_space());
cpp_heap_pointer_table().Initialize();
cpp_heap_pointer_table().InitializeSpace(heap()->cpp_heap_pointer_space());
#endif // V8_COMPRESS_POINTERS
Expand Down
7 changes: 7 additions & 0 deletions src/heap/heap.cc
Original file line number Diff line number Diff line change
Expand Up @@ -4205,6 +4205,13 @@ void Heap::NotifyObjectLayoutChange(
USE(num_invalidated_slots);
DCHECK_GT(num_invalidated_slots, 0);
}

// During concurrent marking for a minor GC, the heap also builds up a
// RememberedSet of external pointer field locations, and uses that set to
// evacuate external pointer table entries when promoting objects. Here we
// would need to invalidate that set too; until we do, assert that
// NotifyObjectLayoutChange is never called on young objects.
CHECK(!InYoungGeneration(object));
#endif
}

Expand Down
12 changes: 8 additions & 4 deletions src/heap/heap.h
Original file line number Diff line number Diff line change
Expand Up @@ -787,8 +787,11 @@ class Heap final {
inline Space* space(int idx) const;

#ifdef V8_COMPRESS_POINTERS
ExternalPointerTable::Space* external_pointer_space() {
return &external_pointer_space_;
ExternalPointerTable::Space* young_external_pointer_space() {
return &young_external_pointer_space_;
}
ExternalPointerTable::Space* old_external_pointer_space() {
return &old_external_pointer_space_;
}
ExternalPointerTable::Space* read_only_external_pointer_space() {
return &read_only_external_pointer_space_;
Expand Down Expand Up @@ -2158,9 +2161,10 @@ class Heap final {
std::unique_ptr<Space> space_[LAST_SPACE + 1];

#ifdef V8_COMPRESS_POINTERS
// The space in the ExternalPointerTable containing entries owned by objects
// The spaces in the ExternalPointerTable containing entries owned by objects
// in this heap.
ExternalPointerTable::Space external_pointer_space_;
ExternalPointerTable::Space young_external_pointer_space_;
ExternalPointerTable::Space old_external_pointer_space_;
// Likewise but for slots in host objects in ReadOnlySpace.
ExternalPointerTable::Space read_only_external_pointer_space_;
// Space in the ExternalPointerTable containing entries owned by objects in
Expand Down
5 changes: 0 additions & 5 deletions src/heap/incremental-marking.cc
Original file line number Diff line number Diff line change
Expand Up @@ -315,11 +315,6 @@ void IncrementalMarking::StartMarkingMajor() {
is_compacting_ = major_collector_->StartCompaction(
MarkCompactCollector::StartCompactionMode::kIncremental);

#ifdef V8_COMPRESS_POINTERS
heap_->external_pointer_space()->StartCompactingIfNeeded();
heap_->cpp_heap_pointer_space()->StartCompactingIfNeeded();
#endif // V8_COMPRESS_POINTERS

major_collector_->StartMarking();
current_local_marking_worklists_ =
major_collector_->local_marking_worklists();
Expand Down
16 changes: 10 additions & 6 deletions src/heap/mark-compact.cc
Original file line number Diff line number Diff line change
Expand Up @@ -360,6 +360,12 @@ void MarkCompactCollector::StartMarking() {
heap()->Unmark();
}

#ifdef V8_COMPRESS_POINTERS
heap_->young_external_pointer_space()->StartCompactingIfNeeded();
heap_->old_external_pointer_space()->StartCompactingIfNeeded();
heap_->cpp_heap_pointer_space()->StartCompactingIfNeeded();
#endif // V8_COMPRESS_POINTERS

// CppHeap's marker must be initialized before the V8 marker to allow
// exchanging of worklists.
if (heap_->cpp_heap()) {
Expand Down Expand Up @@ -691,10 +697,6 @@ void MarkCompactCollector::Prepare() {
// be set up.
CppHeap::From(heap_->cpp_heap_)->StartMarking();
}
#ifdef V8_COMPRESS_POINTERS
heap_->external_pointer_space()->StartCompactingIfNeeded();
heap_->cpp_heap_pointer_space()->StartCompactingIfNeeded();
#endif // V8_COMPRESS_POINTERS
}

if (heap_->new_space()) {
Expand Down Expand Up @@ -2896,8 +2898,10 @@ void MarkCompactCollector::ClearNonLiveReferences() {
// Note we explicitly do NOT run SweepAndCompact on
// read_only_external_pointer_space since these entries are all immortal by
// definition.
isolate->external_pointer_table().SweepAndCompact(
isolate->heap()->external_pointer_space(), isolate->counters());
isolate->external_pointer_table().EvacuateAndSweepAndCompact(
isolate->heap()->old_external_pointer_space(),
isolate->heap()->young_external_pointer_space(), isolate->counters());
isolate->heap()->young_external_pointer_space()->AssertEmpty();
if (isolate->owns_shareable_data()) {
isolate->shared_external_pointer_table().SweepAndCompact(
isolate->shared_external_pointer_space(), isolate->counters());
Expand Down
17 changes: 11 additions & 6 deletions src/heap/marking-visitor-inl.h
Original file line number Diff line number Diff line change
Expand Up @@ -180,12 +180,17 @@ void MarkingVisitorBase<ConcreteVisitor>::VisitExternalPointer(
DCHECK_NE(slot.tag(), kExternalPointerNullTag);
if (slot.HasExternalPointerHandle()) {
ExternalPointerHandle handle = slot.Relaxed_LoadHandle();
ExternalPointerTable* table = IsSharedExternalPointerType(slot.tag())
? shared_external_pointer_table_
: external_pointer_table_;
ExternalPointerTable::Space* space = IsSharedExternalPointerType(slot.tag())
? shared_external_pointer_space_
: heap_->external_pointer_space();
ExternalPointerTable* table;
ExternalPointerTable::Space* space;
if (IsSharedExternalPointerType(slot.tag())) {
table = shared_external_pointer_table_;
space = shared_external_pointer_space_;
} else {
table = external_pointer_table_;
space = Heap::InYoungGeneration(host)
? heap_->young_external_pointer_space()
: heap_->old_external_pointer_space();
}
table->Mark(space, handle, slot.address());
}
#endif // V8_COMPRESS_POINTERS
Expand Down
1 change: 1 addition & 0 deletions src/heap/memory-chunk-layout.h
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ enum RememberedSetType {
OLD_TO_SHARED,
OLD_TO_CODE,
TRUSTED_TO_TRUSTED,
SURVIVOR_TO_EXTERNAL_POINTER,
NUMBER_OF_REMEMBERED_SET_TYPES
};

Expand Down
50 changes: 50 additions & 0 deletions src/heap/minor-mark-sweep.cc
Original file line number Diff line number Diff line change
Expand Up @@ -294,6 +294,19 @@ void MinorMarkSweepCollector::FinishConcurrentMarking() {
}
}

#ifdef DEBUG
// Returns true iff no page of the paged new space carries a
// SURVIVOR_TO_EXTERNAL_POINTER remembered set. Used to assert that a minor GC
// starts with a clean slate of external-pointer slots.
//
// Fix: the original declared both the space pointer and the per-iteration page
// pointer as `p`, so the inner declaration shadowed the outer one. Renamed to
// distinct identifiers to avoid the shadowing (and -Wshadow warnings).
static bool ExternalPointerRememberedSetsEmpty(PagedNewSpace* space) {
  PagedSpaceForNewSpace* paged_space = space->paged_space();
  for (auto it = paged_space->begin(); it != paged_space->end();) {
    PageMetadata* page = *(it++);
    if (page->slot_set<SURVIVOR_TO_EXTERNAL_POINTER>()) {
      return false;
    }
  }
  return true;
}
#endif

void MinorMarkSweepCollector::StartMarking(bool force_use_background_threads) {
#ifdef VERIFY_HEAP
if (v8_flags.verify_heap) {
Expand Down Expand Up @@ -327,6 +340,7 @@ void MinorMarkSweepCollector::StartMarking(bool force_use_background_threads) {
marking_worklists_ = std::make_unique<MarkingWorklists>();
DCHECK_NULL(main_marking_visitor_);
DCHECK_NULL(pretenuring_feedback_);
DCHECK(ExternalPointerRememberedSetsEmpty(heap_->paged_new_space()));
pretenuring_feedback_ =
std::make_unique<PretenuringHandler::PretenuringFeedbackMap>(
PretenuringHandler::kInitialFeedbackCapacity);
Expand Down Expand Up @@ -809,6 +823,32 @@ bool ShouldMovePage(PageMetadata* p, intptr_t live_bytes,

} // namespace

// Moves the external pointer table entries referenced from page `p` out of the
// young EPT space and into the old EPT space. Called when `p` is promoted, so
// that its EPT entries survive the eager sweep of the young space.
void MinorMarkSweepCollector::EvacuateExternalPointerReferences(
    MutablePageMetadata* p) {
#ifdef V8_COMPRESS_POINTERS
  using BasicSlotSet = ::heap::base::BasicSlotSet<kTaggedSize>;
  BasicSlotSet* slot_set = p->slot_set<SURVIVOR_TO_EXTERNAL_POINTER>();
  // Nothing to do if the page recorded no external pointer fields.
  if (!slot_set) return;
  ExternalPointerTable& ept = heap_->isolate()->external_pointer_table();
  ExternalPointerTable::Space* from_space =
      heap_->young_external_pointer_space();
  ExternalPointerTable::Space* to_space = heap_->old_external_pointer_space();
  auto evacuated_count = slot_set->Iterate<BasicSlotSet::AccessMode::NON_ATOMIC>(
      p->ChunkAddress(), 0, p->buckets(),
      [&ept, from_space, to_space](Address handle_location) {
        // Each recorded slot holds the on-heap location of an
        // ExternalPointerHandle; move the entry it refers to into the old
        // space, clearing its mark bit in the process.
        ExternalPointerHandle handle =
            *reinterpret_cast<ExternalPointerHandle*>(handle_location);
        ept.Evacuate(from_space, to_space, handle, handle_location,
                     ExternalPointerTable::EvacuateMarkMode::kClearMark);
        return KEEP_SLOT;
      },
      BasicSlotSet::EmptyBucketMode::FREE_EMPTY_BUCKETS);
  // A non-null slot set is expected to contain at least one slot.
  DCHECK(evacuated_count);
  USE(evacuated_count);
  // SURVIVOR_TO_EXTERNAL_POINTER remembered set will be freed later by the
  // sweeper.
#endif
}

bool MinorMarkSweepCollector::StartSweepNewSpace() {
TRACE_GC(heap_->tracer(), GCTracer::Scope::MINOR_MS_SWEEP_NEW);
PagedSpaceForNewSpace* paged_space = heap_->paged_new_space()->paged_space();
Expand Down Expand Up @@ -838,6 +878,7 @@ bool MinorMarkSweepCollector::StartSweepNewSpace() {
}

if (ShouldMovePage(p, live_bytes_on_page, p->wasted_memory())) {
EvacuateExternalPointerReferences(p);
heap_->new_space()->PromotePageToOldSpace(p);
has_promoted_pages = true;
sweeper()->AddPromotedPage(p);
Expand All @@ -848,6 +889,13 @@ bool MinorMarkSweepCollector::StartSweepNewSpace() {
}
}

#ifdef V8_COMPRESS_POINTERS
// Now that we have evacuated any external pointers, rebuild EPT free-lists
// for the new space.
heap_->isolate()->external_pointer_table().SweepAndCompact(
heap_->young_external_pointer_space(), heap_->isolate()->counters());
#endif

if (v8_flags.gc_verbose) {
PrintIsolate(heap_->isolate(),
"sweeping: space=%s initialized_for_sweeping=%d",
Expand All @@ -871,6 +919,7 @@ bool MinorMarkSweepCollector::SweepNewLargeSpace() {
LargePageMetadata* current = *it;
MemoryChunk* chunk = current->Chunk();
it++;

Tagged<HeapObject> object = current->GetObject();
if (!non_atomic_marking_state_->IsMarked(object)) {
// Object is dead and page can be released.
Expand All @@ -882,6 +931,7 @@ bool MinorMarkSweepCollector::SweepNewLargeSpace() {
chunk->ClearFlagNonExecutable(MemoryChunk::TO_PAGE);
chunk->SetFlagNonExecutable(MemoryChunk::FROM_PAGE);
current->ProgressBar().ResetIfEnabled();
EvacuateExternalPointerReferences(current);
old_lo_space->PromoteNewLargeObject(current);
has_promoted_pages = true;
sweeper()->AddPromotedPage(current);
Expand Down
1 change: 1 addition & 0 deletions src/heap/minor-mark-sweep.h
Original file line number Diff line number Diff line change
Expand Up @@ -195,6 +195,7 @@ class MinorMarkSweepCollector final {
YoungGenerationRootMarkingVisitor& root_visitor);
void MarkRootsFromConservativeStack(
YoungGenerationRootMarkingVisitor& root_visitor);
void EvacuateExternalPointerReferences(MutablePageMetadata* p);

void TraceFragmentation();
void ClearNonLiveReferences();
Expand Down
1 change: 1 addition & 0 deletions src/heap/mutable-page.cc
Original file line number Diff line number Diff line change
Expand Up @@ -140,6 +140,7 @@ void MutablePageMetadata::ReleaseAllocatedMemoryNeededForWritableChunk() {
ReleaseSlotSet(OLD_TO_CODE);
ReleaseSlotSet(OLD_TO_SHARED);
ReleaseSlotSet(TRUSTED_TO_TRUSTED);
ReleaseSlotSet(SURVIVOR_TO_EXTERNAL_POINTER);
ReleaseTypedSlotSet(OLD_TO_NEW);
ReleaseTypedSlotSet(OLD_TO_OLD);
ReleaseTypedSlotSet(OLD_TO_SHARED);
Expand Down
28 changes: 28 additions & 0 deletions src/heap/scavenger-inl.h
Original file line number Diff line number Diff line change
Expand Up @@ -488,6 +488,8 @@ class ScavengeVisitor final : public NewSpaceVisitor<ScavengeVisitor> {
V8_INLINE int VisitJSArrayBuffer(Tagged<Map> map,
Tagged<JSArrayBuffer> object);
V8_INLINE int VisitJSApiObject(Tagged<Map> map, Tagged<JSObject> object);
V8_INLINE void VisitExternalPointer(Tagged<HeapObject> host,
ExternalPointerSlot slot);

private:
template <typename TSlot>
Expand Down Expand Up @@ -555,6 +557,32 @@ int ScavengeVisitor::VisitJSApiObject(Tagged<Map> map,
return size;
}

// Marks the young-space EPT entry referenced by `slot` so the post-scavenge
// sweep of the young external pointer space keeps it alive.
void ScavengeVisitor::VisitExternalPointer(Tagged<HeapObject> host,
                                           ExternalPointerSlot slot) {
#ifdef V8_COMPRESS_POINTERS
  DCHECK_NE(slot.tag(), kExternalPointerNullTag);
  DCHECK(!IsSharedExternalPointerType(slot.tag()));
  DCHECK(Heap::InYoungGeneration(host));

  // While incremental (major) marking is running, a whole-heap trace is
  // already responsible for marking live EPT entries and the scavenger will
  // not sweep the young EPT space; defer all tracing/sweeping of entries to
  // the upcoming major GC. The entry may or may not already be marked by the
  // incremental marker.
  if (scavenger_->is_incremental_marking_) return;

  // TODO(chromium:337580006): Remove when pointer compression always uses
  // EPT.
  if (!slot.HasExternalPointerHandle()) return;

  Heap* const heap = scavenger_->heap();
  heap->isolate()->external_pointer_table().Mark(
      heap->young_external_pointer_space(), slot.Relaxed_LoadHandle(),
      slot.address());
#endif  // V8_COMPRESS_POINTERS
}

int ScavengeVisitor::VisitEphemeronHashTable(Tagged<Map> map,
Tagged<EphemeronHashTable> table) {
// Register table with the scavenger, so it can take care of the weak keys
Expand Down

0 comments on commit 6219bc7

Please sign in to comment.