deps: cherry-pick f4376ec801e1ded from V8 upstream
    Original commit message:
      [heap] Make maximum regular code object size a runtime value.

      Executable V8 pages include 3 reserved OS pages: one for the writable
      header and two as guards. On systems with 64k OS pages, the amount of
      allocatable space left for objects can be considerably smaller than the
      page size: only 64k for each 256k page.

      This means regular code objects cannot be larger than 64k, while the
      maximum regular object size is fixed at 128k, half of the page size. As
      a result, code objects never reach this limit and we can end up filling
      regular pages with only a few large code objects.

      To fix this, we change the maximum code object size to a runtime value,
      set to half of the allocatable space per page. On systems with 64k OS
      pages, the limit will be 32k.

      Alternatively, we could increase the V8 page size to 512k on Arm64 Linux
      so that we would not waste code space. However, systems with 4k OS pages
      are more common, and those with 64k pages tend to have more memory
      available, so we should be able to live with it.

      Bug: v8:10808
      Change-Id: I5d807e7a3df89f1e9c648899e9ba2f8e2648264c
      Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2460809
      Reviewed-by: Igor Sheludko <ishell@chromium.org>
      Reviewed-by: Georg Neis <neis@chromium.org>
      Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
      Commit-Queue: Pierre Langlois <pierre.langlois@arm.com>
      Cr-Commit-Position: refs/heads/master@{#70569}

PR-URL: #37225
Refs: nodejs/help#3202
Reviewed-By: Michael Dawson <midawson@redhat.com>
Reviewed-By: Stewart X Addison <sxa@redhat.com>
Reviewed-By: Juan José Arboleda <soyjuanarbol@gmail.com>
Reviewed-By: James M Snell <jasnell@gmail.com>
danbev authored and MylesBorins committed Apr 6, 2021
1 parent 78680c1 commit ae1fa98
Showing 18 changed files with 143 additions and 21 deletions.
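
Note: as a rough aid for reading the diff below, here is a small stand-alone sketch of the space arithmetic described in the commit message. The constants mirror the values quoted there (a 256k V8 page, i.e. 1 << kPageSizeBits with kPageSizeBits = 18, and three reserved OS pages per executable page); they are illustrative and are not taken from the actual V8 headers, which also subtract the memory chunk header from the allocatable space.

// Illustrative only: the limit derivation described in the commit message.
#include <cstdio>

int main() {
  const int kV8PageSize = 256 * 1024;  // 1 << 18, matching kPageSizeBits = 18
  const int kReservedOsPages = 3;      // writable header page + two guard pages

  const int os_page_sizes[] = {4 * 1024, 64 * 1024};
  for (int os_page_size : os_page_sizes) {
    int allocatable = kV8PageSize - kReservedOsPages * os_page_size;
    int max_regular_code_object = allocatable / 2;  // half the allocatable space
    std::printf("%2dk OS pages: %3dk allocatable, %3dk max regular code object\n",
                os_page_size / 1024, allocatable / 1024,
                max_regular_code_object / 1024);
  }
  // Prints roughly:
  //  4k OS pages: 244k allocatable, 122k max regular code object
  // 64k OS pages:  64k allocatable,  32k max regular code object
  return 0;
}
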
4 changes: 4 additions & 0 deletions deps/v8/src/base/build_config.h
@@ -205,6 +205,10 @@
// PPC has large (64KB) physical pages.
const int kPageSizeBits = 19;
#else
// Arm64 supports up to 64k OS pages on Linux, however 4k pages are more common
// so we keep the V8 page size at 256k. Nonetheless, we need to make sure we
// don't decrease it further in the future due to reserving 3 OS pages for every
// executable V8 page.
const int kPageSizeBits = 18;
#endif

2 changes: 1 addition & 1 deletion deps/v8/src/compiler/allocation-builder.h
@@ -27,7 +27,7 @@ class AllocationBuilder final {
// Primitive allocation of static size.
void Allocate(int size, AllocationType allocation = AllocationType::kYoung,
Type type = Type::Any()) {
DCHECK_LE(size, kMaxRegularHeapObjectSize);
DCHECK_LE(size, Heap::MaxRegularHeapObjectSize(allocation));
effect_ = graph()->NewNode(
common()->BeginRegion(RegionObservability::kNotObservable), effect_);
allocation_ =
4 changes: 4 additions & 0 deletions deps/v8/src/compiler/memory-lowering.cc
@@ -98,6 +98,10 @@ Reduction MemoryLowering::ReduceAllocateRaw(
DCHECK_EQ(IrOpcode::kAllocateRaw, node->opcode());
DCHECK_IMPLIES(allocation_folding_ == AllocationFolding::kDoAllocationFolding,
state_ptr != nullptr);
// Code objects may have a maximum size smaller than kMaxHeapObjectSize due to
// guard pages. If we need to support allocating code here we would need to
// call MemoryChunkLayout::MaxRegularCodeObjectSize() at runtime.
DCHECK_NE(allocation_type, AllocationType::kCode);
Node* value;
Node* size = node->InputAt(0);
Node* effect = node->InputAt(1);
3 changes: 2 additions & 1 deletion deps/v8/src/diagnostics/objects-debug.cc
@@ -955,7 +955,8 @@ void Code::CodeVerify(Isolate* isolate) {
// everything is set up.
// CHECK_EQ(ReadOnlyHeap::Contains(*this), !IsExecutable());
relocation_info().ObjectVerify(isolate);
CHECK(Code::SizeFor(body_size()) <= kMaxRegularHeapObjectSize ||
CHECK(Code::SizeFor(body_size()) <=
MemoryChunkLayout::MaxRegularCodeObjectSize() ||
isolate->heap()->InSpace(*this, CODE_LO_SPACE));
Address last_gc_pc = kNullAddress;

3 changes: 2 additions & 1 deletion deps/v8/src/heap/factory-base.cc
@@ -721,7 +721,8 @@ template <typename Impl>
HeapObject FactoryBase<Impl>::AllocateRawArray(int size,
AllocationType allocation) {
HeapObject result = AllocateRaw(size, allocation);
if (size > kMaxRegularHeapObjectSize && FLAG_use_marking_progress_bar) {
if ((size > Heap::MaxRegularHeapObjectSize(allocation)) &&
FLAG_use_marking_progress_bar) {
MemoryChunk* chunk = MemoryChunk::FromHeapObject(result);
chunk->SetFlag<AccessMode::ATOMIC>(MemoryChunk::HAS_PROGRESS_BAR);
}
3 changes: 2 additions & 1 deletion deps/v8/src/heap/factory.cc
@@ -346,7 +346,8 @@ MaybeHandle<FixedArray> Factory::TryNewFixedArray(
AllocationResult allocation = heap->AllocateRaw(size, allocation_type);
HeapObject result;
if (!allocation.To(&result)) return MaybeHandle<FixedArray>();
if (size > kMaxRegularHeapObjectSize && FLAG_use_marking_progress_bar) {
if ((size > Heap::MaxRegularHeapObjectSize(allocation_type)) &&
FLAG_use_marking_progress_bar) {
MemoryChunk* chunk = MemoryChunk::FromHeapObject(result);
chunk->SetFlag<AccessMode::ATOMIC>(MemoryChunk::HAS_PROGRESS_BAR);
}
6 changes: 4 additions & 2 deletions deps/v8/src/heap/heap-inl.h
@@ -192,7 +192,9 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationType type,
IncrementObjectCounters();
#endif

bool large_object = size_in_bytes > kMaxRegularHeapObjectSize;
size_t large_object_threshold = MaxRegularHeapObjectSize(type);
bool large_object =
static_cast<size_t>(size_in_bytes) > large_object_threshold;

HeapObject object;
AllocationResult allocation;
@@ -279,7 +281,7 @@ HeapObject Heap::AllocateRawWith(int size, AllocationType allocation,
Address* limit = heap->NewSpaceAllocationLimitAddress();
if (allocation == AllocationType::kYoung &&
alignment == AllocationAlignment::kWordAligned &&
size <= kMaxRegularHeapObjectSize &&
size <= MaxRegularHeapObjectSize(allocation) &&
(*limit - *top >= static_cast<unsigned>(size)) &&
V8_LIKELY(!FLAG_single_generation && FLAG_inline_new &&
FLAG_gc_interval == 0)) {
8 changes: 8 additions & 0 deletions deps/v8/src/heap/heap.cc
@@ -4963,6 +4963,14 @@ bool Heap::AllocationLimitOvershotByLargeMargin() {
return v8_overshoot >= v8_margin || global_overshoot >= global_margin;
}

// static
int Heap::MaxRegularHeapObjectSize(AllocationType allocation) {
if (allocation == AllocationType::kCode) {
return MemoryChunkLayout::MaxRegularCodeObjectSize();
}
return kMaxRegularHeapObjectSize;
}

bool Heap::ShouldOptimizeForLoadTime() {
return isolate()->rail_mode() == PERFORMANCE_LOAD &&
!AllocationLimitOvershotByLargeMargin() &&
16 changes: 13 additions & 3 deletions deps/v8/src/heap/heap.h
@@ -481,7 +481,7 @@ class Heap {

bool IsImmovable(HeapObject object);

static bool IsLargeObject(HeapObject object);
V8_EXPORT_PRIVATE static bool IsLargeObject(HeapObject object);

// This method supports the deserialization allocator. All allocations
// are word-aligned. The method should never fail to allocate since the
@@ -1316,6 +1316,14 @@ class Heap {
// more eager to finalize incremental marking.
bool AllocationLimitOvershotByLargeMargin();

// Return the maximum size objects can be before having to allocate them as
// large objects. This takes into account allocating in the code space for
// which the size of the allocatable space per V8 page may depend on the OS
// page size at runtime. You may use kMaxRegularHeapObjectSize as a constant
// instead if you know the allocation isn't in the code spaces.
V8_EXPORT_PRIVATE static int MaxRegularHeapObjectSize(
AllocationType allocation);

// ===========================================================================
// Prologue/epilogue callback methods.========================================
// ===========================================================================
@@ -1404,8 +1412,10 @@
// Heap object allocation tracking. ==========================================
// ===========================================================================

void AddHeapObjectAllocationTracker(HeapObjectAllocationTracker* tracker);
void RemoveHeapObjectAllocationTracker(HeapObjectAllocationTracker* tracker);
V8_EXPORT_PRIVATE void AddHeapObjectAllocationTracker(
HeapObjectAllocationTracker* tracker);
V8_EXPORT_PRIVATE void RemoveHeapObjectAllocationTracker(
HeapObjectAllocationTracker* tracker);
bool has_heap_object_allocation_tracker() const {
return !allocation_trackers_.empty();
}
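
The comment added to heap.h above spells out the intended call-site pattern: query the runtime limit whenever the allocation may land in a code space, and keep using the kMaxRegularHeapObjectSize constant otherwise. A minimal stand-alone sketch of that dispatch follows, with hypothetical stand-in values rather than the real V8 limits:

// Hypothetical sketch of the dispatch pattern used by Heap::AllocateRaw and
// the factories in this patch; the numeric limits here are illustrative only.
#include <cstdio>

enum class AllocationType { kYoung, kOld, kCode };

constexpr int kMaxRegularHeapObjectSize = 128 * 1024;  // fixed constant
constexpr int kMaxRegularCodeObjectSize = 32 * 1024;   // what the runtime limit
                                                       // works out to on a
                                                       // 64k-page host

int MaxRegularHeapObjectSize(AllocationType allocation) {
  // Code objects get the tighter, OS-page-size-dependent limit; all other
  // allocation types keep the fixed constant.
  if (allocation == AllocationType::kCode) return kMaxRegularCodeObjectSize;
  return kMaxRegularHeapObjectSize;
}

bool NeedsLargeObjectSpace(int size_in_bytes, AllocationType type) {
  // Anything above the per-type limit is routed to the matching large object
  // space (CODE_LO_SPACE for code), mirroring the check in Heap::AllocateRaw.
  return size_in_bytes > MaxRegularHeapObjectSize(type);
}

int main() {
  std::printf("40k code allocation needs large object space: %s\n",
              NeedsLargeObjectSpace(40 * 1024, AllocationType::kCode) ? "yes" : "no");
  std::printf("40k old-space allocation needs large object space: %s\n",
              NeedsLargeObjectSpace(40 * 1024, AllocationType::kOld) ? "yes" : "no");
  return 0;
}

On hosts with 4k OS pages the two limits stay close together, so in practice the branch mainly matters where guard pages consume a large share of the executable page.
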
7 changes: 6 additions & 1 deletion deps/v8/src/heap/memory-chunk.cc
@@ -47,7 +47,6 @@ intptr_t MemoryChunkLayout::ObjectEndOffsetInCodePage() {

size_t MemoryChunkLayout::AllocatableMemoryInCodePage() {
size_t memory = ObjectEndOffsetInCodePage() - ObjectStartOffsetInCodePage();
DCHECK_LE(kMaxRegularHeapObjectSize, memory);
return memory;
}

@@ -77,6 +76,12 @@ size_t MemoryChunkLayout::AllocatableMemoryInMemoryChunk(
return AllocatableMemoryInDataPage();
}

int MemoryChunkLayout::MaxRegularCodeObjectSize() {
int size = static_cast<int>(AllocatableMemoryInCodePage() / 2);
DCHECK_LE(size, kMaxRegularHeapObjectSize);
return size;
}

#ifdef THREAD_SANITIZER
void MemoryChunk::SynchronizedHeapLoad() {
CHECK(reinterpret_cast<Heap*>(base::Acquire_Load(
2 changes: 2 additions & 0 deletions deps/v8/src/heap/memory-chunk.h
@@ -32,6 +32,8 @@ class V8_EXPORT_PRIVATE MemoryChunkLayout {
static size_t AllocatableMemoryInDataPage();
static size_t ObjectStartOffsetInMemoryChunk(AllocationSpace space);
static size_t AllocatableMemoryInMemoryChunk(AllocationSpace space);

static int MaxRegularCodeObjectSize();
};

// MemoryChunk represents a memory region owned by a specific space.
6 changes: 4 additions & 2 deletions deps/v8/src/heap/spaces.h
@@ -127,8 +127,10 @@ class Space;
#define DCHECK_OBJECT_SIZE(size) \
DCHECK((0 < size) && (size <= kMaxRegularHeapObjectSize))

#define DCHECK_CODEOBJECT_SIZE(size, code_space) \
DCHECK((0 < size) && (size <= code_space->AreaSize()))
#define DCHECK_CODEOBJECT_SIZE(size, code_space) \
DCHECK((0 < size) && \
(size <= std::min(MemoryChunkLayout::MaxRegularCodeObjectSize(), \
code_space->AreaSize())))

using FreeListCategoryType = int32_t;

3 changes: 1 addition & 2 deletions deps/v8/src/utils/allocation.cc
@@ -165,8 +165,7 @@ void* GetRandomMmapAddr() {
void* AllocatePages(v8::PageAllocator* page_allocator, void* hint, size_t size,
size_t alignment, PageAllocator::Permission access) {
DCHECK_NOT_NULL(page_allocator);
DCHECK_EQ(hint, AlignedAddress(hint, alignment));
DCHECK(IsAligned(size, page_allocator->AllocatePageSize()));
DCHECK(IsAligned(size, page_allocator->CommitPageSize()));
if (FLAG_randomize_all_allocations) {
hint = page_allocator->GetRandomMmapAddr();
}
1 change: 1 addition & 0 deletions deps/v8/test/cctest/heap/heap-tester.h
@@ -34,6 +34,7 @@
V(InvalidatedSlotsSomeInvalidatedRanges) \
V(TestNewSpaceRefsInCopiedCode) \
V(GCFlags) \
V(CodeLargeObjectSpace64k) \
V(MarkCompactCollector) \
V(MarkCompactEpochCounter) \
V(MemoryReducerActivationForSmallHeaps) \
85 changes: 83 additions & 2 deletions deps/v8/test/cctest/heap/test-heap.cc
@@ -6404,7 +6404,7 @@ HEAP_TEST(Regress5831) {

// Generate the code.
Handle<Code> code = GenerateDummyImmovableCode(isolate);
CHECK_GE(i::kMaxRegularHeapObjectSize, code->Size());
CHECK_GE(MemoryChunkLayout::MaxRegularCodeObjectSize(), code->Size());
CHECK(!heap->code_space()->first_page()->Contains(code->address()));

// Ensure it's not in large object space.
@@ -6889,7 +6889,7 @@ TEST(CodeObjectRegistry) {
{
// Ensure that both code objects end up on the same page.
CHECK(HeapTester::CodeEnsureLinearAllocationArea(
heap, kMaxRegularHeapObjectSize));
heap, MemoryChunkLayout::MaxRegularCodeObjectSize()));
code1 = DummyOptimizedCode(isolate);
Handle<Code> code2 = DummyOptimizedCode(isolate);
code2_address = code2->address();
@@ -7002,6 +7002,87 @@ TEST(Regress978156) {
marking_state->GreyToBlack(filler);
}

class TestAllocationTracker : public HeapObjectAllocationTracker {
public:
explicit TestAllocationTracker(int expected_size)
: expected_size_(expected_size) {}

void AllocationEvent(Address addr, int size) {
CHECK(expected_size_ == size);
address_ = addr;
}

Address address() { return address_; }

private:
int expected_size_;
Address address_;
};

UNINITIALIZED_HEAP_TEST(CodeLargeObjectSpace64k) {
// Simulate having a system with 64k OS pages.
i::FLAG_v8_os_page_size = 64;

// Initialize the isolate manually to make sure --v8-os-page-size is taken
// into account.
v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
v8::Isolate* isolate = v8::Isolate::New(create_params);

Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();

// Allocate a regular code object.
{
int size_in_bytes =
MemoryChunkLayout::MaxRegularCodeObjectSize() - kTaggedSize;
TestAllocationTracker allocation_tracker{size_in_bytes};
heap->AddHeapObjectAllocationTracker(&allocation_tracker);

HeapObject obj;
{
AllocationResult allocation = heap->AllocateRaw(
size_in_bytes, AllocationType::kCode, AllocationOrigin::kRuntime,
AllocationAlignment::kCodeAligned);
CHECK(allocation.To(&obj));
CHECK_EQ(allocation.ToObjectChecked().address(),
allocation_tracker.address());

heap->CreateFillerObjectAt(obj.address(), size_in_bytes,
ClearRecordedSlots::kNo);
}

CHECK(!Heap::IsLargeObject(obj));
heap->RemoveHeapObjectAllocationTracker(&allocation_tracker);
}

// Allocate a large code object.
{
int size_in_bytes =
MemoryChunkLayout::MaxRegularCodeObjectSize() + kTaggedSize;
TestAllocationTracker allocation_tracker{size_in_bytes};
heap->AddHeapObjectAllocationTracker(&allocation_tracker);

HeapObject obj;
{
AllocationResult allocation = heap->AllocateRaw(
size_in_bytes, AllocationType::kCode, AllocationOrigin::kRuntime,
AllocationAlignment::kCodeAligned);
CHECK(allocation.To(&obj));
CHECK_EQ(allocation.ToObjectChecked().address(),
allocation_tracker.address());

heap->CreateFillerObjectAt(obj.address(), size_in_bytes,
ClearRecordedSlots::kNo);
}

CHECK(Heap::IsLargeObject(obj));
heap->RemoveHeapObjectAllocationTracker(&allocation_tracker);
}

isolate->Dispose();
}


} // namespace heap
} // namespace internal
} // namespace v8
6 changes: 3 additions & 3 deletions deps/v8/test/cctest/test-code-pages.cc
@@ -263,7 +263,7 @@ TEST(LargeCodeObject) {

// Create a big function that ends up in CODE_LO_SPACE.
const int instruction_size = Page::kPageSize + 1;
STATIC_ASSERT(instruction_size > kMaxRegularHeapObjectSize);
CHECK_GT(instruction_size, MemoryChunkLayout::MaxRegularCodeObjectSize());
std::unique_ptr<byte[]> instructions(new byte[instruction_size]);

CodeDesc desc;
@@ -379,7 +379,7 @@ TEST(LargeCodeObjectWithSignalHandler) {

// Create a big function that ends up in CODE_LO_SPACE.
const int instruction_size = Page::kPageSize + 1;
STATIC_ASSERT(instruction_size > kMaxRegularHeapObjectSize);
CHECK_GT(instruction_size, MemoryChunkLayout::MaxRegularCodeObjectSize());
std::unique_ptr<byte[]> instructions(new byte[instruction_size]);

CodeDesc desc;
@@ -455,7 +455,7 @@ TEST(Sorted) {

// Create a big function that ends up in CODE_LO_SPACE.
const int instruction_size = Page::kPageSize + 1;
STATIC_ASSERT(instruction_size > kMaxRegularHeapObjectSize);
CHECK_GT(instruction_size, MemoryChunkLayout::MaxRegularCodeObjectSize());
std::unique_ptr<byte[]> instructions(new byte[instruction_size]);

CodeDesc desc;
3 changes: 2 additions & 1 deletion deps/v8/test/cctest/test-factory.cc
@@ -60,7 +60,8 @@ TEST(Factory_CodeBuilder) {
HandleScope scope(isolate);

// Create a big function that ends up in CODE_LO_SPACE.
const int instruction_size = kMaxRegularHeapObjectSize + 1;
const int instruction_size =
MemoryChunkLayout::MaxRegularCodeObjectSize() + 1;
std::unique_ptr<byte[]> instructions(new byte[instruction_size]);

CodeDesc desc;
2 changes: 1 addition & 1 deletion deps/v8/test/cctest/test-unwinder-code-pages.cc
@@ -564,7 +564,7 @@ TEST(PCIsInV8_LargeCodeObject_CodePagesAPI) {

// Create a big function that ends up in CODE_LO_SPACE.
const int instruction_size = Page::kPageSize + 1;
STATIC_ASSERT(instruction_size > kMaxRegularHeapObjectSize);
CHECK_GT(instruction_size, MemoryChunkLayout::MaxRegularCodeObjectSize());
std::unique_ptr<byte[]> instructions(new byte[instruction_size]);

CodeDesc desc;