diff --git a/common.gypi b/common.gypi index 3f708d89b1ef38..0e5ce682494601 100644 --- a/common.gypi +++ b/common.gypi @@ -36,7 +36,7 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. - 'v8_embedder_string': '-node.12', + 'v8_embedder_string': '-node.13', ##### V8 defaults for Node.js ##### diff --git a/deps/v8/include/js_protocol.pdl b/deps/v8/include/js_protocol.pdl index 8d8211bf989f13..2d560435522769 100644 --- a/deps/v8/include/js_protocol.pdl +++ b/deps/v8/include/js_protocol.pdl @@ -766,6 +766,22 @@ experimental domain HeapProfiler # Average sample interval in bytes. Poisson distribution is used for the intervals. The # default value is 32768 bytes. optional number samplingInterval + # By default, the sampling heap profiler reports only objects which are + # still alive when the profile is returned via getSamplingProfile or + # stopSampling, which is useful for determining what functions contribute + # the most to steady-state memory usage. This flag instructs the sampling + # heap profiler to also include information about objects discarded by + # major GC, which will show which functions cause large temporary memory + # usage or long GC pauses. + optional boolean includeObjectsCollectedByMajorGC + # By default, the sampling heap profiler reports only objects which are + # still alive when the profile is returned via getSamplingProfile or + # stopSampling, which is useful for determining what functions contribute + # the most to steady-state memory usage. This flag instructs the sampling + # heap profiler to also include information about objects discarded by + # minor GC, which is useful when tuning a latency-sensitive application + # for minimal GC activity. 
+ optional boolean includeObjectsCollectedByMinorGC command startTrackingHeapObjects parameters diff --git a/deps/v8/include/v8-profiler.h b/deps/v8/include/v8-profiler.h index 8894641993e42a..6145a2257ad05b 100644 --- a/deps/v8/include/v8-profiler.h +++ b/deps/v8/include/v8-profiler.h @@ -903,6 +903,8 @@ class V8_EXPORT HeapProfiler { enum SamplingFlags { kSamplingNoFlags = 0, kSamplingForceGC = 1 << 0, + kSamplingIncludeObjectsCollectedByMajorGC = 1 << 1, + kSamplingIncludeObjectsCollectedByMinorGC = 1 << 2, }; /** @@ -1097,10 +1099,8 @@ class V8_EXPORT HeapProfiler { * |stack_depth| parameter controls the maximum number of stack frames to be * captured on each allocation. * - * NOTE: This is a proof-of-concept at this point. Right now we only sample - * newspace allocations. Support for paged space allocation (e.g. pre-tenured - * objects, large objects, code objects, etc.) and native allocations - * doesn't exist yet, but is anticipated in the future. + * NOTE: Support for native allocations doesn't exist yet, but is anticipated + * in the future. * * Objects allocated before the sampling is started will not be included in * the profile. 
diff --git a/deps/v8/src/heap/heap.cc b/deps/v8/src/heap/heap.cc index 5a307ff9e1306b..d966d979c87c99 100644 --- a/deps/v8/src/heap/heap.cc +++ b/deps/v8/src/heap/heap.cc @@ -1819,6 +1819,8 @@ bool Heap::CollectGarbage(AllocationSpace space, collector = SelectGarbageCollector(space, gc_reason, &collector_reason); + current_or_last_garbage_collector_ = collector; + if (collector == GarbageCollector::MARK_COMPACTOR && incremental_marking()->IsMinorMarking()) { CollectGarbage(NEW_SPACE, GarbageCollectionReason::kFinalizeMinorMC); diff --git a/deps/v8/src/heap/heap.h b/deps/v8/src/heap/heap.h index daca78390179bf..6e270f246df648 100644 --- a/deps/v8/src/heap/heap.h +++ b/deps/v8/src/heap/heap.h @@ -1455,6 +1455,10 @@ class Heap { bool is_current_gc_forced() const { return is_current_gc_forced_; } + GarbageCollector current_or_last_garbage_collector() const { + return current_or_last_garbage_collector_; + } + // Returns whether the currently in-progress GC should avoid increasing the // ages on any objects that live for a set number of collections. 
bool ShouldCurrentGCKeepAgesUnchanged() const { @@ -2389,6 +2393,8 @@ class Heap { bool is_current_gc_forced_ = false; bool is_current_gc_for_heap_profiler_ = false; + GarbageCollector current_or_last_garbage_collector_ = + GarbageCollector::SCAVENGER; ExternalStringTable external_string_table_; diff --git a/deps/v8/src/inspector/v8-heap-profiler-agent-impl.cc b/deps/v8/src/inspector/v8-heap-profiler-agent-impl.cc index 13dfd69bbeb587..4e8197fdec5c41 100644 --- a/deps/v8/src/inspector/v8-heap-profiler-agent-impl.cc +++ b/deps/v8/src/inspector/v8-heap-profiler-agent-impl.cc @@ -29,6 +29,7 @@ static const char allocationTrackingEnabled[] = "allocationTrackingEnabled"; static const char samplingHeapProfilerEnabled[] = "samplingHeapProfilerEnabled"; static const char samplingHeapProfilerInterval[] = "samplingHeapProfilerInterval"; +static const char samplingHeapProfilerFlags[] = "samplingHeapProfilerFlags"; } // namespace HeapProfilerAgentState class HeapSnapshotProgress final : public v8::ActivityControl { @@ -208,7 +209,16 @@ void V8HeapProfilerAgentImpl::restore() { double samplingInterval = m_state->doubleProperty( HeapProfilerAgentState::samplingHeapProfilerInterval, -1); DCHECK_GE(samplingInterval, 0); - startSampling(Maybe<double>(samplingInterval)); + int flags = m_state->integerProperty( + HeapProfilerAgentState::samplingHeapProfilerFlags, 0); + startSampling( + Maybe<double>(samplingInterval), + Maybe<bool>( + flags & + v8::HeapProfiler::kSamplingIncludeObjectsCollectedByMajorGC), + Maybe<bool>( + flags & + v8::HeapProfiler::kSamplingIncludeObjectsCollectedByMinorGC)); } } @@ -387,7 +397,9 @@ void V8HeapProfilerAgentImpl::stopTrackingHeapObjectsInternal() { } Response V8HeapProfilerAgentImpl::startSampling( - Maybe<double> samplingInterval) { + Maybe<double> samplingInterval, + Maybe<bool> includeObjectsCollectedByMajorGC, + Maybe<bool> includeObjectsCollectedByMinorGC) { v8::HeapProfiler* profiler = m_isolate->GetHeapProfiler(); if (!profiler) return Response::ServerError("Cannot access v8 heap profiler"); const
unsigned defaultSamplingInterval = 1 << 15; @@ -400,9 +412,17 @@ Response V8HeapProfilerAgentImpl::startSampling( samplingIntervalValue); m_state->setBoolean(HeapProfilerAgentState::samplingHeapProfilerEnabled, true); + int flags = v8::HeapProfiler::kSamplingForceGC; + if (includeObjectsCollectedByMajorGC.fromMaybe(false)) { + flags |= v8::HeapProfiler::kSamplingIncludeObjectsCollectedByMajorGC; + } + if (includeObjectsCollectedByMinorGC.fromMaybe(false)) { + flags |= v8::HeapProfiler::kSamplingIncludeObjectsCollectedByMinorGC; + } + m_state->setInteger(HeapProfilerAgentState::samplingHeapProfilerFlags, flags); profiler->StartSamplingHeapProfiler( static_cast<uint64_t>(samplingIntervalValue), 128, - v8::HeapProfiler::kSamplingForceGC); + static_cast<v8::HeapProfiler::SamplingFlags>(flags)); return Response::Success(); } diff --git a/deps/v8/src/inspector/v8-heap-profiler-agent-impl.h b/deps/v8/src/inspector/v8-heap-profiler-agent-impl.h index 0387a006b81417..61c6b6af53b9d5 100644 --- a/deps/v8/src/inspector/v8-heap-profiler-agent-impl.h +++ b/deps/v8/src/inspector/v8-heap-profiler-agent-impl.h @@ -56,7 +56,9 @@ class V8HeapProfilerAgentImpl : public protocol::HeapProfiler::Backend { Response getHeapObjectId(const String16& objectId, String16* heapSnapshotObjectId) override; - Response startSampling(Maybe<double> samplingInterval) override; + Response startSampling(Maybe<double> samplingInterval, + Maybe<bool> includeObjectsCollectedByMajorGC, + Maybe<bool> includeObjectsCollectedByMinorGC) override; Response stopSampling( std::unique_ptr<protocol::HeapProfiler::SamplingHeapProfile>*) override; Response getSamplingProfile( diff --git a/deps/v8/src/profiler/sampling-heap-profiler.cc b/deps/v8/src/profiler/sampling-heap-profiler.cc index 45c72ec20294fa..b38d235e988e61 100644 --- a/deps/v8/src/profiler/sampling-heap-profiler.cc +++ b/deps/v8/src/profiler/sampling-heap-profiler.cc @@ -95,6 +95,19 @@ void SamplingHeapProfiler::SampleObject(Address soon_object, size_t size) { void SamplingHeapProfiler::OnWeakCallback( const WeakCallbackInfo<Sample>& data) { Sample* sample =
data.GetParameter(); + Heap* heap = reinterpret_cast<Isolate*>(data.GetIsolate())->heap(); + bool is_minor_gc = + heap->current_or_last_garbage_collector() == GarbageCollector::SCAVENGER; + bool should_keep_sample = + is_minor_gc + ? (sample->profiler->flags_ & + v8::HeapProfiler::kSamplingIncludeObjectsCollectedByMinorGC) + : (sample->profiler->flags_ & + v8::HeapProfiler::kSamplingIncludeObjectsCollectedByMajorGC); + if (should_keep_sample) { + sample->global.Reset(); + return; + } AllocationNode* node = sample->owner; DCHECK_GT(node->allocations_[sample->size], 0); node->allocations_[sample->size]--; diff --git a/deps/v8/test/inspector/heap-profiler/sampling-heap-profiler-flags-expected.txt b/deps/v8/test/inspector/heap-profiler/sampling-heap-profiler-flags-expected.txt new file mode 100644 index 00000000000000..9d66bce04389d9 --- /dev/null +++ b/deps/v8/test/inspector/heap-profiler/sampling-heap-profiler-flags-expected.txt @@ -0,0 +1,6 @@ +Checks sampling heap profiler methods. +Retained size is less than 10KB: true +Including major GC increases size: true +Minor GC collected more: true +Total allocation is greater than 100KB: true +Successfully finished diff --git a/deps/v8/test/inspector/heap-profiler/sampling-heap-profiler-flags.js b/deps/v8/test/inspector/heap-profiler/sampling-heap-profiler-flags.js new file mode 100644 index 00000000000000..7a1ffa9472b78b --- /dev/null +++ b/deps/v8/test/inspector/heap-profiler/sampling-heap-profiler-flags.js @@ -0,0 +1,61 @@ +// Copyright 2017 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file.
+ +// Flags: --sampling-heap-profiler-suppress-randomness + +(async function() { + let {contextGroup, Protocol} = InspectorTest.start('Checks sampling heap profiler methods.'); + + contextGroup.addScript(` + function generateTrash() { + var arr = new Array(100); + for (var i = 0; i < 3000; ++i) { + var s = {a:i, b: new Array(100).fill(42)}; + arr[i % 100] = s; + } + return arr[30]; + } + //# sourceURL=test.js`); + + Protocol.HeapProfiler.enable(); + + await Protocol.HeapProfiler.startSampling({ + samplingInterval: 1e4, + includeObjectsCollectedByMajorGC: false, + includeObjectsCollectedByMinorGC: false, + }); + await Protocol.Runtime.evaluate({ expression: 'generateTrash()' }); + const profile1 = await Protocol.HeapProfiler.stopSampling(); + const size1 = nodeSize(profile1.result.profile.head); + InspectorTest.log('Retained size is less than 10KB:', size1 < 10000); + + await Protocol.HeapProfiler.startSampling({ + samplingInterval: 100, + includeObjectsCollectedByMajorGC: true, + includeObjectsCollectedByMinorGC: false, + }); + await Protocol.Runtime.evaluate({ expression: 'generateTrash()' }); + const profile2 = await Protocol.HeapProfiler.stopSampling(); + const size2 = nodeSize(profile2.result.profile.head); + InspectorTest.log('Including major GC increases size:', size1 < size2); + + await Protocol.HeapProfiler.startSampling({ + samplingInterval: 100, + includeObjectsCollectedByMajorGC: true, + includeObjectsCollectedByMinorGC: true, + }); + await Protocol.Runtime.evaluate({ expression: 'generateTrash()' }); + const profile3 = await Protocol.HeapProfiler.stopSampling(); + const size3 = nodeSize(profile3.result.profile.head); + InspectorTest.log('Minor GC collected more:', size3 > size2); + InspectorTest.log('Total allocation is greater than 100KB:', size3 > 100000); + + InspectorTest.log('Successfully finished'); + InspectorTest.completeTest(); + + function nodeSize(node) { + return node.children.reduce((res, child) => res + nodeSize(child), + 
node.callFrame.functionName === 'generateTrash' ? node.selfSize : 0); + } +})();