diff --git a/common.gypi b/common.gypi index 9ec799d3d98305..e4ef01a0007119 100644 --- a/common.gypi +++ b/common.gypi @@ -38,7 +38,7 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. - 'v8_embedder_string': '-node.20', + 'v8_embedder_string': '-node.21', ##### V8 defaults for Node.js ##### diff --git a/deps/v8/include/v8-internal.h b/deps/v8/include/v8-internal.h index a037210f552c7b..8ce88fb3efcba8 100644 --- a/deps/v8/include/v8-internal.h +++ b/deps/v8/include/v8-internal.h @@ -152,7 +152,6 @@ class Internals { static const uint32_t kNumIsolateDataSlots = 4; - // IsolateData layout guarantees. static const int kIsolateEmbedderDataOffset = 0; static const int kExternalMemoryOffset = kNumIsolateDataSlots * kApiSystemPointerSize; @@ -160,14 +159,8 @@ class Internals { kExternalMemoryOffset + kApiInt64Size; static const int kExternalMemoryAtLastMarkCompactOffset = kExternalMemoryLimitOffset + kApiInt64Size; - static const int kIsolateFastCCallCallerFpOffset = - kExternalMemoryAtLastMarkCompactOffset + kApiInt64Size; - static const int kIsolateFastCCallCallerPcOffset = - kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize; - static const int kIsolateStackGuardOffset = - kIsolateFastCCallCallerPcOffset + kApiSystemPointerSize; static const int kIsolateRootsOffset = - kIsolateStackGuardOffset + 7 * kApiSystemPointerSize; + kExternalMemoryAtLastMarkCompactOffset + kApiInt64Size; static const int kUndefinedValueRootIndex = 4; static const int kTheHoleValueRootIndex = 5; @@ -186,7 +179,7 @@ class Internals { static const int kFirstNonstringType = 0x40; static const int kOddballType = 0x43; - static const int kForeignType = 0x46; + static const int kForeignType = 0x47; static const int kJSSpecialApiObjectType = 0x410; static const int kJSApiObjectType = 0x420; static const int kJSObjectType = 0x421; diff --git a/deps/v8/include/v8-platform.h b/deps/v8/include/v8-platform.h index 
b707fafc49229a..7e43b0d9db4a9d 100644 --- a/deps/v8/include/v8-platform.h +++ b/deps/v8/include/v8-platform.h @@ -439,6 +439,14 @@ class Platform { */ virtual void DumpWithoutCrashing() {} + /** + * Lets the embedder to add crash keys. + */ + virtual void AddCrashKey(int id, const char* name, uintptr_t value) { + // "noop" is a valid implementation if the embedder doesn't care to log + // additional data for crashes. + } + protected: /** * Default implementation of current wall-clock time in milliseconds diff --git a/deps/v8/include/v8-profiler.h b/deps/v8/include/v8-profiler.h index 5387b394456a65..2c7d4b12e29123 100644 --- a/deps/v8/include/v8-profiler.h +++ b/deps/v8/include/v8-profiler.h @@ -18,8 +18,8 @@ namespace v8 { class HeapGraphNode; struct HeapStatsUpdate; -using NativeObject = void*; -using SnapshotObjectId = uint32_t; +typedef uint32_t SnapshotObjectId; + struct CpuProfileDeoptFrame { int script_id; @@ -272,10 +272,12 @@ class V8_EXPORT CpuProfilingOptions { * zero, the sampling interval will be equal to * the profiler's sampling interval. 
*/ - CpuProfilingOptions( - CpuProfilingMode mode = kLeafNodeLineNumbers, - unsigned max_samples = kNoSampleLimit, int sampling_interval_us = 0, - MaybeLocal filter_context = MaybeLocal()); + CpuProfilingOptions(CpuProfilingMode mode = kLeafNodeLineNumbers, + unsigned max_samples = kNoSampleLimit, + int sampling_interval_us = 0) + : mode_(mode), + max_samples_(max_samples), + sampling_interval_us_(sampling_interval_us) {} CpuProfilingMode mode() const { return mode_; } unsigned max_samples() const { return max_samples_; } @@ -284,13 +286,12 @@ class V8_EXPORT CpuProfilingOptions { private: friend class internal::CpuProfile; - bool has_filter_context() const { return !filter_context_.IsEmpty(); } + bool has_filter_context() const; void* raw_filter_context() const; CpuProfilingMode mode_; unsigned max_samples_; int sampling_interval_us_; - CopyablePersistentTraits::CopyablePersistent filter_context_; }; /** @@ -752,12 +753,6 @@ class V8_EXPORT EmbedderGraph { */ virtual const char* NamePrefix() { return nullptr; } - /** - * Returns the NativeObject that can be used for querying the - * |HeapSnapshot|. - */ - virtual NativeObject GetNativeObject() { return nullptr; } - Node(const Node&) = delete; Node& operator=(const Node&) = delete; }; @@ -820,12 +815,6 @@ class V8_EXPORT HeapProfiler { */ SnapshotObjectId GetObjectId(Local value); - /** - * Returns SnapshotObjectId for a native object referenced by |value| if it - * has been seen by the heap profiler, kUnknownObjectId otherwise. - */ - SnapshotObjectId GetObjectId(NativeObject value); - /** * Returns heap object with given SnapshotObjectId if the object is alive, * otherwise empty handle is returned. diff --git a/deps/v8/include/v8.h b/deps/v8/include/v8.h index 873ba0e2609b0b..f4ea851d41b02e 100644 --- a/deps/v8/include/v8.h +++ b/deps/v8/include/v8.h @@ -2094,7 +2094,6 @@ struct SampleInfo { StateTag vm_state; // Current VM state. 
void* external_callback_entry; // External callback address if VM is // executing an external callback. - void* top_context; // Incumbent native context address. }; struct MemoryRange { @@ -7561,9 +7560,8 @@ class V8_EXPORT EmbedderHeapTracer { * overriden to fill a |TraceSummary| that is used by V8 to schedule future * garbage collections. */ - V8_DEPRECATE_SOON("Use version with parameter.", - virtual void TraceEpilogue()) {} - virtual void TraceEpilogue(TraceSummary* trace_summary); + virtual void TraceEpilogue() {} + virtual void TraceEpilogue(TraceSummary* trace_summary) { TraceEpilogue(); } /** * Called upon entering the final marking pause. No more incremental marking @@ -7835,7 +7833,6 @@ class V8_EXPORT Isolate { class V8_EXPORT SuppressMicrotaskExecutionScope { public: explicit SuppressMicrotaskExecutionScope(Isolate* isolate); - explicit SuppressMicrotaskExecutionScope(MicrotaskQueue* microtask_queue); ~SuppressMicrotaskExecutionScope(); // Prevent copying of Scope objects. @@ -7846,8 +7843,13 @@ class V8_EXPORT Isolate { private: internal::Isolate* const isolate_; - internal::MicrotaskQueue* const microtask_queue_; internal::Address previous_stack_height_; + static_assert(sizeof(internal::Address) == + sizeof(internal::MicrotaskQueue*) && + alignof(internal::Address) == + alignof(internal::MicrotaskQueue*), + "The previous_stack_height_ field can replace the " + "microtask_queue_ field ABI-wise"); friend class internal::ThreadLocalTop; }; diff --git a/deps/v8/src/api/api.cc b/deps/v8/src/api/api.cc index 7ad21ee1e48dbc..07467d2af05f38 100644 --- a/deps/v8/src/api/api.cc +++ b/deps/v8/src/api/api.cc @@ -8183,14 +8183,13 @@ Isolate::AllowJavascriptExecutionScope::~AllowJavascriptExecutionScope() { Isolate::SuppressMicrotaskExecutionScope::SuppressMicrotaskExecutionScope( Isolate* isolate) - : isolate_(reinterpret_cast(isolate)), - microtask_queue_(isolate_->default_microtask_queue()) { + : isolate_(reinterpret_cast(isolate)) { 
isolate_->thread_local_top()->IncrementCallDepth(this); - microtask_queue_->IncrementMicrotasksSuppressions(); + isolate_->default_microtask_queue()->IncrementMicrotasksSuppressions(); } Isolate::SuppressMicrotaskExecutionScope::~SuppressMicrotaskExecutionScope() { - microtask_queue_->DecrementMicrotasksSuppressions(); + isolate_->default_microtask_queue()->DecrementMicrotasksSuppressions(); isolate_->thread_local_top()->DecrementCallDepth(this); } @@ -9975,25 +9974,12 @@ CpuProfiler* CpuProfiler::New(Isolate* isolate, CpuProfilingNamingMode mode) { return New(isolate, mode, kLazyLogging); } -CpuProfilingOptions::CpuProfilingOptions(CpuProfilingMode mode, - unsigned max_samples, - int sampling_interval_us, - MaybeLocal filter_context) - : mode_(mode), - max_samples_(max_samples), - sampling_interval_us_(sampling_interval_us) { - if (!filter_context.IsEmpty()) { - Local local_filter_context = filter_context.ToLocalChecked(); - filter_context_.Reset(local_filter_context->GetIsolate(), - local_filter_context); - } +bool CpuProfilingOptions::has_filter_context() const { + return false; } void* CpuProfilingOptions::raw_filter_context() const { - return reinterpret_cast( - i::Context::cast(*Utils::OpenPersistent(filter_context_)) - .native_context() - .address()); + return nullptr; } void CpuProfiler::Dispose() { delete reinterpret_cast(this); } @@ -10265,10 +10251,6 @@ SnapshotObjectId HeapProfiler::GetObjectId(Local value) { return reinterpret_cast(this)->GetSnapshotObjectId(obj); } -SnapshotObjectId HeapProfiler::GetObjectId(NativeObject value) { - return reinterpret_cast(this)->GetSnapshotObjectId(value); -} - Local HeapProfiler::FindObjectById(SnapshotObjectId id) { i::Handle obj = reinterpret_cast(this)->FindHeapObjectById(id); @@ -10401,17 +10383,6 @@ void EmbedderHeapTracer::TracePrologue(TraceFlags flags) { #endif } -void EmbedderHeapTracer::TraceEpilogue(TraceSummary* trace_summary) { -#if __clang__ -#pragma clang diagnostic push -#pragma clang diagnostic 
ignored "-Wdeprecated" -#endif - TraceEpilogue(); -#if __clang__ -#pragma clang diagnostic pop -#endif -} - void EmbedderHeapTracer::FinalizeTracing() { if (isolate_) { i::Isolate* isolate = reinterpret_cast(isolate_); diff --git a/deps/v8/src/execution/isolate-data.h b/deps/v8/src/execution/isolate-data.h index 6eb23db2a22adb..6c66775125ba41 100644 --- a/deps/v8/src/execution/isolate-data.h +++ b/deps/v8/src/execution/isolate-data.h @@ -111,27 +111,21 @@ class IsolateData final { Address* builtins() { return builtins_; } private: - // Static layout definition. - // - // Note: The location of fields within IsolateData is significant. The - // closer they are to the value of kRootRegister (i.e.: isolate_root()), the - // cheaper it is to access them. See also: https://crbug.com/993264. - // The recommend guideline is to put frequently-accessed fields close to the - // beginning of IsolateData. +// Static layout definition. #define FIELDS(V) \ V(kEmbedderDataOffset, Internals::kNumIsolateDataSlots* kSystemPointerSize) \ V(kExternalMemoryOffset, kInt64Size) \ V(kExternalMemoryLlimitOffset, kInt64Size) \ V(kExternalMemoryAtLastMarkCompactOffset, kInt64Size) \ - V(kFastCCallCallerFPOffset, kSystemPointerSize) \ - V(kFastCCallCallerPCOffset, kSystemPointerSize) \ - V(kStackGuardOffset, StackGuard::kSizeInBytes) \ V(kRootsTableOffset, RootsTable::kEntriesCount* kSystemPointerSize) \ V(kExternalReferenceTableOffset, ExternalReferenceTable::kSizeInBytes) \ V(kThreadLocalTopOffset, ThreadLocalTop::kSizeInBytes) \ V(kBuiltinEntryTableOffset, Builtins::builtin_count* kSystemPointerSize) \ V(kBuiltinsTableOffset, Builtins::builtin_count* kSystemPointerSize) \ V(kVirtualCallTargetRegisterOffset, kSystemPointerSize) \ + V(kFastCCallCallerFPOffset, kSystemPointerSize) \ + V(kFastCCallCallerPCOffset, kSystemPointerSize) \ + V(kStackGuardOffset, StackGuard::kSizeInBytes) \ V(kStackIsIterableOffset, kUInt8Size) \ /* This padding aligns IsolateData size by 8 bytes. 
*/ \ V(kPaddingOffset, \ @@ -159,17 +153,6 @@ class IsolateData final { // Caches the amount of external memory registered at the last MC. int64_t external_memory_at_last_mark_compact_ = 0; - // Stores the state of the caller for TurboAssembler::CallCFunction so that - // the sampling CPU profiler can iterate the stack during such calls. These - // are stored on IsolateData so that they can be stored to with only one move - // instruction in compiled code. - Address fast_c_call_caller_fp_ = kNullAddress; - Address fast_c_call_caller_pc_ = kNullAddress; - - // Fields related to the system and JS stack. In particular, this contains the - // stack limit used by stack checks in generated code. - StackGuard stack_guard_; - RootsTable roots_; ExternalReferenceTable external_reference_table_; @@ -189,6 +172,17 @@ class IsolateData final { // ia32 (otherwise the arguments adaptor call runs out of registers). void* virtual_call_target_register_ = nullptr; + // Stores the state of the caller for TurboAssembler::CallCFunction so that + // the sampling CPU profiler can iterate the stack during such calls. These + // are stored on IsolateData so that they can be stored to with only one move + // instruction in compiled code. + Address fast_c_call_caller_fp_ = kNullAddress; + Address fast_c_call_caller_pc_ = kNullAddress; + + // Fields related to the system and JS stack. In particular, this contains the + // stack limit used by stack checks in generated code. + StackGuard stack_guard_; + // Whether the SafeStackFrameIterator can successfully iterate the current // stack. Only valid values are 0 or 1. 
uint8_t stack_is_iterable_ = 1; diff --git a/deps/v8/src/execution/isolate.cc b/deps/v8/src/execution/isolate.cc index ad54ae0a7cb075..d75dbfaa6597d7 100644 --- a/deps/v8/src/execution/isolate.cc +++ b/deps/v8/src/execution/isolate.cc @@ -2924,14 +2924,6 @@ void Isolate::CheckIsolateLayout() { CHECK_EQ(OFFSET_OF(Isolate, isolate_data_), 0); CHECK_EQ(static_cast(OFFSET_OF(Isolate, isolate_data_.embedder_data_)), Internals::kIsolateEmbedderDataOffset); - CHECK_EQ(static_cast( - OFFSET_OF(Isolate, isolate_data_.fast_c_call_caller_fp_)), - Internals::kIsolateFastCCallCallerFpOffset); - CHECK_EQ(static_cast( - OFFSET_OF(Isolate, isolate_data_.fast_c_call_caller_pc_)), - Internals::kIsolateFastCCallCallerPcOffset); - CHECK_EQ(static_cast(OFFSET_OF(Isolate, isolate_data_.stack_guard_)), - Internals::kIsolateStackGuardOffset); CHECK_EQ(static_cast(OFFSET_OF(Isolate, isolate_data_.roots_)), Internals::kIsolateRootsOffset); CHECK_EQ(Internals::kExternalMemoryOffset % 8, 0); diff --git a/deps/v8/src/objects/instance-type.h b/deps/v8/src/objects/instance-type.h index 9a855de95bcf0c..b922c4b29af42c 100644 --- a/deps/v8/src/objects/instance-type.h +++ b/deps/v8/src/objects/instance-type.h @@ -133,8 +133,8 @@ enum InstanceType : uint16_t { // "Data", objects that cannot contain non-map-word pointers to heap // objects. 
- FOREIGN_TYPE, BYTE_ARRAY_TYPE, + FOREIGN_TYPE, BYTECODE_ARRAY_TYPE, FREE_SPACE_TYPE, FIXED_DOUBLE_ARRAY_TYPE, diff --git a/deps/v8/src/objects/objects-definitions.h b/deps/v8/src/objects/objects-definitions.h index b346b5b7d15188..7c0245a0f24a61 100644 --- a/deps/v8/src/objects/objects-definitions.h +++ b/deps/v8/src/objects/objects-definitions.h @@ -67,8 +67,8 @@ namespace internal { \ V(MAP_TYPE) \ V(CODE_TYPE) \ - V(FOREIGN_TYPE) \ V(BYTE_ARRAY_TYPE) \ + V(FOREIGN_TYPE) \ V(BYTECODE_ARRAY_TYPE) \ V(FREE_SPACE_TYPE) \ \ diff --git a/deps/v8/src/profiler/heap-profiler.cc b/deps/v8/src/profiler/heap-profiler.cc index a498e8e21432b8..472dbdbb10b730 100644 --- a/deps/v8/src/profiler/heap-profiler.cc +++ b/deps/v8/src/profiler/heap-profiler.cc @@ -151,17 +151,6 @@ SnapshotObjectId HeapProfiler::GetSnapshotObjectId(Handle obj) { return ids_->FindEntry(HeapObject::cast(*obj).address()); } -SnapshotObjectId HeapProfiler::GetSnapshotObjectId(NativeObject obj) { - // Try to find id of regular native node first. - SnapshotObjectId id = ids_->FindEntry(reinterpret_cast
(obj)); - // In case no id has been found, check whether there exists an entry where the - // native objects has been merged into a V8 entry. - if (id == v8::HeapProfiler::kUnknownObjectId) { - id = ids_->FindMergedNativeEntry(obj); - } - return id; -} - void HeapProfiler::ObjectMoveEvent(Address from, Address to, int size) { base::MutexGuard guard(&profiler_mutex_); bool known_object = ids_->MoveObject(from, to, size); diff --git a/deps/v8/src/profiler/heap-profiler.h b/deps/v8/src/profiler/heap-profiler.h index f7336eb6be50c8..940574282efd01 100644 --- a/deps/v8/src/profiler/heap-profiler.h +++ b/deps/v8/src/profiler/heap-profiler.h @@ -52,7 +52,6 @@ class HeapProfiler : public HeapObjectAllocationTracker { int GetSnapshotsCount(); HeapSnapshot* GetSnapshot(int index); SnapshotObjectId GetSnapshotObjectId(Handle obj); - SnapshotObjectId GetSnapshotObjectId(NativeObject obj); void DeleteAllSnapshots(); void RemoveSnapshot(HeapSnapshot* snapshot); diff --git a/deps/v8/src/profiler/heap-snapshot-generator.cc b/deps/v8/src/profiler/heap-snapshot-generator.cc index 75b6aa7b77e1d1..dd2eaea0fc6e5b 100644 --- a/deps/v8/src/profiler/heap-snapshot-generator.cc +++ b/deps/v8/src/profiler/heap-snapshot-generator.cc @@ -352,7 +352,7 @@ void HeapObjectsMap::UpdateObjectSize(Address addr, int size) { SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) { base::HashMap::Entry* entry = entries_map_.Lookup( reinterpret_cast(addr), ComputeAddressHash(addr)); - if (entry == nullptr) return v8::HeapProfiler::kUnknownObjectId; + if (entry == nullptr) return 0; int entry_index = static_cast(reinterpret_cast(entry->value)); EntryInfo& entry_info = entries_.at(entry_index); DCHECK(static_cast(entries_.size()) > entries_map_.occupancy()); @@ -386,25 +386,6 @@ SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr, return id; } -SnapshotObjectId HeapObjectsMap::FindMergedNativeEntry(NativeObject addr) { - auto it = merged_native_entries_map_.find(addr); - if (it == 
merged_native_entries_map_.end()) - return v8::HeapProfiler::kUnknownObjectId; - return entries_[it->second].id; -} - -void HeapObjectsMap::AddMergedNativeEntry(NativeObject addr, - Address canonical_addr) { - base::HashMap::Entry* entry = - entries_map_.Lookup(reinterpret_cast(canonical_addr), - ComputeAddressHash(canonical_addr)); - auto result = merged_native_entries_map_.insert( - {addr, reinterpret_cast(entry->value)}); - if (!result.second) { - result.first->second = reinterpret_cast(entry->value); - } -} - void HeapObjectsMap::StopHeapObjectsTracking() { time_intervals_.clear(); } void HeapObjectsMap::UpdateHeapObjectsMap() { @@ -484,20 +465,9 @@ SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream, void HeapObjectsMap::RemoveDeadEntries() { DCHECK(entries_.size() > 0 && entries_.at(0).id == 0 && entries_.at(0).addr == kNullAddress); - - // Build up temporary reverse map. - std::unordered_map reverse_merged_native_entries_map; - for (const auto& it : merged_native_entries_map_) { - auto result = - reverse_merged_native_entries_map.emplace(it.second, it.first); - DCHECK(result.second); - USE(result); - } - size_t first_free_entry = 1; for (size_t i = 1; i < entries_.size(); ++i) { EntryInfo& entry_info = entries_.at(i); - auto merged_reverse_it = reverse_merged_native_entries_map.find(i); if (entry_info.accessed) { if (first_free_entry != i) { entries_.at(first_free_entry) = entry_info; @@ -508,19 +478,11 @@ void HeapObjectsMap::RemoveDeadEntries() { ComputeAddressHash(entry_info.addr)); DCHECK(entry); entry->value = reinterpret_cast(first_free_entry); - if (merged_reverse_it != reverse_merged_native_entries_map.end()) { - auto it = merged_native_entries_map_.find(merged_reverse_it->second); - DCHECK_NE(merged_native_entries_map_.end(), it); - it->second = first_free_entry; - } ++first_free_entry; } else { if (entry_info.addr) { entries_map_.Remove(reinterpret_cast(entry_info.addr), ComputeAddressHash(entry_info.addr)); - if 
(merged_reverse_it != reverse_merged_native_entries_map.end()) { - merged_native_entries_map_.erase(merged_reverse_it->second); - } } } } @@ -1891,14 +1853,10 @@ HeapEntry* EmbedderGraphEntriesAllocator::AllocateEntry(HeapThing ptr) { reinterpret_cast(ptr); DCHECK(node->IsEmbedderNode()); size_t size = node->SizeInBytes(); - Address lookup_address = reinterpret_cast
(node->GetNativeObject()); - SnapshotObjectId id = - (lookup_address) ? heap_object_map_->FindOrAddEntry(lookup_address, 0) - : static_cast( - reinterpret_cast(node) << 1); - return snapshot_->AddEntry(EmbedderGraphNodeType(node), - EmbedderGraphNodeName(names_, node), id, - static_cast(size), 0); + return snapshot_->AddEntry( + EmbedderGraphNodeType(node), EmbedderGraphNodeName(names_, node), + static_cast(reinterpret_cast(node) << 1), + static_cast(size), 0); } NativeObjectsExplorer::NativeObjectsExplorer( @@ -1907,14 +1865,12 @@ NativeObjectsExplorer::NativeObjectsExplorer( Isolate::FromHeap(snapshot->profiler()->heap_object_map()->heap())), snapshot_(snapshot), names_(snapshot_->profiler()->names()), - heap_object_map_(snapshot_->profiler()->heap_object_map()), embedder_graph_entries_allocator_( new EmbedderGraphEntriesAllocator(snapshot)) {} HeapEntry* NativeObjectsExplorer::EntryForEmbedderGraphNode( EmbedderGraphImpl::Node* node) { EmbedderGraphImpl::Node* wrapper = node->WrapperNode(); - NativeObject native_object = node->GetNativeObject(); if (wrapper) { node = wrapper; } @@ -1926,16 +1882,8 @@ HeapEntry* NativeObjectsExplorer::EntryForEmbedderGraphNode( static_cast(node); Object object = v8_node->GetObject(); if (object.IsSmi()) return nullptr; - HeapEntry* entry = generator_->FindEntry( + return generator_->FindEntry( reinterpret_cast(Object::cast(object).ptr())); - if (native_object) { - HeapObject heap_object = HeapObject::cast(object); - heap_object_map_->AddMergedNativeEntry(native_object, - heap_object.address()); - DCHECK_EQ(entry->id(), - heap_object_map_->FindMergedNativeEntry(native_object)); - } - return entry; } } diff --git a/deps/v8/src/profiler/heap-snapshot-generator.h b/deps/v8/src/profiler/heap-snapshot-generator.h index 360ed1f009290f..f0678bba72b0a1 100644 --- a/deps/v8/src/profiler/heap-snapshot-generator.h +++ b/deps/v8/src/profiler/heap-snapshot-generator.h @@ -249,8 +249,6 @@ class HeapObjectsMap { SnapshotObjectId 
FindOrAddEntry(Address addr, unsigned int size, bool accessed = true); - SnapshotObjectId FindMergedNativeEntry(NativeObject addr); - void AddMergedNativeEntry(NativeObject addr, Address canonical_addr); bool MoveObject(Address from, Address to, int size); void UpdateObjectSize(Address addr, int size); SnapshotObjectId last_assigned_id() const { @@ -287,8 +285,6 @@ class HeapObjectsMap { base::HashMap entries_map_; std::vector entries_; std::vector time_intervals_; - // Map from NativeObject to EntryInfo index in entries_. - std::unordered_map merged_native_entries_map_; Heap* heap_; DISALLOW_COPY_AND_ASSIGN(HeapObjectsMap); @@ -457,7 +453,6 @@ class NativeObjectsExplorer { Isolate* isolate_; HeapSnapshot* snapshot_; StringsStorage* names_; - HeapObjectsMap* heap_object_map_; std::unique_ptr embedder_graph_entries_allocator_; // Used during references extraction. HeapSnapshotGenerator* generator_ = nullptr; diff --git a/deps/v8/src/profiler/tick-sample.cc b/deps/v8/src/profiler/tick-sample.cc index 5c2f2d63ce3400..4963b642c65dfb 100644 --- a/deps/v8/src/profiler/tick-sample.cc +++ b/deps/v8/src/profiler/tick-sample.cc @@ -188,7 +188,7 @@ DISABLE_ASAN void TickSample::Init(Isolate* v8_isolate, bool use_simulator_reg_state, base::TimeDelta sampling_interval) { this->update_stats = update_stats; - SampleInfo info; + SampleInfoWithContext info; RegisterState regs = reg_state; if (!GetStackSample(v8_isolate, ®s, record_c_entry_frame, stack, kMaxFramesCount, &info, use_simulator_reg_state, @@ -229,7 +229,26 @@ DISABLE_ASAN void TickSample::Init(Isolate* v8_isolate, bool TickSample::GetStackSample(Isolate* v8_isolate, RegisterState* regs, RecordCEntryFrame record_c_entry_frame, void** frames, size_t frames_limit, - v8::SampleInfo* sample_info, + v8::SampleInfo* sample_info_out, + bool use_simulator_reg_state, void** contexts) { + SampleInfoWithContext sample_info_local; + + bool ret = GetStackSample( + v8_isolate, regs, record_c_entry_frame, frames, frames_limit, + 
&sample_info_local, use_simulator_reg_state, contexts); + + sample_info_out->frames_count = sample_info_local.frames_count; + sample_info_out->vm_state = sample_info_local.vm_state; + sample_info_out->external_callback_entry = + sample_info_local.external_callback_entry; + + return ret; +} + +bool TickSample::GetStackSample(Isolate* v8_isolate, RegisterState* regs, + RecordCEntryFrame record_c_entry_frame, + void** frames, size_t frames_limit, + SampleInfoWithContext* sample_info, bool use_simulator_reg_state, void** contexts) { i::Isolate* isolate = reinterpret_cast(v8_isolate); sample_info->frames_count = 0; diff --git a/deps/v8/src/profiler/tick-sample.h b/deps/v8/src/profiler/tick-sample.h index 37ae1e9d06e8ea..6123a17e9d737d 100644 --- a/deps/v8/src/profiler/tick-sample.h +++ b/deps/v8/src/profiler/tick-sample.h @@ -14,6 +14,14 @@ namespace internal { class Isolate; +struct SampleInfoWithContext { + size_t frames_count; // Number of frames collected. + StateTag vm_state; // Current VM state. + void* external_callback_entry; // External callback address if VM is + // executing an external callback. + void* top_context; // Incumbent native context address. +}; + // TickSample captures the information collected for each sample. 
struct V8_EXPORT TickSample { // Internal profiling (with --prof + tools/$OS-tick-processor) wants to @@ -71,6 +79,12 @@ struct V8_EXPORT TickSample { v8::SampleInfo* sample_info, bool use_simulator_reg_state = true, void** contexts = nullptr); + static bool GetStackSample(Isolate* isolate, v8::RegisterState* state, + RecordCEntryFrame record_c_entry_frame, + void** frames, size_t frames_limit, + SampleInfoWithContext* sample_info, + bool use_simulator_reg_state = true, + void** contexts = nullptr); void print() const; diff --git a/deps/v8/src/wasm/wasm-module-sourcemap.cc b/deps/v8/src/wasm/wasm-module-sourcemap.cc index cfe54e7c375885..7452f7bc7fe41e 100644 --- a/deps/v8/src/wasm/wasm-module-sourcemap.cc +++ b/deps/v8/src/wasm/wasm-module-sourcemap.cc @@ -28,7 +28,9 @@ WasmModuleSourceMap::WasmModuleSourceMap(v8::Isolate* v8_isolate, bool has_valid_version = src_map_obj ->Get(context, - v8::String::NewFromUtf8(v8_isolate, "version").ToLocalChecked()) + v8::String::NewFromUtf8(v8_isolate, "version", + v8::NewStringType::kInternalized) + .ToLocalChecked()) .ToLocal(&version_value) && version_value->IsUint32(); uint32_t version = 0; @@ -39,7 +41,9 @@ WasmModuleSourceMap::WasmModuleSourceMap(v8::Isolate* v8_isolate, bool has_valid_sources = src_map_obj ->Get(context, - v8::String::NewFromUtf8(v8_isolate, "sources").ToLocalChecked()) + v8::String::NewFromUtf8(v8_isolate, "sources", + v8::NewStringType::kInternalized) + .ToLocalChecked()) .ToLocal(&sources_value) && sources_value->IsArray(); if (!has_valid_sources) return; @@ -49,7 +53,9 @@ WasmModuleSourceMap::WasmModuleSourceMap(v8::Isolate* v8_isolate, v8::Local sources_len_value; if (!sources_arr ->Get(context, - v8::String::NewFromUtf8(v8_isolate, "length").ToLocalChecked()) + v8::String::NewFromUtf8(v8_isolate, "length", + v8::NewStringType::kInternalized) + .ToLocalChecked()) .ToLocal(&sources_len_value)) return; uint32_t sources_len = 0; @@ -73,7 +79,9 @@ WasmModuleSourceMap::WasmModuleSourceMap(v8::Isolate* 
v8_isolate, src_map_obj ->Get( context, - v8::String::NewFromUtf8(v8_isolate, "mappings").ToLocalChecked()) + v8::String::NewFromUtf8(v8_isolate, "mappings", + v8::NewStringType::kInternalized) + .ToLocalChecked()) .ToLocal(&mappings_value) && mappings_value->IsString(); if (!has_valid_mappings) return; diff --git a/deps/v8/test/cctest/test-cpu-profiler.cc b/deps/v8/test/cctest/test-cpu-profiler.cc index 6d0ee0e512856a..e0c554fee5f39d 100644 --- a/deps/v8/test/cctest/test-cpu-profiler.cc +++ b/deps/v8/test/cctest/test-cpu-profiler.cc @@ -458,8 +458,7 @@ class ProfilerHelper { v8::Local function, v8::Local argv[], int argc, unsigned min_js_samples = 0, unsigned min_external_samples = 0, ProfilingMode mode = ProfilingMode::kLeafNodeLineNumbers, - unsigned max_samples = v8::CpuProfilingOptions::kNoSampleLimit, - v8::Local context = v8::Local()); + unsigned max_samples = v8::CpuProfilingOptions::kNoSampleLimit); v8::CpuProfiler* profiler() { return profiler_; } @@ -472,12 +471,11 @@ v8::CpuProfile* ProfilerHelper::Run(v8::Local function, v8::Local argv[], int argc, unsigned min_js_samples, unsigned min_external_samples, - ProfilingMode mode, unsigned max_samples, - v8::Local context) { + ProfilingMode mode, unsigned max_samples) { v8::Local profile_name = v8_str("my_profile"); profiler_->SetSamplingInterval(100); - profiler_->StartProfiling(profile_name, {mode, max_samples, 0, context}); + profiler_->StartProfiling(profile_name, {mode, max_samples}); v8::internal::CpuProfiler* iprofiler = reinterpret_cast(profiler_); @@ -3472,6 +3470,7 @@ TEST(Bug9151StaleCodeEntries) { CHECK(callback); } +/* Disabled for Node 12 // Tests that functions from other contexts aren't recorded when filtering for // another context. 
TEST(ContextIsolation) { @@ -3605,6 +3604,7 @@ TEST(ContextFilterMovedNativeContext) { CHECK(callback_node); } } +*/ enum class EntryCountMode { kAll, kOnlyInlined }; diff --git a/deps/v8/test/cctest/test-heap-profiler.cc b/deps/v8/test/cctest/test-heap-profiler.cc index 3aec4ae0039607..e534670bb6389d 100644 --- a/deps/v8/test/cctest/test-heap-profiler.cc +++ b/deps/v8/test/cctest/test-heap-profiler.cc @@ -46,7 +46,6 @@ #include "src/profiler/heap-snapshot-generator-inl.h" #include "test/cctest/cctest.h" #include "test/cctest/collector.h" -#include "test/cctest/heap/heap-utils.h" using i::AllocationTraceNode; using i::AllocationTraceTree; @@ -1694,154 +1693,6 @@ TEST(HeapSnapshotRetainedObjectInfo) { CHECK_EQ(native_group_ccc, GetChildByName(n_CCC, "ccc-group")); } -namespace { - -class EmbedderGraphBuilderForNativeSnapshotObjectId final { - public: - class RegularNode : public v8::EmbedderGraph::Node { - public: - RegularNode(v8::NativeObject native_object, const char* name, size_t size, - Node* wrapper_node) - : name_(name), - size_(size), - native_object_(native_object), - wrapper_node_(wrapper_node) {} - // v8::EmbedderGraph::Node - const char* Name() override { return name_; } - size_t SizeInBytes() override { return size_; } - Node* WrapperNode() override { return wrapper_node_; } - v8::NativeObject GetNativeObject() override { - return native_object_ ? 
native_object_ : this; - } - - private: - const char* name_; - size_t size_; - v8::NativeObject native_object_; - Node* wrapper_node_; - }; - - class RootNode : public RegularNode { - public: - explicit RootNode(const char* name) - : RegularNode(nullptr, name, 0, nullptr) {} - // v8::EmbedderGraph::EmbedderNode - bool IsRootNode() override { return true; } - }; - - struct BuildParameter { - v8::Persistent* wrapper; - void* native1; - void* native2; - }; - - static void BuildEmbedderGraph(v8::Isolate* isolate, v8::EmbedderGraph* graph, - void* data) { - BuildParameter* parameter = reinterpret_cast(data); - v8::Local local_str = - v8::Local::New(isolate, *(parameter->wrapper)); - auto* v8_node = graph->V8Node(local_str); - CHECK(!v8_node->IsEmbedderNode()); - auto* root_node = - graph->AddNode(std::unique_ptr(new RootNode("root"))); - auto* non_merged_node = graph->AddNode(std::unique_ptr( - new RegularNode(parameter->native1, "non-merged", 0, nullptr))); - auto* merged_node = graph->AddNode(std::unique_ptr( - new RegularNode(parameter->native2, "merged", 0, v8_node))); - graph->AddEdge(root_node, non_merged_node); - graph->AddEdge(root_node, merged_node); - } -}; - -} // namespace - -TEST(NativeSnapshotObjectId) { - LocalContext env; - v8::Isolate* isolate = env->GetIsolate(); - v8::HandleScope scope(isolate); - v8::HeapProfiler* heap_profiler = isolate->GetHeapProfiler(); - - v8::Persistent wrapper(isolate, v8_str("wrapper")); - int native1; - int native2; - - EmbedderGraphBuilderForNativeSnapshotObjectId::BuildParameter parameter{ - &wrapper, &native1, &native2}; - heap_profiler->AddBuildEmbedderGraphCallback( - EmbedderGraphBuilderForNativeSnapshotObjectId::BuildEmbedderGraph, - ¶meter); - const v8::HeapSnapshot* snapshot = heap_profiler->TakeHeapSnapshot(); - CHECK(ValidateSnapshot(snapshot)); - - v8::SnapshotObjectId non_merged_id = heap_profiler->GetObjectId(&native1); - CHECK_NE(v8::HeapProfiler::kUnknownObjectId, non_merged_id); - v8::SnapshotObjectId 
merged_id = heap_profiler->GetObjectId(&native2); - CHECK_NE(v8::HeapProfiler::kUnknownObjectId, merged_id); - CHECK_NE(non_merged_id, merged_id); - const v8::HeapGraphNode* non_merged_node = - snapshot->GetNodeById(non_merged_id); - CHECK_NOT_NULL(non_merged_node); - const v8::HeapGraphNode* merged_node = snapshot->GetNodeById(merged_id); - CHECK_NOT_NULL(merged_node); - - heap_profiler->ClearObjectIds(); - CHECK_EQ(v8::HeapProfiler::kUnknownObjectId, - heap_profiler->GetObjectId(&native1)); - CHECK_EQ(v8::HeapProfiler::kUnknownObjectId, - heap_profiler->GetObjectId(&native2)); -} - -TEST(NativeSnapshotObjectIdMoving) { - // Required to allow moving specific objects. - i::FLAG_manual_evacuation_candidates_selection = true; - - LocalContext env; - v8::Isolate* isolate = env->GetIsolate(); - v8::HandleScope scope(isolate); - v8::HeapProfiler* heap_profiler = isolate->GetHeapProfiler(); - heap_profiler->StartTrackingHeapObjects(true); - - v8::Persistent wrapper(isolate, v8_str("wrapper")); - int native1; - int native2; - - EmbedderGraphBuilderForNativeSnapshotObjectId::BuildParameter parameter{ - &wrapper, &native1, &native2}; - heap_profiler->AddBuildEmbedderGraphCallback( - EmbedderGraphBuilderForNativeSnapshotObjectId::BuildEmbedderGraph, - ¶meter); - const v8::HeapSnapshot* snapshot = heap_profiler->TakeHeapSnapshot(); - CHECK(ValidateSnapshot(snapshot)); - - v8::SnapshotObjectId non_merged_id = heap_profiler->GetObjectId(&native1); - CHECK_NE(v8::HeapProfiler::kUnknownObjectId, non_merged_id); - v8::SnapshotObjectId merged_id = heap_profiler->GetObjectId(&native2); - CHECK_NE(v8::HeapProfiler::kUnknownObjectId, merged_id); - CHECK_NE(non_merged_id, merged_id); - const v8::HeapGraphNode* non_merged_node = - snapshot->GetNodeById(non_merged_id); - CHECK_NOT_NULL(non_merged_node); - const v8::HeapGraphNode* merged_node = snapshot->GetNodeById(merged_id); - CHECK_NOT_NULL(merged_node); - - { - v8::HandleScope scope(isolate); - auto local = v8::Local::New(isolate, 
wrapper); - i::Handle internal = i::Handle::cast( - v8::Utils::OpenHandle(*v8::Local::Cast(local))); - i::heap::ForceEvacuationCandidate(i::Page::FromHeapObject(*internal)); - } - CcTest::CollectAllGarbage(); - - non_merged_id = heap_profiler->GetObjectId(&native1); - CHECK_NE(v8::HeapProfiler::kUnknownObjectId, non_merged_id); - merged_id = heap_profiler->GetObjectId(&native2); - CHECK_NE(v8::HeapProfiler::kUnknownObjectId, merged_id); - CHECK_NE(non_merged_id, merged_id); - - heap_profiler->StopTrackingHeapObjects(); -} - TEST(DeleteAllHeapSnapshots) { LocalContext env; v8::HandleScope scope(env->GetIsolate()); diff --git a/src/node_options.cc b/src/node_options.cc index b99a24a5c8f071..148bee033fcde5 100644 --- a/src/node_options.cc +++ b/src/node_options.cc @@ -750,6 +750,10 @@ PerProcessOptionsParser::PerProcessOptionsParser( #endif #endif + // v12.x backwards compat flags removed in V8 7.9. + AddOption("--fast_calls_with_arguments_mismatches", "", NoOp{}); + AddOption("--harmony_numeric_separator", "", NoOp{}); + Insert(iop, &PerProcessOptions::get_per_isolate_options); }