deps: patch V8 to 10.7.193.16
Refs: v8/v8@10.7.193.13...10.7.193.16
PR-URL: #45023
Reviewed-By: Rich Trott <rtrott@gmail.com>
Reviewed-By: Jiawen Geng <technicalcute@gmail.com>
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: Mohammed Keyvanzadeh <mohammadkeyvanzade94@gmail.com>
Reviewed-By: James M Snell <jasnell@gmail.com>
Reviewed-By: Yagiz Nizipli <yagiz@nizipli.com>
targos authored and RafaelGSS committed Nov 10, 2022
1 parent dbc696d commit 3bfba6d
Showing 7 changed files with 148 additions and 21 deletions.
2 changes: 1 addition & 1 deletion deps/v8/include/v8-version.h
@@ -11,7 +11,7 @@
#define V8_MAJOR_VERSION 10
#define V8_MINOR_VERSION 7
#define V8_BUILD_NUMBER 193
#define V8_PATCH_LEVEL 13
#define V8_PATCH_LEVEL 16

// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
45 changes: 44 additions & 1 deletion deps/v8/src/compiler/compilation-dependencies.cc
@@ -34,7 +34,8 @@ namespace compiler {
V(Protector) \
V(PrototypeProperty) \
V(StableMap) \
V(Transition)
V(Transition) \
V(ObjectSlotValue)

CompilationDependencies::CompilationDependencies(JSHeapBroker* broker,
Zone* zone)
@@ -868,6 +869,42 @@ class ProtectorDependency final : public CompilationDependency {
const PropertyCellRef cell_;
};

// Check that an object slot will not change during compilation.
class ObjectSlotValueDependency final : public CompilationDependency {
public:
explicit ObjectSlotValueDependency(const HeapObjectRef& object, int offset,
const ObjectRef& value)
: CompilationDependency(kObjectSlotValue),
object_(object.object()),
offset_(offset),
value_(value.object()) {}

bool IsValid() const override {
PtrComprCageBase cage_base = GetPtrComprCageBase(*object_);
Object current_value =
offset_ == HeapObject::kMapOffset
? object_->map()
: TaggedField<Object>::Relaxed_Load(cage_base, *object_, offset_);
return *value_ == current_value;
}
void Install(PendingDependencies* deps) const override {}

private:
size_t Hash() const override {
return base::hash_combine(object_.address(), offset_, value_.address());
}

bool Equals(const CompilationDependency* that) const override {
const ObjectSlotValueDependency* const zat = that->AsObjectSlotValue();
return object_->address() == zat->object_->address() &&
offset_ == zat->offset_ && value_.address() == zat->value_.address();
}

Handle<HeapObject> object_;
int offset_;
Handle<Object> value_;
};

class ElementsKindDependency final : public CompilationDependency {
public:
ElementsKindDependency(const AllocationSiteRef& site, ElementsKind kind)
@@ -1120,6 +1157,12 @@ void CompilationDependencies::DependOnElementsKind(
}
}

void CompilationDependencies::DependOnObjectSlotValue(
const HeapObjectRef& object, int offset, const ObjectRef& value) {
RecordDependency(
zone_->New<ObjectSlotValueDependency>(object, offset, value));
}

void CompilationDependencies::DependOnOwnConstantElement(
const JSObjectRef& holder, uint32_t index, const ObjectRef& element) {
RecordDependency(
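
For context, here is a minimal, self-contained sketch of the pattern the new ObjectSlotValueDependency implements: remember a slot's value when the dependency is recorded, re-read the slot when the compilation is finalized, and commit the optimized code only if the value is unchanged. All types and names below are illustrative stand-ins, not V8 API; the real dependency additionally special-cases the map slot and uses a relaxed tagged load because the check can race with the mutator.

#include <cstdint>
#include <vector>

// Stand-in for a heap object: a flat array of tagged words.
struct FakeObject {
  std::vector<uintptr_t> slots;
};

class SlotValueDependency {
 public:
  SlotValueDependency(const FakeObject* object, size_t index, uintptr_t value)
      : object_(object), index_(index), value_(value) {}

  // Valid iff the slot still holds the value observed when it was recorded.
  bool IsValid() const { return object_->slots[index_] == value_; }

 private:
  const FakeObject* object_;
  size_t index_;
  uintptr_t value_;
};

// At finalization, the compiler commits the generated code only if every
// recorded dependency is still valid; otherwise the code is discarded.
bool CanCommit(const std::vector<SlotValueDependency>& deps) {
  for (const auto& dep : deps) {
    if (!dep.IsValid()) return false;
  }
  return true;
}
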
4 changes: 4 additions & 0 deletions deps/v8/src/compiler/compilation-dependencies.h
@@ -93,6 +93,10 @@ class V8_EXPORT_PRIVATE CompilationDependencies : public ZoneObject {
// Record the assumption that {site}'s {ElementsKind} doesn't change.
void DependOnElementsKind(const AllocationSiteRef& site);

// Check that an object slot will not change during compilation.
void DependOnObjectSlotValue(const HeapObjectRef& object, int offset,
const ObjectRef& value);

void DependOnOwnConstantElement(const JSObjectRef& holder, uint32_t index,
const ObjectRef& element);

12 changes: 12 additions & 0 deletions deps/v8/src/compiler/js-create-lowering.cc
@@ -1673,6 +1673,10 @@ base::Optional<Node*> JSCreateLowering::TryAllocateFastLiteral(

// Now that we hold the migration lock, get the current map.
MapRef boilerplate_map = boilerplate.map();
// Protect against concurrent changes to the boilerplate object by checking
// for an identical value at the end of the compilation.
dependencies()->DependOnObjectSlotValue(boilerplate, HeapObject::kMapOffset,
boilerplate_map);
{
base::Optional<MapRef> current_boilerplate_map =
boilerplate.map_direct_read();
@@ -1838,10 +1842,18 @@ base::Optional<Node*> JSCreateLowering::TryAllocateFastLiteralElements(
boilerplate.elements(kRelaxedLoad);
if (!maybe_boilerplate_elements.has_value()) return {};
FixedArrayBaseRef boilerplate_elements = maybe_boilerplate_elements.value();
// Protect against concurrent changes to the boilerplate object by checking
// for an identical value at the end of the compilation.
dependencies()->DependOnObjectSlotValue(
boilerplate, JSObject::kElementsOffset, boilerplate_elements);

// Empty or copy-on-write elements just store a constant.
int const elements_length = boilerplate_elements.length();
MapRef elements_map = boilerplate_elements.map();
// Protect against concurrent changes to the boilerplate object by checking
// for an identical value at the end of the compilation.
dependencies()->DependOnObjectSlotValue(boilerplate_elements,
HeapObject::kMapOffset, elements_map);
if (boilerplate_elements.length() == 0 || elements_map.IsFixedCowArrayMap()) {
if (allocation == AllocationType::kOld &&
!boilerplate.IsElementsTenured(boilerplate_elements)) {
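
The three DependOnObjectSlotValue calls above pin the boilerplate's map, its elements pointer, and the elements array's map for the duration of the concurrent compilation. A rough sketch of the race they guard against, again with illustrative stand-in types rather than V8 code:

#include <cstdint>

// Stand-in boilerplate object with the two slots the lowering embeds.
struct FakeBoilerplate {
  uintptr_t map;       // analogous to HeapObject::kMapOffset
  uintptr_t elements;  // analogous to JSObject::kElementsOffset
};

struct PinnedSlot {
  const uintptr_t* slot;
  uintptr_t expected;
  bool StillHolds() const { return *slot == expected; }
};

// Background compiler thread: copy the values it is about to embed in the
// generated code and pin them with slot dependencies.
void RecordBoilerplatePins(const FakeBoilerplate& boilerplate,
                           PinnedSlot pins[2]) {
  pins[0] = {&boilerplate.map, boilerplate.map};
  pins[1] = {&boilerplate.elements, boilerplate.elements};
}

// Main thread, at finalization: if the mutator swapped the elements array or
// migrated the map in the meantime, the check fails and the optimized code is
// discarded instead of baking in stale values.
bool CommitIfUnchanged(const PinnedSlot pins[2]) {
  return pins[0].StillHolds() && pins[1].StillHolds();
}
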
14 changes: 8 additions & 6 deletions deps/v8/src/execution/isolate.cc
@@ -2797,13 +2797,15 @@ bool PromiseHasUserDefinedRejectHandlerInternal(Isolate* isolate,
Handle<PromiseCapability>::cast(promise_or_capability)->promise(),
isolate);
}
promise = Handle<JSPromise>::cast(promise_or_capability);
if (!reaction->reject_handler().IsUndefined(isolate)) {
Handle<JSReceiver> reject_handler(
JSReceiver::cast(reaction->reject_handler()), isolate);
if (PromiseIsRejectHandler(isolate, reject_handler)) return true;
if (promise_or_capability->IsJSPromise()) {
promise = Handle<JSPromise>::cast(promise_or_capability);
if (!reaction->reject_handler().IsUndefined(isolate)) {
Handle<JSReceiver> reject_handler(
JSReceiver::cast(reaction->reject_handler()), isolate);
if (PromiseIsRejectHandler(isolate, reject_handler)) return true;
}
if (isolate->PromiseHasUserDefinedRejectHandler(promise)) return true;
}
if (isolate->PromiseHasUserDefinedRejectHandler(promise)) return true;
}
current = handle(reaction->next(), isolate);
}
78 changes: 66 additions & 12 deletions deps/v8/src/heap/concurrent-marking.cc
@@ -65,21 +65,48 @@ class ConcurrentMarkingState final
// Helper class for storing in-object slot addresses and values.
class SlotSnapshot {
public:
SlotSnapshot() : number_of_slots_(0) {}
SlotSnapshot()
: number_of_object_slots_(0), number_of_external_pointer_slots_(0) {}
SlotSnapshot(const SlotSnapshot&) = delete;
SlotSnapshot& operator=(const SlotSnapshot&) = delete;
int number_of_slots() const { return number_of_slots_; }
ObjectSlot slot(int i) const { return snapshot_[i].first; }
Object value(int i) const { return snapshot_[i].second; }
void clear() { number_of_slots_ = 0; }
int number_of_object_slots() const { return number_of_object_slots_; }
int number_of_external_pointer_slots() const {
return number_of_external_pointer_slots_;
}
ObjectSlot object_slot(int i) const { return object_snapshot_[i].first; }
Object object_value(int i) const { return object_snapshot_[i].second; }
ExternalPointerSlot external_pointer_slot(int i) const {
return external_pointer_snapshot_[i].first;
}
ExternalPointerTag external_pointer_tag(int i) const {
return external_pointer_snapshot_[i].second;
}
void clear() {
number_of_object_slots_ = 0;
number_of_external_pointer_slots_ = 0;
}
void add(ObjectSlot slot, Object value) {
snapshot_[number_of_slots_++] = {slot, value};
DCHECK_LT(number_of_object_slots_, kMaxObjectSlots);
object_snapshot_[number_of_object_slots_++] = {slot, value};
}
void add(ExternalPointerSlot slot, ExternalPointerTag tag) {
DCHECK_LT(number_of_external_pointer_slots_, kMaxExternalPointerSlots);
external_pointer_snapshot_[number_of_external_pointer_slots_++] = {slot,
tag};
}

private:
static const int kMaxSnapshotSize = JSObject::kMaxInstanceSize / kTaggedSize;
int number_of_slots_;
std::pair<ObjectSlot, Object> snapshot_[kMaxSnapshotSize];
// Maximum number of pointer slots of objects we use snapshotting for.
// ConsStrings can have 3 (Map + Left + Right) pointers.
static constexpr int kMaxObjectSlots = 3;
// Maximum number of external pointer slots of objects we use snapshotting
// for. ExternalStrings can have 2 (resource + cached data) external pointers.
static constexpr int kMaxExternalPointerSlots = 2;
int number_of_object_slots_;
int number_of_external_pointer_slots_;
std::pair<ObjectSlot, Object> object_snapshot_[kMaxObjectSlots];
std::pair<ExternalPointerSlot, ExternalPointerTag>
external_pointer_snapshot_[kMaxExternalPointerSlots];
};

class ConcurrentMarkingVisitorUtility {
@@ -111,9 +138,9 @@ class ConcurrentMarkingVisitorUtility {
template <typename Visitor>
static void VisitPointersInSnapshot(Visitor* visitor, HeapObject host,
const SlotSnapshot& snapshot) {
for (int i = 0; i < snapshot.number_of_slots(); i++) {
ObjectSlot slot = snapshot.slot(i);
Object object = snapshot.value(i);
for (int i = 0; i < snapshot.number_of_object_slots(); i++) {
ObjectSlot slot = snapshot.object_slot(i);
Object object = snapshot.object_value(i);
DCHECK(!HasWeakHeapObjectTag(object));
if (!object.IsHeapObject()) continue;
HeapObject heap_object = HeapObject::cast(object);
@@ -126,6 +153,16 @@
}
}

template <typename Visitor>
static void VisitExternalPointersInSnapshot(Visitor* visitor, HeapObject host,
const SlotSnapshot& snapshot) {
for (int i = 0; i < snapshot.number_of_external_pointer_slots(); i++) {
ExternalPointerSlot slot = snapshot.external_pointer_slot(i);
ExternalPointerTag tag = snapshot.external_pointer_tag(i);
visitor->VisitExternalPointer(host, slot, tag);
}
}

template <typename Visitor, typename T>
static int VisitFullyWithSnapshot(Visitor* visitor, Map map, T object) {
using TBodyDescriptor = typename T::BodyDescriptor;
@@ -136,6 +173,8 @@
if (!visitor->ShouldVisit(object)) return 0;
ConcurrentMarkingVisitorUtility::VisitPointersInSnapshot(visitor, object,
snapshot);
ConcurrentMarkingVisitorUtility::VisitExternalPointersInSnapshot(
visitor, object, snapshot);
return size;
}

@@ -182,6 +221,11 @@ class ConcurrentMarkingVisitorUtility {
UNREACHABLE();
}

void VisitExternalPointer(HeapObject host, ExternalPointerSlot slot,
ExternalPointerTag tag) override {
slot_snapshot_->add(slot, tag);
}

void VisitCodeTarget(Code host, RelocInfo* rinfo) final {
// This should never happen, because snapshotting is performed only on
// some String subclasses.
@@ -450,6 +494,16 @@ class ConcurrentMarkingVisitor final
return SeqTwoByteString::SizeFor(object.length(kAcquireLoad));
}

int VisitExternalOneByteString(Map map, ExternalOneByteString object) {
return ConcurrentMarkingVisitorUtility::VisitFullyWithSnapshot(this, map,
object);
}

int VisitExternalTwoByteString(Map map, ExternalTwoByteString object) {
return ConcurrentMarkingVisitorUtility::VisitFullyWithSnapshot(this, map,
object);
}

// Implements ephemeron semantics: Marks value if key is already reachable.
// Returns true if value was actually marked.
bool ProcessEphemeron(HeapObject key, HeapObject value) {
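
As a rough illustration of the snapshotting approach extended here (stand-in types, not V8 code): the concurrent marker copies a small, fixed-bound set of slot values out of the string first and then visits only the copy, so a racing transition such as ExternalString -> ThinString cannot be observed halfway through a visit. The bounds mirror the comments above: at most 3 tagged slots (ConsString: map, left, right) and 2 external pointer slots (ExternalString: resource and cached data).

#include <array>
#include <cstddef>
#include <cstdint>
#include <utility>

struct StringSlotSnapshot {
  static constexpr size_t kMaxObjectSlots = 3;           // map + left + right
  static constexpr size_t kMaxExternalPointerSlots = 2;  // resource + cached data

  std::array<std::pair<size_t, uintptr_t>, kMaxObjectSlots> objects{};
  std::array<std::pair<size_t, uint16_t>, kMaxExternalPointerSlots> externals{};
  size_t object_count = 0;
  size_t external_count = 0;

  // Phase 1: record offset/value pairs while the object is in a consistent
  // state; the arrays are deliberately tiny because only a few string shapes
  // are snapshotted.
  void AddObject(size_t offset, uintptr_t value) {
    objects[object_count++] = {offset, value};
  }
  void AddExternal(size_t offset, uint16_t tag) {
    externals[external_count++] = {offset, tag};
  }
};

// Phase 2: mark from the copied values only; the live object is not re-read,
// so a concurrent layout change cannot be seen mid-visit.
template <typename MarkObject, typename MarkExternal>
void VisitSnapshot(const StringSlotSnapshot& s, MarkObject mark_object,
                   MarkExternal mark_external) {
  for (size_t i = 0; i < s.object_count; ++i) mark_object(s.objects[i].second);
  for (size_t i = 0; i < s.external_count; ++i)
    mark_external(s.externals[i].first, s.externals[i].second);
}
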
14 changes: 13 additions & 1 deletion deps/v8/src/objects/string.cc
@@ -223,6 +223,14 @@ void String::MakeThin(IsolateT* isolate, String internalized) {
Map target_map = ComputeThinStringMap(isolate, initial_shape,
internalized.IsOneByteRepresentation());
if (initial_shape.IsExternal()) {
// Notify GC about the layout change before the transition to avoid
// concurrent marking from observing any in-between state (e.g.
// ExternalString map where the resource external pointer is overwritten
// with a tagged pointer).
// ExternalString -> ThinString transitions can only happen on the
// main-thread.
isolate->AsIsolate()->heap()->NotifyObjectLayoutChange(
*this, no_gc, InvalidateRecordedSlots::kYes, ThinString::kSize);
MigrateExternalString(isolate->AsIsolate(), *this, internalized);
}

@@ -231,7 +239,11 @@ void String::MakeThin(IsolateT* isolate, String internalized) {
// ThinString.
ThinString thin = ThinString::unchecked_cast(*this);
thin.set_actual(internalized);
set_map_safe_transition(target_map, kReleaseStore);
if (initial_shape.IsExternal()) {
set_map(target_map, kReleaseStore);
} else {
set_map_safe_transition(target_map, kReleaseStore);
}

DCHECK_GE(old_size, ThinString::kSize);
int size_delta = old_size - ThinString::kSize;
