deps: patch V8 to 10.2.154.23
Refs: v8/v8@10.2.154.19...10.2.154.23
PR-URL: #45997
Reviewed-By: Jiawen Geng <technicalcute@gmail.com>
Reviewed-By: Danielle Adams <adamzdanielle@gmail.com>
targos authored and danielleadams committed Jan 3, 2023
Parent: 71433f3 · Commit: 2088cb4
Showing 8 changed files with 104 additions and 111 deletions.
2 changes: 1 addition & 1 deletion deps/v8/include/v8-version.h
@@ -11,7 +11,7 @@
#define V8_MAJOR_VERSION 10
#define V8_MINOR_VERSION 2
#define V8_BUILD_NUMBER 154
#define V8_PATCH_LEVEL 19
#define V8_PATCH_LEVEL 23

// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
25 changes: 10 additions & 15 deletions deps/v8/src/ast/scopes.cc
@@ -888,9 +888,8 @@ void DeclarationScope::AddLocal(Variable* var) {
}

void Scope::Snapshot::Reparent(DeclarationScope* new_parent) {
DCHECK(!IsCleared());
DCHECK_EQ(new_parent, outer_scope_and_calls_eval_.GetPointer()->inner_scope_);
DCHECK_EQ(new_parent->outer_scope_, outer_scope_and_calls_eval_.GetPointer());
DCHECK_EQ(new_parent, outer_scope_->inner_scope_);
DCHECK_EQ(new_parent->outer_scope_, outer_scope_);
DCHECK_EQ(new_parent, new_parent->GetClosureScope());
DCHECK_NULL(new_parent->inner_scope_);
DCHECK(new_parent->unresolved_list_.is_empty());
@@ -915,12 +914,11 @@ void Scope::Snapshot::Reparent(DeclarationScope* new_parent) {
new_parent->sibling_ = top_inner_scope_;
}

Scope* outer_scope = outer_scope_and_calls_eval_.GetPointer();
new_parent->unresolved_list_.MoveTail(&outer_scope->unresolved_list_,
new_parent->unresolved_list_.MoveTail(&outer_scope_->unresolved_list_,
top_unresolved_);

// Move temporaries allocated for complex parameter initializers.
DeclarationScope* outer_closure = outer_scope->GetClosureScope();
DeclarationScope* outer_closure = outer_scope_->GetClosureScope();
for (auto it = top_local_; it != outer_closure->locals()->end(); ++it) {
Variable* local = *it;
DCHECK_EQ(VariableMode::kTemporary, local->mode());
@@ -932,16 +930,10 @@ void Scope::Snapshot::Reparent(DeclarationScope* new_parent) {
outer_closure->locals_.Rewind(top_local_);

// Move eval calls since Snapshot's creation into new_parent.
if (outer_scope_and_calls_eval_->calls_eval_) {
new_parent->RecordDeclarationScopeEvalCall();
new_parent->inner_scope_calls_eval_ = true;
if (outer_scope_->calls_eval_) {
new_parent->RecordEvalCall();
declaration_scope_->sloppy_eval_can_extend_vars_ = false;
}

// We are in the arrow function case. The calls eval we may have recorded
// is intended for the inner scope and we should simply restore the
// original "calls eval" flag of the outer scope.
RestoreEvalFlag();
Clear();
}

void Scope::ReplaceOuterScope(Scope* outer) {
@@ -2579,6 +2571,9 @@ void Scope::AllocateVariablesRecursively() {
this->ForEach([](Scope* scope) -> Iteration {
DCHECK(!scope->already_resolved_);
if (WasLazilyParsed(scope)) return Iteration::kContinue;
if (scope->sloppy_eval_can_extend_vars_) {
scope->num_heap_slots_ = Context::MIN_CONTEXT_EXTENDED_SLOTS;
}
DCHECK_EQ(scope->ContextHeaderLength(), scope->num_heap_slots_);

// Allocate variables for this scope.
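The hunk above moves the MIN_CONTEXT_EXTENDED_SLOTS bump into AllocateVariablesRecursively(), out of the eval-recording code in scopes.h (where the corresponding assignment is removed below). A minimal standalone sketch of why: if a Snapshot can still clear sloppy_eval_can_extend_vars_ after the eval call was recorded, deciding the context size eagerly could leave a stale, oversized slot count. ScopeSketch and the constant values are invented for illustration; only the flag/slot relationship mirrors the diff.

#include <cassert>

// Illustrative values only; the real constants live in V8's contexts header.
constexpr int kMinContextSlots = 2;
constexpr int kMinContextExtendedSlots = 3;

// Hypothetical stand-in for a DeclarationScope, reduced to the two fields
// this hunk touches.
struct ScopeSketch {
  bool sloppy_eval_can_extend_vars = false;
  int num_heap_slots = kMinContextSlots;

  void AllocateVariablesRecursively() {
    // Decide the context size only once the flag is final, as the new code
    // above does.
    if (sloppy_eval_can_extend_vars) {
      num_heap_slots = kMinContextExtendedSlots;
    }
  }
};

int main() {
  ScopeSketch scope;
  scope.sloppy_eval_can_extend_vars = true;   // sloppy eval call recorded...
  scope.sloppy_eval_can_extend_vars = false;  // ...then cleared by a Snapshot
  scope.AllocateVariablesRecursively();
  assert(scope.num_heap_slots == kMinContextSlots);  // no stale extended size
  return 0;
}
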
75 changes: 30 additions & 45 deletions deps/v8/src/ast/scopes.h
@@ -112,58 +112,38 @@ class V8_EXPORT_PRIVATE Scope : public NON_EXPORTED_BASE(ZoneObject) {

class Snapshot final {
public:
Snapshot()
: outer_scope_and_calls_eval_(nullptr, false),
top_unresolved_(),
top_local_() {
DCHECK(IsCleared());
}
inline explicit Snapshot(Scope* scope);

// Disallow copy and move.
Snapshot(const Snapshot&) = delete;
Snapshot(Snapshot&&) = delete;

~Snapshot() {
// If we're still active, there was no arrow function. In that case outer
// calls eval if it already called eval before this snapshot started, or
// if the code during the snapshot called eval.
if (!IsCleared() && outer_scope_and_calls_eval_.GetPayload()) {
RestoreEvalFlag();
// Restore eval flags from before the scope was active.
if (sloppy_eval_can_extend_vars_) {
declaration_scope_->sloppy_eval_can_extend_vars_ = true;
}
}

void RestoreEvalFlag() {
if (outer_scope_and_calls_eval_.GetPayload()) {
// This recreates both calls_eval and sloppy_eval_can_extend_vars.
outer_scope_and_calls_eval_.GetPointer()->RecordEvalCall();
if (calls_eval_) {
outer_scope_->calls_eval_ = true;
}
}

void Reparent(DeclarationScope* new_parent);
bool IsCleared() const {
return outer_scope_and_calls_eval_.GetPointer() == nullptr;
}

void Clear() {
outer_scope_and_calls_eval_.SetPointer(nullptr);
#ifdef DEBUG
outer_scope_and_calls_eval_.SetPayload(false);
top_inner_scope_ = nullptr;
top_local_ = base::ThreadedList<Variable>::Iterator();
top_unresolved_ = UnresolvedList::Iterator();
#endif
}

private:
// During tracking calls_eval caches whether the outer scope called eval.
// Upon move assignment we store whether the new inner scope calls eval into
// the move target calls_eval bit, and restore calls eval on the outer
// scope.
base::PointerWithPayload<Scope, bool, 1> outer_scope_and_calls_eval_;
Scope* outer_scope_;
Scope* declaration_scope_;
Scope* top_inner_scope_;
UnresolvedList::Iterator top_unresolved_;
base::ThreadedList<Variable>::Iterator top_local_;
// While the scope is active, the scope caches the flag values for
// outer_scope_ / declaration_scope_ so they can be used to know what happened
// while parsing the arrow head. If this turns out to be an arrow head, new
// values on the respective scopes will be cleared and moved to the inner
// scope. Otherwise the cached flags will be merged with the flags from the
// arrow head.
bool calls_eval_;
bool sloppy_eval_can_extend_vars_;
};

enum class DeserializationMode { kIncludingVariables, kScopesOnly };
@@ -909,8 +889,8 @@ class V8_EXPORT_PRIVATE DeclarationScope : public Scope {
void RecordDeclarationScopeEvalCall() {
calls_eval_ = true;

// If this isn't a sloppy eval, we don't care about it.
if (language_mode() != LanguageMode::kSloppy) return;
// The caller already checked whether we're in sloppy mode.
CHECK(is_sloppy(language_mode()));

// Sloppy eval in script scopes can only introduce global variables anyway,
// so we don't care that it calls sloppy eval.
Expand Down Expand Up @@ -944,7 +924,6 @@ class V8_EXPORT_PRIVATE DeclarationScope : public Scope {
}

sloppy_eval_can_extend_vars_ = true;
num_heap_slots_ = Context::MIN_CONTEXT_EXTENDED_SLOTS;
}

bool sloppy_eval_can_extend_vars() const {
@@ -1369,7 +1348,9 @@ class V8_EXPORT_PRIVATE DeclarationScope : public Scope {

void Scope::RecordEvalCall() {
calls_eval_ = true;
GetDeclarationScope()->RecordDeclarationScopeEvalCall();
if (is_sloppy(language_mode())) {
GetDeclarationScope()->RecordDeclarationScopeEvalCall();
}
RecordInnerScopeEvalCall();
// The eval contents might access "super" (if it's inside a function that
// binds super).
@@ -1382,14 +1363,18 @@ void Scope::RecordEvalCall() {
}

Scope::Snapshot::Snapshot(Scope* scope)
: outer_scope_and_calls_eval_(scope, scope->calls_eval_),
: outer_scope_(scope),
declaration_scope_(scope->GetDeclarationScope()),
top_inner_scope_(scope->inner_scope_),
top_unresolved_(scope->unresolved_list_.end()),
top_local_(scope->GetClosureScope()->locals_.end()) {
// Reset in order to record eval calls during this Snapshot's lifetime.
outer_scope_and_calls_eval_.GetPointer()->calls_eval_ = false;
outer_scope_and_calls_eval_.GetPointer()->sloppy_eval_can_extend_vars_ =
false;
top_local_(scope->GetClosureScope()->locals_.end()),
calls_eval_(outer_scope_->calls_eval_),
sloppy_eval_can_extend_vars_(
declaration_scope_->sloppy_eval_can_extend_vars_) {
// Reset in order to record (sloppy) eval calls during this Snapshot's
// lifetime.
outer_scope_->calls_eval_ = false;
declaration_scope_->sloppy_eval_can_extend_vars_ = false;
}

class ModuleScope final : public DeclarationScope {
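Since the Snapshot now stores the two flags in plain members instead of a PointerWithPayload, the constructor/destructor pair above amounts to a cache, reset, restore pattern. Below is a minimal sketch of just that pattern, assuming only the two booleans matter; EvalFlagSnapshot and its members are invented names, and the real class also snapshots inner scopes, locals, and unresolved references, while Reparent() moves the recorded flags into the new inner scope instead of restoring them.

// Minimal sketch, not V8's API: cache the flags on construction, reset them
// so calls recorded during the snapshot's lifetime are distinguishable, and
// merge the cached values back on destruction (the "no arrow function" case).
struct EvalFlagSnapshot {
  bool* calls_eval;                   // stands in for outer_scope_->calls_eval_
  bool* sloppy_eval_can_extend_vars;  // for declaration_scope_'s flag
  bool saved_calls_eval;
  bool saved_extend_vars;

  EvalFlagSnapshot(bool* ce, bool* sv)
      : calls_eval(ce),
        sloppy_eval_can_extend_vars(sv),
        saved_calls_eval(*ce),
        saved_extend_vars(*sv) {
    *calls_eval = false;                   // record fresh (sloppy) eval calls
    *sloppy_eval_can_extend_vars = false;  // while the snapshot is active
  }

  ~EvalFlagSnapshot() {
    // Restore what was true before the snapshot without clearing anything
    // that was recorded while it was active.
    if (saved_calls_eval) *calls_eval = true;
    if (saved_extend_vars) *sloppy_eval_can_extend_vars = true;
  }
};
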
15 changes: 14 additions & 1 deletion deps/v8/src/builtins/promise-any.tq
@@ -119,7 +119,19 @@ PromiseAnyRejectElementClosure(
kPromiseAnyRejectElementRemainingSlot);

// 9. Set errors[index] to x.
const newCapacity = IntPtrMax(SmiUntag(remainingElementsCount), index + 1);

// The max computation below is an optimization to avoid excessive allocations
// in the case of input promises being asynchronously rejected in ascending
// index order.
//
// Note that subtracting 1 from remainingElementsCount is intentional. The
// value of remainingElementsCount is 1 larger than the actual value during
// iteration. So in the case of synchronous rejection, newCapacity is the
// correct size by subtracting 1. In the case of asynchronous rejection this
// is 1 smaller than the correct size, but is not incorrect as it is maxed
// with index + 1.
const newCapacity =
IntPtrMax(SmiUntag(remainingElementsCount) - 1, index + 1);
if (newCapacity > errors.length_intptr) deferred {
errors = ExtractFixedArray(errors, 0, errors.length_intptr, newCapacity);
*ContextSlot(
@@ -306,6 +318,7 @@ Reject(JSAny) {
PromiseAnyRejectElementContextSlots::
kPromiseAnyRejectElementErrorsSlot);

check(errors.length == index - 1);
const error = ConstructAggregateError(errors);
// 3. Return ThrowCompletion(error).
goto Reject(error);
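A worked restatement of the capacity arithmetic described in the comment above, as a standalone sketch; NewErrorsCapacity is an invented name, since the Torque code computes the value inline with IntPtrMax and SmiUntag.

#include <algorithm>
#include <cassert>
#include <cstdint>

// remainingElementsCount is kept one larger than the number of still-pending
// promises, so subtracting 1 gives the right size for synchronous rejection,
// and the max with index + 1 keeps late asynchronous rejections correct.
int64_t NewErrorsCapacity(int64_t remaining_elements_count, int64_t index) {
  return std::max(remaining_elements_count - 1, index + 1);
}

int main() {
  // Counter one above three pending promises: a rejection at index 0 sizes
  // the errors array for all three in a single allocation.
  assert(NewErrorsCapacity(4, 0) == 3);
  // Late asynchronous rejection: index + 1 dominates, so errors[index] fits.
  assert(NewErrorsCapacity(2, 5) == 6);
  return 0;
}
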
51 changes: 25 additions & 26 deletions deps/v8/src/compiler/effect-control-linearizer.cc
@@ -5384,6 +5384,8 @@ Node* EffectControlLinearizer::LowerLoadFieldByIndex(Node* node) {

auto if_double = __ MakeDeferredLabel();
auto done = __ MakeLabel(MachineRepresentation::kTagged);
auto loaded_field = __ MakeLabel(MachineRepresentation::kTagged);
auto done_double = __ MakeLabel(MachineRepresentation::kFloat64);

// Check if field is a mutable double field.
__ GotoIfNot(__ IntPtrEqual(__ WordAnd(index, one), zero), &if_double);
@@ -5400,8 +5402,8 @@ Node* EffectControlLinearizer::LowerLoadFieldByIndex(Node* node) {
Node* offset =
__ IntAdd(__ WordShl(index, __ IntPtrConstant(kTaggedSizeLog2 - 1)),
__ IntPtrConstant(JSObject::kHeaderSize - kHeapObjectTag));
Node* result = __ Load(MachineType::AnyTagged(), object, offset);
__ Goto(&done, result);
Node* field = __ Load(MachineType::AnyTagged(), object, offset);
__ Goto(&loaded_field, field);
}

// The field is located in the properties backing store of {object}.
@@ -5415,18 +5417,15 @@ Node* EffectControlLinearizer::LowerLoadFieldByIndex(Node* node) {
__ IntPtrConstant(kTaggedSizeLog2 - 1)),
__ IntPtrConstant((FixedArray::kHeaderSize - kTaggedSize) -
kHeapObjectTag));
Node* result = __ Load(MachineType::AnyTagged(), properties, offset);
__ Goto(&done, result);
Node* field = __ Load(MachineType::AnyTagged(), properties, offset);
__ Goto(&loaded_field, field);
}
}

// The field is a Double field, either unboxed in the object on 64-bit
// architectures, or a mutable HeapNumber.
__ Bind(&if_double);
{
auto loaded_field = __ MakeLabel(MachineRepresentation::kTagged);
auto done_double = __ MakeLabel(MachineRepresentation::kFloat64);

index = __ WordSar(index, one);

// Check if field is in-object or out-of-object.
@@ -5454,27 +5453,27 @@
Node* field = __ Load(MachineType::AnyTagged(), properties, offset);
__ Goto(&loaded_field, field);
}
}

__ Bind(&loaded_field);
{
Node* field = loaded_field.PhiAt(0);
// We may have transitioned in-place away from double, so check that
// this is a HeapNumber -- otherwise the load is fine and we don't need
// to copy anything anyway.
__ GotoIf(ObjectIsSmi(field), &done, field);
Node* field_map = __ LoadField(AccessBuilder::ForMap(), field);
__ GotoIfNot(__ TaggedEqual(field_map, __ HeapNumberMapConstant()), &done,
field);

Node* value = __ LoadField(AccessBuilder::ForHeapNumberValue(), field);
__ Goto(&done_double, value);
}
__ Bind(&loaded_field);
{
Node* field = loaded_field.PhiAt(0);
// We may have transitioned in-place away from double, so check that
// this is a HeapNumber -- otherwise the load is fine and we don't need
// to copy anything anyway.
__ GotoIf(ObjectIsSmi(field), &done, field);
Node* field_map = __ LoadField(AccessBuilder::ForMap(), field);
__ GotoIfNot(__ TaggedEqual(field_map, __ HeapNumberMapConstant()), &done,
field);

__ Bind(&done_double);
{
Node* result = AllocateHeapNumberWithValue(done_double.PhiAt(0));
__ Goto(&done, result);
}
Node* value = __ LoadField(AccessBuilder::ForHeapNumberValue(), field);
__ Goto(&done_double, value);
}

__ Bind(&done_double);
{
Node* result = AllocateHeapNumberWithValue(done_double.PhiAt(0));
__ Goto(&done, result);
}

__ Bind(&done);
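Much of the hunk above is the loaded_field and done_double blocks moving out of the if_double branch; the functional change is that the tagged in-object and backing-store loads now also branch to loaded_field instead of straight to done, so a HeapNumber loaded through either path is re-boxed. Here is a minimal standalone sketch of that shared join point; the types and LoadFieldByIndexSketch are illustrative, not the graph-assembler API.

#include <memory>
#include <variant>

struct Smi { int value; };
struct HeapNumber { double value; };
// Stand-in for a tagged value: either an immediate or a boxed double.
using Tagged = std::variant<Smi, std::shared_ptr<HeapNumber>>;

Tagged LoadFieldByIndexSketch(const Tagged& field) {
  // loaded_field: single join point for the in-object and backing-store loads.
  if (auto* boxed = std::get_if<std::shared_ptr<HeapNumber>>(&field)) {
    // done_double: allocate a fresh HeapNumber so the caller never aliases
    // the object's in-place double storage.
    return std::make_shared<HeapNumber>(HeapNumber{(*boxed)->value});
  }
  // done: Smis and ordinary tagged values pass through unchanged.
  return field;
}

int main() {
  Tagged field = std::make_shared<HeapNumber>(HeapNumber{1.5});
  Tagged copy = LoadFieldByIndexSketch(field);
  // Distinct allocations holding the same value.
  return std::get<1>(copy) != std::get<1>(field) ? 0 : 1;
}
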
3 changes: 2 additions & 1 deletion deps/v8/src/compiler/heap-refs.h
@@ -56,7 +56,8 @@ class PropertyAccessInfo;
enum class AccessMode { kLoad, kStore, kStoreInLiteral, kHas, kDefine };

inline bool IsAnyStore(AccessMode mode) {
return mode == AccessMode::kStore || mode == AccessMode::kStoreInLiteral;
return mode == AccessMode::kStore || mode == AccessMode::kStoreInLiteral ||
mode == AccessMode::kDefine;
}

enum class OddballType : uint8_t {
2 changes: 1 addition & 1 deletion deps/v8/src/compiler/js-native-context-specialization.cc
@@ -2896,7 +2896,7 @@ JSNativeContextSpecialization::BuildElementAccess(

// Don't try to store to a copy-on-write backing store (unless supported by
// the store mode).
if (keyed_mode.access_mode() == AccessMode::kStore &&
if (IsAnyStore(keyed_mode.access_mode()) &&
IsSmiOrObjectElementsKind(elements_kind) &&
!IsCOWHandlingStoreMode(keyed_mode.store_mode())) {
effect = graph()->NewNode(
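Taken together with the heap-refs.h hunk above, AccessMode::kDefine is now treated like any other store, so the copy-on-write guard in BuildElementAccess also fires for defining stores. A compile-checked restatement follows; constexpr is added here only so the static_asserts work, while the real header declares the function inline.

enum class AccessMode { kLoad, kStore, kStoreInLiteral, kHas, kDefine };

constexpr bool IsAnyStore(AccessMode mode) {
  return mode == AccessMode::kStore || mode == AccessMode::kStoreInLiteral ||
         mode == AccessMode::kDefine;
}

static_assert(IsAnyStore(AccessMode::kDefine),
              "defining stores must take the store paths (e.g. the COW check)");
static_assert(!IsAnyStore(AccessMode::kLoad) && !IsAnyStore(AccessMode::kHas),
              "reads are unaffected");
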
42 changes: 21 additions & 21 deletions deps/v8/tools/v8heapconst.py
@@ -538,27 +538,27 @@
("old_space", 0x04b39): "StringSplitCache",
("old_space", 0x04f41): "RegExpMultipleCache",
("old_space", 0x05349): "BuiltinsConstantsTable",
("old_space", 0x05775): "AsyncFunctionAwaitRejectSharedFun",
("old_space", 0x05799): "AsyncFunctionAwaitResolveSharedFun",
("old_space", 0x057bd): "AsyncGeneratorAwaitRejectSharedFun",
("old_space", 0x057e1): "AsyncGeneratorAwaitResolveSharedFun",
("old_space", 0x05805): "AsyncGeneratorYieldResolveSharedFun",
("old_space", 0x05829): "AsyncGeneratorReturnResolveSharedFun",
("old_space", 0x0584d): "AsyncGeneratorReturnClosedRejectSharedFun",
("old_space", 0x05871): "AsyncGeneratorReturnClosedResolveSharedFun",
("old_space", 0x05895): "AsyncIteratorValueUnwrapSharedFun",
("old_space", 0x058b9): "PromiseAllResolveElementSharedFun",
("old_space", 0x058dd): "PromiseAllSettledResolveElementSharedFun",
("old_space", 0x05901): "PromiseAllSettledRejectElementSharedFun",
("old_space", 0x05925): "PromiseAnyRejectElementSharedFun",
("old_space", 0x05949): "PromiseCapabilityDefaultRejectSharedFun",
("old_space", 0x0596d): "PromiseCapabilityDefaultResolveSharedFun",
("old_space", 0x05991): "PromiseCatchFinallySharedFun",
("old_space", 0x059b5): "PromiseGetCapabilitiesExecutorSharedFun",
("old_space", 0x059d9): "PromiseThenFinallySharedFun",
("old_space", 0x059fd): "PromiseThrowerFinallySharedFun",
("old_space", 0x05a21): "PromiseValueThunkFinallySharedFun",
("old_space", 0x05a45): "ProxyRevokeSharedFun",
("old_space", 0x05779): "AsyncFunctionAwaitRejectSharedFun",
("old_space", 0x0579d): "AsyncFunctionAwaitResolveSharedFun",
("old_space", 0x057c1): "AsyncGeneratorAwaitRejectSharedFun",
("old_space", 0x057e5): "AsyncGeneratorAwaitResolveSharedFun",
("old_space", 0x05809): "AsyncGeneratorYieldResolveSharedFun",
("old_space", 0x0582d): "AsyncGeneratorReturnResolveSharedFun",
("old_space", 0x05851): "AsyncGeneratorReturnClosedRejectSharedFun",
("old_space", 0x05875): "AsyncGeneratorReturnClosedResolveSharedFun",
("old_space", 0x05899): "AsyncIteratorValueUnwrapSharedFun",
("old_space", 0x058bd): "PromiseAllResolveElementSharedFun",
("old_space", 0x058e1): "PromiseAllSettledResolveElementSharedFun",
("old_space", 0x05905): "PromiseAllSettledRejectElementSharedFun",
("old_space", 0x05929): "PromiseAnyRejectElementSharedFun",
("old_space", 0x0594d): "PromiseCapabilityDefaultRejectSharedFun",
("old_space", 0x05971): "PromiseCapabilityDefaultResolveSharedFun",
("old_space", 0x05995): "PromiseCatchFinallySharedFun",
("old_space", 0x059b9): "PromiseGetCapabilitiesExecutorSharedFun",
("old_space", 0x059dd): "PromiseThenFinallySharedFun",
("old_space", 0x05a01): "PromiseThrowerFinallySharedFun",
("old_space", 0x05a25): "PromiseValueThunkFinallySharedFun",
("old_space", 0x05a49): "ProxyRevokeSharedFun",
}

# Lower 32 bits of first page addresses for various heap spaces.
