deps: patch V8 to 10.7.193.22
Refs: v8/v8@10.7.193.20...10.7.193.22
PR-URL: #45460
Reviewed-By: Antoine du Hamel <duhamelantoine1995@gmail.com>
Reviewed-By: Jiawen Geng <technicalcute@gmail.com>
Reviewed-By: Yagiz Nizipli <yagiz@nizipli.com>
targos authored and ruyadorno committed Nov 21, 2022
1 parent 45b54ee · commit 413bf9a
Showing 3 changed files with 49 additions and 53 deletions.
deps/v8/include/v8-version.h (1 addition, 1 deletion)
@@ -11,7 +11,7 @@
 #define V8_MAJOR_VERSION 10
 #define V8_MINOR_VERSION 7
 #define V8_BUILD_NUMBER 193
-#define V8_PATCH_LEVEL 20
+#define V8_PATCH_LEVEL 22
 
 // Use 1 for candidates and 0 otherwise.
 // (Boolean macro values are not supported by all preprocessors.)
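Note: these macros are the source of the version string that V8 and Node.js report (e.g. process.versions.v8). A minimal sketch, not part of this commit, of how an embedder could gate a build on this patch level, assuming deps/v8/include is on the include path:

    #include <cstdio>

    #include "v8-version.h"

    int main() {
      // V8_PATCH_LEVEL is the component this commit bumps (20 -> 22).
      static_assert(V8_PATCH_LEVEL >= 22, "expects V8 10.7.193.22 or later");
      std::printf("V8 %d.%d.%d.%d\n", V8_MAJOR_VERSION, V8_MINOR_VERSION,
                  V8_BUILD_NUMBER, V8_PATCH_LEVEL);
      return 0;
    }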
deps/v8/src/compiler/effect-control-linearizer.cc (25 additions, 26 deletions)
@@ -5294,6 +5294,8 @@ Node* EffectControlLinearizer::LowerLoadFieldByIndex(Node* node) {
 
   auto if_double = __ MakeDeferredLabel();
   auto done = __ MakeLabel(MachineRepresentation::kTagged);
+  auto loaded_field = __ MakeLabel(MachineRepresentation::kTagged);
+  auto done_double = __ MakeLabel(MachineRepresentation::kFloat64);
 
   // Check if field is a mutable double field.
   __ GotoIfNot(__ IntPtrEqual(__ WordAnd(index, one), zero), &if_double);
@@ -5310,8 +5312,8 @@ Node* EffectControlLinearizer::LowerLoadFieldByIndex(Node* node) {
       Node* offset =
           __ IntAdd(__ WordShl(index, __ IntPtrConstant(kTaggedSizeLog2 - 1)),
                     __ IntPtrConstant(JSObject::kHeaderSize - kHeapObjectTag));
-      Node* result = __ Load(MachineType::AnyTagged(), object, offset);
-      __ Goto(&done, result);
+      Node* field = __ Load(MachineType::AnyTagged(), object, offset);
+      __ Goto(&loaded_field, field);
     }
 
     // The field is located in the properties backing store of {object}.
@@ -5325,18 +5327,15 @@ Node* EffectControlLinearizer::LowerLoadFieldByIndex(Node* node) {
                                __ IntPtrConstant(kTaggedSizeLog2 - 1)),
                     __ IntPtrConstant((FixedArray::kHeaderSize - kTaggedSize) -
                                       kHeapObjectTag));
-      Node* result = __ Load(MachineType::AnyTagged(), properties, offset);
-      __ Goto(&done, result);
+      Node* field = __ Load(MachineType::AnyTagged(), properties, offset);
+      __ Goto(&loaded_field, field);
     }
   }
 
   // The field is a Double field, either unboxed in the object on 64-bit
   // architectures, or a mutable HeapNumber.
   __ Bind(&if_double);
   {
-    auto loaded_field = __ MakeLabel(MachineRepresentation::kTagged);
-    auto done_double = __ MakeLabel(MachineRepresentation::kFloat64);
-
     index = __ WordSar(index, one);
 
     // Check if field is in-object or out-of-object.
@@ -5364,27 +5363,27 @@ Node* EffectControlLinearizer::LowerLoadFieldByIndex(Node* node) {
       Node* field = __ Load(MachineType::AnyTagged(), properties, offset);
       __ Goto(&loaded_field, field);
     }
+  }
 
-    __ Bind(&loaded_field);
-    {
-      Node* field = loaded_field.PhiAt(0);
-      // We may have transitioned in-place away from double, so check that
-      // this is a HeapNumber -- otherwise the load is fine and we don't need
-      // to copy anything anyway.
-      __ GotoIf(ObjectIsSmi(field), &done, field);
-      Node* field_map = __ LoadField(AccessBuilder::ForMap(), field);
-      __ GotoIfNot(__ TaggedEqual(field_map, __ HeapNumberMapConstant()), &done,
-                   field);
+  __ Bind(&loaded_field);
+  {
+    Node* field = loaded_field.PhiAt(0);
+    // We may have transitioned in-place away from double, so check that
+    // this is a HeapNumber -- otherwise the load is fine and we don't need
+    // to copy anything anyway.
+    __ GotoIf(ObjectIsSmi(field), &done, field);
+    Node* field_map = __ LoadField(AccessBuilder::ForMap(), field);
+    __ GotoIfNot(__ TaggedEqual(field_map, __ HeapNumberMapConstant()), &done,
+                 field);
 
-      Node* value = __ LoadField(AccessBuilder::ForHeapNumberValue(), field);
-      __ Goto(&done_double, value);
-    }
+    Node* value = __ LoadField(AccessBuilder::ForHeapNumberValue(), field);
+    __ Goto(&done_double, value);
+  }
 
-    __ Bind(&done_double);
-    {
-      Node* result = AllocateHeapNumberWithValue(done_double.PhiAt(0));
-      __ Goto(&done, result);
-    }
-  }
+  __ Bind(&done_double);
+  {
+    Node* result = AllocateHeapNumberWithValue(done_double.PhiAt(0));
+    __ Goto(&done, result);
+  }
 
   __ Bind(&done);
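Note: the change above moves the loaded_field and done_double labels from the double-only branch up to function scope and reroutes the tagged-field loads (previously __ Goto(&done, result)) through loaded_field. As the existing comment explains, a map can transition in place away from double representation, so even a nominally tagged field may still hold a mutable HeapNumber that must be copied rather than returned directly. A standalone sketch of the merged control flow, using std::variant stand-ins instead of V8's graph types:

    #include <variant>

    struct Smi { int value; };            // stand-in for a tagged small integer
    struct HeapNumber { double value; };  // stand-in for V8's mutable number box
    using Field = std::variant<Smi, HeapNumber>;

    // Both load paths now meet at the equivalent of the loaded_field label:
    // a Smi passes through unchanged, while a HeapNumber is copied into a
    // fresh box (AllocateHeapNumberWithValue) so the caller never aliases the
    // mutable number stored in the object. The real lowering also passes
    // non-HeapNumber heap objects through unchanged; this two-case variant
    // elides that.
    Field LoadFieldByIndexResult(const Field& loaded_field) {
      if (const Smi* smi = std::get_if<Smi>(&loaded_field)) {
        return *smi;  // __ GotoIf(ObjectIsSmi(field), &done, field);
      }
      const HeapNumber& number = std::get<HeapNumber>(loaded_field);
      return HeapNumber{number.value};  // rebox via done_double -> done
    }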
deps/v8/src/compiler/js-call-reducer.cc (23 additions, 26 deletions)
@@ -710,9 +710,8 @@ class IteratingArrayBuiltinReducerAssembler : public JSCallReducerAssembler {
       MapInference* inference, const bool has_stability_dependency,
       ElementsKind kind, const SharedFunctionInfoRef& shared,
       const NativeContextRef& native_context, ArrayEverySomeVariant variant);
-  TNode<Object> ReduceArrayPrototypeAt(ZoneVector<ElementsKind> kinds,
-                                       bool needs_fallback_builtin_call,
-                                       Node* receiver_kind);
+  TNode<Object> ReduceArrayPrototypeAt(ZoneVector<const MapRef*> kinds,
+                                       bool needs_fallback_builtin_call);
   TNode<Object> ReduceArrayPrototypeIndexOfIncludes(
       ElementsKind kind, ArrayIndexOfIncludesVariant variant);
 
@@ -1323,24 +1322,26 @@ TNode<String> JSCallReducerAssembler::ReduceStringPrototypeSlice() {
 }
 
 TNode<Object> IteratingArrayBuiltinReducerAssembler::ReduceArrayPrototypeAt(
-    ZoneVector<ElementsKind> kinds, bool needs_fallback_builtin_call,
-    Node* receiver_kind) {
+    ZoneVector<const MapRef*> maps, bool needs_fallback_builtin_call) {
   TNode<JSArray> receiver = ReceiverInputAs<JSArray>();
   TNode<Object> index = ArgumentOrZero(0);
 
   TNode<Number> index_num = CheckSmi(index);
   TNode<FixedArrayBase> elements = LoadElements(receiver);
 
+  TNode<Map> receiver_map =
+      TNode<Map>::UncheckedCast(LoadField(AccessBuilder::ForMap(), receiver));
+
   auto out = MakeLabel(MachineRepresentation::kTagged);
 
-  for (ElementsKind kind : kinds) {
+  for (const MapRef* map : maps) {
+    DCHECK(map->supports_fast_array_iteration());
     auto correct_map_label = MakeLabel(), wrong_map_label = MakeLabel();
-    Branch(NumberEqual(TNode<Number>::UncheckedCast(receiver_kind),
-                       NumberConstant(kind)),
-           &correct_map_label, &wrong_map_label);
+    TNode<Boolean> is_map_equal = ReferenceEqual(receiver_map, Constant(*map));
+    Branch(is_map_equal, &correct_map_label, &wrong_map_label);
     Bind(&correct_map_label);
 
-    TNode<Number> length = LoadJSArrayLength(receiver, kind);
+    TNode<Number> length = LoadJSArrayLength(receiver, map->elements_kind());
 
     // If index is less than 0, then subtract from length.
     TNode<Boolean> cond = NumberLessThan(index_num, ZeroConstant());
@@ -1359,15 +1360,16 @@ TNode<Object> IteratingArrayBuiltinReducerAssembler::ReduceArrayPrototypeAt(
 
     // Retrieving element at index.
     TNode<Object> element = LoadElement<Object>(
-        AccessBuilder::ForFixedArrayElement(kind), elements, real_index_num);
-    if (IsHoleyElementsKind(kind)) {
+        AccessBuilder::ForFixedArrayElement(map->elements_kind()), elements,
+        real_index_num);
+    if (IsHoleyElementsKind(map->elements_kind())) {
       // This case is needed in particular for HOLEY_DOUBLE_ELEMENTS: raw
       // doubles are stored in the FixedDoubleArray, and need to be converted to
      // HeapNumber or to Smi so that this function can return an Object. The
       // automatic conversion performed by
       // RepresentationChanger::GetTaggedRepresentationFor does not handle
       // holes, so we convert manually a potential hole here.
-      element = TryConvertHoleToUndefined(element, kind);
+      element = TryConvertHoleToUndefined(element, map->elements_kind());
     }
     Goto(&out, element);
 
@@ -5633,25 +5635,22 @@ Reduction JSCallReducer::ReduceArrayPrototypeAt(Node* node) {
   MapInference inference(broker(), receiver, effect);
   if (!inference.HaveMaps()) return NoChange();
 
-  // Collecting kinds
-  ZoneVector<ElementsKind> kinds(broker()->zone());
+  // Collecting maps, and checking if a fallback builtin call will be required
+  // (it is required if at least one map doesn't support fast array iteration).
+  ZoneVector<const MapRef*> maps(broker()->zone());
   bool needs_fallback_builtin_call = false;
   for (const MapRef& map : inference.GetMaps()) {
     if (map.supports_fast_array_iteration()) {
-      ElementsKind kind = map.elements_kind();
-      // Checking that |kind| isn't already in |kinds|. Using std::find should
-      // be fast enough since |kinds| can contain at most 4 items.
-      if (std::find(kinds.begin(), kinds.end(), kind) == kinds.end()) {
-        kinds.push_back(kind);
-      }
+      maps.push_back(&map);
     } else {
       needs_fallback_builtin_call = true;
    }
   }
 
   inference.RelyOnMapsPreferStability(dependencies(), jsgraph(), &effect,
                                       control, p.feedback());
 
-  if (kinds.empty()) {
+  if (maps.empty()) {
     // No map in the feedback supports fast iteration. Keeping the builtin call.
     return NoChange();
   }
@@ -5660,13 +5659,11 @@ Reduction JSCallReducer::ReduceArrayPrototypeAt(Node* node) {
     return NoChange();
   }
 
-  Node* receiver_kind = LoadReceiverElementsKind(receiver, &effect, control);
-
   IteratingArrayBuiltinReducerAssembler a(this, node);
   a.InitializeEffectControl(effect, control);
 
-  TNode<Object> subgraph = a.ReduceArrayPrototypeAt(
-      kinds, needs_fallback_builtin_call, receiver_kind);
+  TNode<Object> subgraph =
+      a.ReduceArrayPrototypeAt(maps, needs_fallback_builtin_call);
   return ReplaceWithSubgraph(&a, subgraph);
 }

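Note: the reducer now dispatches on the receiver's exact map instead of its elements kind. The old code compared LoadReceiverElementsKind(receiver) against NumberConstant(kind), and an elements kind alone does not pin down the receiver's map, so the ReferenceEqual check against each collected map is strictly stronger (and makes the old per-kind dedup unnecessary). A hedged sketch of the new collection loop, with a simplified stand-in for MapRef and for the maps returned by inference.GetMaps():

    #include <vector>

    // Simplified stand-in for compiler::MapRef; only what the loop needs.
    struct MapRef {
      int elements_kind;
      bool supports_fast_array_iteration;
    };

    // Mirrors the rewritten loop in JSCallReducer::ReduceArrayPrototypeAt:
    // every map that supports fast array iteration is collected (no dedup by
    // elements kind, since dispatch is now per-map), and any map that does
    // not support it forces a fallback call to the builtin.
    void CollectFastIterableMaps(const std::vector<MapRef>& feedback_maps,
                                 std::vector<const MapRef*>* maps,
                                 bool* needs_fallback_builtin_call) {
      for (const MapRef& map : feedback_maps) {
        if (map.supports_fast_array_iteration) {
          maps->push_back(&map);
        } else {
          *needs_fallback_builtin_call = true;
        }
      }
    }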
