diff --git a/deps/v8/src/builtins/builtins-collections-gen.cc b/deps/v8/src/builtins/builtins-collections-gen.cc
index e5e6026ce61632..6fea5c37e8c2f2 100644
--- a/deps/v8/src/builtins/builtins-collections-gen.cc
+++ b/deps/v8/src/builtins/builtins-collections-gen.cc
@@ -2782,10 +2782,9 @@ TNode WeakCollectionsBuiltinsAssembler::ShouldShrink(
 
 TNode WeakCollectionsBuiltinsAssembler::ValueIndexFromKeyIndex(
     TNode key_index) {
-  return IntPtrAdd(
-      key_index,
-      IntPtrConstant(EphemeronHashTable::TodoShape::kEntryValueIndex -
-                     EphemeronHashTable::kEntryKeyIndex));
+  return IntPtrAdd(key_index,
+                   IntPtrConstant(EphemeronHashTable::ShapeT::kEntryValueIndex -
+                                  EphemeronHashTable::kEntryKeyIndex));
 }
 
 TF_BUILTIN(WeakMapConstructor, WeakCollectionsBuiltinsAssembler) {
diff --git a/deps/v8/src/codegen/code-stub-assembler.cc b/deps/v8/src/codegen/code-stub-assembler.cc
index cc80f7aaccacb3..82774268e409e7 100644
--- a/deps/v8/src/codegen/code-stub-assembler.cc
+++ b/deps/v8/src/codegen/code-stub-assembler.cc
@@ -9505,7 +9505,7 @@ void CodeStubAssembler::NameDictionaryLookup(
         CAST(UnsafeLoadFixedArrayElement(dictionary, index));
     GotoIf(TaggedEqual(current, undefined), if_not_found_with_insertion_index);
     if (mode == kFindExisting) {
-      if (Dictionary::TodoShape::kMatchNeedsHoleCheck) {
+      if (Dictionary::ShapeT::kMatchNeedsHoleCheck) {
        GotoIf(TaggedEqual(current, TheHoleConstant()), &next_probe);
      }
      current = LoadName(current);
diff --git a/deps/v8/src/codegen/code-stub-assembler.h b/deps/v8/src/codegen/code-stub-assembler.h
index 7d687744c10ed1..b57ec139dd3452 100644
--- a/deps/v8/src/codegen/code-stub-assembler.h
+++ b/deps/v8/src/codegen/code-stub-assembler.h
@@ -1547,7 +1547,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler
   TNode LoadFixedArrayBaseLength(TNode array);
   template 
   TNode LoadArrayCapacity(TNode array) {
-    return LoadObjectField(array, Array::Shape::kCapacityOffset);
+    return LoadObjectField(array, Array::ShapeT::kCapacityOffset);
   }
   // Load the length of a fixed array base instance.
   TNode LoadAndUntagFixedArrayBaseLength(TNode array);
diff --git a/deps/v8/src/compiler/turboshaft/assembler.h b/deps/v8/src/compiler/turboshaft/assembler.h
index ad7de77faf1c38..aad6ed7cf5cc04 100644
--- a/deps/v8/src/compiler/turboshaft/assembler.h
+++ b/deps/v8/src/compiler/turboshaft/assembler.h
@@ -1121,8 +1121,7 @@ class TurboshaftAssemblerOpInterface
   template 
   explicit TurboshaftAssemblerOpInterface(Args... args)
-      : GenericAssemblerOpInterface(args...),
-        matcher_(Asm().output_graph()) {}
+      : matcher_(Asm().output_graph()) {}
 
   const OperationMatcher& matcher() const { return matcher_; }
 
@@ -2245,11 +2244,11 @@ class TurboshaftAssemblerOpInterface
   // Helpers to read the most common fields.
   // TODO(nicohartmann@): Strengthen this to `V`.
-  V LoadMapField(V object) {
-    return LoadField(object, AccessBuilder::ForMap());
+  V LoadMapField(V object) {
+    return LoadField(object, AccessBuilder::ForMap());
   }
 
-  V LoadInstanceTypeField(V map) {
+  V LoadInstanceTypeField(V map) {
     return LoadField(map, AccessBuilder::ForMapInstanceType());
   }
 
@@ -2849,7 +2848,7 @@ class TurboshaftAssemblerOpInterface
   V CallRuntime_TransitionElementsKind(Isolate* isolate,
                                        V context,
                                        V object,
-                                       V target_map) {
+                                       V target_map) {
     return CallRuntime(
         isolate, context, {object, target_map});
   }
@@ -3267,8 +3266,8 @@ class TurboshaftAssemblerOpInterface
   void TransitionAndStoreArrayElement(
       V array, V index, OpIndex value,
-      TransitionAndStoreArrayElementOp::Kind kind, MaybeHandle fast_map,
-      MaybeHandle double_map) {
+      TransitionAndStoreArrayElementOp::Kind kind, MaybeHandle fast_map,
+      MaybeHandle double_map) {
     ReduceIfReachableTransitionAndStoreArrayElement(array, index, value, kind,
                                                     fast_map, double_map);
   }
@@ -3281,17 +3280,17 @@ class TurboshaftAssemblerOpInterface
   }
 
   V CompareMaps(V heap_object,
-                const ZoneRefSet& maps) {
+                const ZoneRefSet& maps) {
     return ReduceIfReachableCompareMaps(heap_object, maps);
   }
 
   void CheckMaps(V heap_object, OpIndex frame_state,
-                 const ZoneRefSet& maps, CheckMapsFlags flags,
+                 const ZoneRefSet& maps, CheckMapsFlags flags,
                  const FeedbackSource& feedback) {
     ReduceIfReachableCheckMaps(heap_object, frame_state, maps, flags,
                                feedback);
   }
 
-  void AssumeMap(V heap_object, const ZoneRefSet& maps) {
+  void AssumeMap(V heap_object, const ZoneRefSet& maps) {
     ReduceIfReachableAssumeMap(heap_object, maps);
   }
 
@@ -3400,16 +3399,16 @@ class TurboshaftAssemblerOpInterface
     return ReduceIfReachableAssertNotNull(object, type, trap_id);
   }
 
-  V RttCanon(V rtts, uint32_t type_index) {
+  V RttCanon(V rtts, uint32_t type_index) {
     return ReduceIfReachableRttCanon(rtts, type_index);
   }
 
-  V WasmTypeCheck(V object, OptionalV rtt,
+  V WasmTypeCheck(V object, OptionalV rtt,
                   WasmTypeCheckConfig config) {
     return ReduceIfReachableWasmTypeCheck(object, rtt, config);
   }
 
-  V WasmTypeCast(V object, OptionalV rtt,
+  V WasmTypeCast(V object, OptionalV rtt,
                  WasmTypeCheckConfig config) {
     return ReduceIfReachableWasmTypeCast(object, rtt, config);
   }
@@ -3454,12 +3453,12 @@ class TurboshaftAssemblerOpInterface
     return ReduceIfReachableArrayLength(array, null_check);
  }
 
-  V WasmAllocateArray(V rtt, ConstOrV length,
+  V WasmAllocateArray(V rtt, ConstOrV length,
                       const wasm::ArrayType* array_type) {
     return ReduceIfReachableWasmAllocateArray(rtt, resolve(length), array_type);
   }
 
-  V WasmAllocateStruct(V rtt,
+  V WasmAllocateStruct(V rtt,
                        const wasm::StructType* struct_type) {
     return ReduceIfReachableWasmAllocateStruct(rtt, struct_type);
   }
@@ -4044,8 +4043,14 @@ class TSAssembler
     : public Assembler> {
  public:
-  using Assembler>::Assembler;
+#ifdef _WIN32
+  explicit TSAssembler(Graph& input_graph, Graph& output_graph,
+                       Zone* phase_zone)
+      : Assembler(input_graph, output_graph, phase_zone) {}
+#else
+  using Assembler>::Assembler;
+#endif
 };
 
 #include "src/compiler/turboshaft/undef-assembler-macros.inc"
diff --git a/deps/v8/src/compiler/turboshaft/machine-optimization-reducer.h b/deps/v8/src/compiler/turboshaft/machine-optimization-reducer.h
index dbceef39b01b31..ca51848d06a3b4 100644
--- a/deps/v8/src/compiler/turboshaft/machine-optimization-reducer.h
+++ b/deps/v8/src/compiler/turboshaft/machine-optimization-reducer.h
@@ -1349,26 +1349,11 @@ class MachineOptimizationReducer : public Next {
       if (matcher.MatchConstantShiftRightArithmeticShiftOutZeros(
              left, &x, rep_w, &k1) &&
          matcher.MatchIntegralWordConstant(right, rep_w, &k2) &&
-          CountLeadingSignBits(k2, rep_w) > k1) {
-        if (matcher.Get(left).saturated_use_count.IsZero()) {
-          return __ Comparison(
-              x, __ WordConstant(base::bits::Unsigned(k2) << k1, rep_w), kind,
-              rep_w);
-        } else if constexpr (reducer_list_contains<
-                                 ReducerList, ValueNumberingReducer>::value) {
-          // If the shift has uses, we only apply the transformation if the
-          // result would be GVNed away.
-          OpIndex rhs =
-              __ WordConstant(base::bits::Unsigned(k2) << k1, rep_w);
-          static_assert(ComparisonOp::input_count == 2);
-          static_assert(sizeof(ComparisonOp) == 8);
-          base::SmallVector storage;
-          ComparisonOp* cmp =
-              CreateOperation(storage, x, rhs, kind, rep_w);
-          if (__ WillGVNOp(*cmp)) {
-            return __ Comparison(x, rhs, kind, rep_w);
-          }
-        }
+          CountLeadingSignBits(k2, rep_w) > k1 &&
+          matcher.Get(left).saturated_use_count.IsZero()) {
+        return __ Comparison(
+            x, __ WordConstant(base::bits::Unsigned(k2) << k1, rep_w), kind,
+            rep_w);
       }
       // k2 </<= (x >> k1)  =>  (k2 << k1) </<= x  if shifts reversible
       if (matcher.MatchConstantShiftRightArithmeticShiftOutZeros(
              right, &x, rep_w, &k1) &&
          matcher.MatchIntegralWordConstant(left, rep_w, &k2) &&
-          CountLeadingSignBits(k2, rep_w) > k1) {
-        if (matcher.Get(right).saturated_use_count.IsZero()) {
-          return __ Comparison(
-              __ WordConstant(base::bits::Unsigned(k2) << k1, rep_w), x, kind,
-              rep_w);
-        } else if constexpr (reducer_list_contains<
-                                 ReducerList, ValueNumberingReducer>::value) {
-          // If the shift has uses, we only apply the transformation if the
-          // result would be GVNed away.
-          OpIndex lhs =
-              __ WordConstant(base::bits::Unsigned(k2) << k1, rep_w);
-          static_assert(ComparisonOp::input_count == 2);
-          static_assert(sizeof(ComparisonOp) == 8);
-          base::SmallVector storage;
-          ComparisonOp* cmp =
-              CreateOperation(storage, lhs, x, kind, rep_w);
-          if (__ WillGVNOp(*cmp)) {
-            return __ Comparison(lhs, x, kind, rep_w);
-          }
-        }
+          CountLeadingSignBits(k2, rep_w) > k1 &&
+          matcher.Get(right).saturated_use_count.IsZero()) {
+        return __ Comparison(
+            __ WordConstant(base::bits::Unsigned(k2) << k1, rep_w), x, kind,
+            rep_w);
       }
     }
     // Map 64bit to 32bit comparisons.
diff --git a/deps/v8/src/compiler/turboshaft/simplified-lowering-reducer.h b/deps/v8/src/compiler/turboshaft/simplified-lowering-reducer.h
index 556c0d23484f9b..5c946e67e7fa3d 100644
--- a/deps/v8/src/compiler/turboshaft/simplified-lowering-reducer.h
+++ b/deps/v8/src/compiler/turboshaft/simplified-lowering-reducer.h
@@ -32,10 +32,10 @@ class SimplifiedLoweringReducer : public Next {
       OpIndex ig_index, const SpeculativeNumberBinopOp& op) {
     DCHECK_EQ(op.kind, SpeculativeNumberBinopOp::Kind::kSafeIntegerAdd);
 
-    OpIndex frame_state = Map(op.frame_state());
-    V left = ProcessInput(Map(op.left()), Rep::Word32(),
+    OpIndex frame_state = MapImpl(op.frame_state());
+    V left = ProcessInput(MapImpl(op.left()), Rep::Word32(),
                           CheckKind::kSigned32, frame_state);
-    V right = ProcessInput(Map(op.right()), Rep::Word32(),
+    V right = ProcessInput(MapImpl(op.right()), Rep::Word32(),
                            CheckKind::kSigned32, frame_state);
 
     V result = __ OverflowCheckedBinop(
@@ -43,7 +43,7 @@ class SimplifiedLoweringReducer : public Next {
         WordRepresentation::Word32());
 
     V overflow = __ Projection(result, 1, Rep::Word32());
-    __ DeoptimizeIf(overflow, Map(op.frame_state()),
+    __ DeoptimizeIf(overflow, MapImpl(op.frame_state()),
                     DeoptimizeReason::kOverflow, FeedbackSource{});
     return __ Projection(result, 0, Rep::Word32());
   }
@@ -52,10 +52,10 @@ class SimplifiedLoweringReducer : public Next {
     base::SmallVector return_values;
     for (OpIndex input : ret.return_values()) {
       return_values.push_back(
-          ProcessInput(Map(input), Rep::Tagged(), CheckKind::kNone, {}));
+          ProcessInput(MapImpl(input), Rep::Tagged(), CheckKind::kNone, {}));
     }
 
-    __ Return(Map(ret.pop_count()), base::VectorOf(return_values));
+    __ Return(MapImpl(ret.pop_count()), base::VectorOf(return_values));
     return OpIndex::Invalid();
   }
 
@@ -94,7 +94,7 @@ class SimplifiedLoweringReducer : public Next {
     }
   }
 
-  inline OpIndex Map(OpIndex ig_index) { return __ MapToNewGraph(ig_index); }
+  inline OpIndex MapImpl(OpIndex ig_index) { return __ MapToNewGraph(ig_index); }
 };
 
 #include "src/compiler/turboshaft/undef-assembler-macros.inc"
diff --git a/deps/v8/src/compiler/turboshaft/variable-reducer.h b/deps/v8/src/compiler/turboshaft/variable-reducer.h
index 2e8e8916c98b8f..45eeba0b545d87 100644
--- a/deps/v8/src/compiler/turboshaft/variable-reducer.h
+++ b/deps/v8/src/compiler/turboshaft/variable-reducer.h
@@ -55,9 +55,11 @@ namespace v8::internal::compiler::turboshaft {
 // with constant inputs introduced by `VariableReducer` need to be eliminated.
 template 
 class VariableReducer : public RequiredOptimizationReducer {
+protected:
   using Next = RequiredOptimizationReducer;
   using Snapshot = SnapshotTable::Snapshot;
 
+private:
   struct GetActiveLoopVariablesIndex {
     IntrusiveSetIndex& operator()(Variable var) const {
       return var.data().active_loop_variables_index;
diff --git a/deps/v8/src/heap/heap.cc b/deps/v8/src/heap/heap.cc
index 850f281ebde246..ad82f08e2cf9b4 100644
--- a/deps/v8/src/heap/heap.cc
+++ b/deps/v8/src/heap/heap.cc
@@ -3570,7 +3570,7 @@ void Heap::RightTrimArray(Tagged object, int new_capacity,
   }
 
   const int bytes_to_trim =
-      (old_capacity - new_capacity) * Array::Shape::kElementSize;
+      (old_capacity - new_capacity) * Array::HotfixShape::kElementSize;
 
   // Calculate location of new array end.
  const int old_size = Array::SizeFor(old_capacity);
diff --git a/deps/v8/src/objects/dictionary.h b/deps/v8/src/objects/dictionary.h
index 58924206ea9313..9cfef9e0b14d3f 100644
--- a/deps/v8/src/objects/dictionary.h
+++ b/deps/v8/src/objects/dictionary.h
@@ -32,8 +32,7 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) Dictionary
   using DerivedHashTable = HashTable;
 
  public:
-  using TodoShape = Shape;
-  using Key = typename TodoShape::Key;
+  using Key = typename Shape::Key;
   inline Tagged ValueAt(InternalIndex entry);
   inline Tagged ValueAt(PtrComprCageBase cage_base, InternalIndex entry);
 
@@ -126,7 +125,7 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) Dictionary
                            Key key, Handle value,
                            PropertyDetails details);
 
-  OBJECT_CONSTRUCTORS(Dictionary, HashTable);
+  OBJECT_CONSTRUCTORS(Dictionary, HashTable);
 };
 
 #define EXTERN_DECLARE_DICTIONARY(DERIVED, SHAPE) \
diff --git a/deps/v8/src/objects/fixed-array.h b/deps/v8/src/objects/fixed-array.h
index a05edb99b541b2..70ac52065ddac3 100644
--- a/deps/v8/src/objects/fixed-array.h
+++ b/deps/v8/src/objects/fixed-array.h
@@ -24,18 +24,18 @@ namespace internal {
 #include "torque-generated/src/objects/fixed-array-tq.inc"
 
 // Derived: must have a Smi slot at kCapacityOffset.
-template 
+template 
 class TaggedArrayBase : public Super {
   static_assert(std::is_base_of::value);
   OBJECT_CONSTRUCTORS(TaggedArrayBase, Super);
 
-  using ElementT = typename ShapeT::ElementT;
-  static_assert(ShapeT::kElementSize == kTaggedSize);
+  using ElementT = typename Shape::ElementT;
+  static_assert(Shape::kElementSize == kTaggedSize);
   static_assert(is_subtype_v || is_subtype_v);
 
   using ElementFieldT =
-      TaggedField;
+      TaggedField;
 
   static constexpr bool kSupportsSmiElements =
       std::is_convertible_v;
 
@@ -56,7 +56,7 @@ class TaggedArrayBase : public Super {
                          std::conditional_t;
 
  public:
-  using Shape = ShapeT;
+  using ShapeT = Shape;
 
   inline int capacity() const;
   inline int capacity(AcquireLoadTag) const;
@@ -187,6 +187,7 @@ class FixedArray : public TaggedArrayBase {
   OBJECT_CONSTRUCTORS(FixedArray, Super);
 
  public:
+  using HotfixShape = TaggedArrayShape;
   template 
   static inline Handle New(
       IsolateT* isolate, int capacity,
@@ -232,7 +233,7 @@ class FixedArray : public TaggedArrayBase {
 
   class BodyDescriptor;
 
-  static constexpr int kLengthOffset = Shape::kCapacityOffset;
+  static constexpr int kLengthOffset = ShapeT::kCapacityOffset;
   static constexpr int kMaxLength = FixedArray::kMaxCapacity;
   static constexpr int kMaxRegularLength = FixedArray::kMaxRegularCapacity;
 
@@ -283,7 +284,7 @@ class TrustedFixedArray
   class BodyDescriptor;
 
   static constexpr int kLengthOffset =
-      TrustedFixedArray::Shape::kCapacityOffset;
+      TrustedFixedArray::ShapeT::kCapacityOffset;
   static constexpr int kMaxLength = TrustedFixedArray::kMaxCapacity;
   static constexpr int kMaxRegularLength =
       TrustedFixedArray::kMaxRegularCapacity;
@@ -331,7 +332,7 @@ class ProtectedFixedArray
   class BodyDescriptor;
 
   static constexpr int kLengthOffset =
-      ProtectedFixedArray::Shape::kCapacityOffset;
+      ProtectedFixedArray::ShapeT::kCapacityOffset;
   static constexpr int kMaxLength = ProtectedFixedArray::kMaxCapacity;
   static constexpr int kMaxRegularLength =
       ProtectedFixedArray::kMaxRegularCapacity;
 
@@ -388,6 +389,7 @@ class PrimitiveArrayBase : public Super {
 
  public:
   using Shape = ShapeT;
+  using HotfixShape = ShapeT;
   static constexpr bool kElementsAreMaybeObject = false;
 
   inline int length() const;
@@ -523,6 +525,8 @@ class WeakFixedArray
   OBJECT_CONSTRUCTORS(WeakFixedArray, Super);
 
 public:
+  using Shape = WeakFixedArrayShape;
+  using HotfixShape = WeakFixedArrayShape;
   template 
   static inline Handle New(
       IsolateT* isolate, int capacity,
@@ -534,7 +538,7 @@ class WeakFixedArray
 
   class BodyDescriptor;
 
-  static constexpr int kLengthOffset = Shape::kCapacityOffset;
+  static constexpr int kLengthOffset = ShapeT::kCapacityOffset;
 };
 
 // WeakArrayList is like a WeakFixedArray with static convenience methods for
@@ -671,6 +675,7 @@ class ArrayList : public TaggedArrayBase {
 
  public:
   using Shape = ArrayListShape;
+  using HotfixShape = ArrayListShape;
 
   template 
   static inline Handle New(
@@ -742,6 +747,7 @@ class ByteArray : public PrimitiveArrayBase {
 
 public:
   using Shape = ByteArrayShape;
+  using HotfixShape = ByteArrayShape;
 
   template 
   static inline Handle New(
diff --git a/deps/v8/src/objects/hash-table-inl.h b/deps/v8/src/objects/hash-table-inl.h
index 6d37e7d674d59d..c3451d2fc28db9 100644
--- a/deps/v8/src/objects/hash-table-inl.h
+++ b/deps/v8/src/objects/hash-table-inl.h
@@ -170,7 +170,7 @@ template 
 template 
 InternalIndex HashTable::FindEntry(IsolateT* isolate, Key key) {
   ReadOnlyRoots roots(isolate);
-  return FindEntry(isolate, roots, key, TodoShape::Hash(roots, key));
+  return FindEntry(isolate, roots, key, Shape::Hash(roots, key));
 }
 
 // Find entry for key otherwise return kNotFound.
@@ -183,7 +183,7 @@ InternalIndex HashTable::FindEntry(PtrComprCageBase cage_base,
   uint32_t count = 1;
   Tagged undefined = roots.undefined_value();
   Tagged the_hole = roots.the_hole_value();
-  DCHECK_EQ(TodoShape::Hash(roots, key), static_cast(hash));
+  DCHECK_EQ(Shape::Hash(roots, key), static_cast(hash));
   // EnsureCapacity will guarantee the hash table is never full.
   for (InternalIndex entry = FirstProbe(hash, capacity);;
        entry = NextProbe(entry, count++, capacity)) {
@@ -191,8 +191,8 @@ InternalIndex HashTable::FindEntry(PtrComprCageBase cage_base,
     // Empty entry. Uses raw unchecked accessors because it is called by the
     // string table during bootstrapping.
     if (element == undefined) return InternalIndex::NotFound();
-    if (TodoShape::kMatchNeedsHoleCheck && element == the_hole) continue;
-    if (TodoShape::IsMatch(key, element)) return entry;
+    if (Shape::kMatchNeedsHoleCheck && element == the_hole) continue;
+    if (Shape::IsMatch(key, element)) return entry;
   }
 }
 
@@ -216,7 +216,7 @@ bool HashTable::ToKey(ReadOnlyRoots roots, InternalIndex entry,
                       Tagged* out_k) {
   Tagged k = KeyAt(entry);
   if (!IsKey(roots, k)) return false;
-  *out_k = TodoShape::Unwrap(k);
+  *out_k = Shape::Unwrap(k);
   return true;
 }
 
@@ -226,7 +226,7 @@ bool HashTable::ToKey(PtrComprCageBase cage_base,
                       Tagged* out_k) {
   Tagged k = KeyAt(cage_base, entry);
   if (!IsKey(GetReadOnlyRoots(cage_base), k)) return false;
-  *out_k = TodoShape::Unwrap(k);
+  *out_k = Shape::Unwrap(k);
   return true;
 }
 
diff --git a/deps/v8/src/objects/hash-table.h b/deps/v8/src/objects/hash-table.h
index 6b25f043e8ed6b..6742b2573d7c7c 100644
--- a/deps/v8/src/objects/hash-table.h
+++ b/deps/v8/src/objects/hash-table.h
@@ -126,15 +126,15 @@ class V8_EXPORT_PRIVATE HashTableBase : public NON_EXPORTED_BASE(FixedArray) {
   OBJECT_CONSTRUCTORS(HashTableBase, FixedArray);
 };
 
-template 
+template 
 class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) HashTable
     : public HashTableBase {
  public:
   // TODO(jgruber): Derive from TaggedArrayBase instead of FixedArray, and
   // merge with TaggedArraryBase's Shape class. Once the naming conflict is
   // resolved rename all TodoShape occurrences back to Shape.
-  using TodoShape = ShapeT;
-  using Key = typename TodoShape::Key;
+  using ShapeT = Shape;
+  using Key = typename Shape::Key;
 
   // Returns a new HashTable object.
   template 
@@ -177,9 +177,8 @@ class EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE) HashTable
   inline void SetKeyAt(InternalIndex entry, Tagged value,
                        WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
 
-  static const int kElementsStartIndex =
-      kPrefixStartIndex + TodoShape::kPrefixSize;
-  static const int kEntrySize = TodoShape::kEntrySize;
+  static const int kElementsStartIndex = kPrefixStartIndex + Shape::kPrefixSize;
+  static const int kEntrySize = Shape::kEntrySize;
   static_assert(kEntrySize > 0);
   static const int kEntryKeyIndex = 0;
   static const int kElementsStartOffset =
diff --git a/deps/v8/src/objects/objects.cc b/deps/v8/src/objects/objects.cc
index 99446acd540ac1..6046143354e7f5 100644
--- a/deps/v8/src/objects/objects.cc
+++ b/deps/v8/src/objects/objects.cc
@@ -5137,11 +5137,11 @@ void HashTable::Rehash(PtrComprCageBase cage_base,
     uint32_t from_index = EntryToIndex(i);
     Tagged k = this->get(from_index);
     if (!IsKey(roots, k)) continue;
-    uint32_t hash = TodoShape::HashForObject(roots, k);
+    uint32_t hash = Shape::HashForObject(roots, k);
     uint32_t insertion_index =
         EntryToIndex(new_table->FindInsertionEntry(cage_base, roots, hash));
     new_table->set_key(insertion_index, get(from_index), mode);
-    for (int j = 1; j < TodoShape::kEntrySize; j++) {
+    for (int j = 1; j < Shape::kEntrySize; j++) {
       new_table->set(insertion_index + j, get(from_index + j), mode);
     }
   }
@@ -5154,7 +5154,7 @@ InternalIndex HashTable::EntryForProbe(ReadOnlyRoots roots,
                                        Tagged k, int probe,
                                        InternalIndex expected) {
-  uint32_t hash = TodoShape::HashForObject(roots, k);
+  uint32_t hash = Shape::HashForObject(roots, k);
   uint32_t capacity = this->Capacity();
   InternalIndex entry = FirstProbe(hash, capacity);
   for (int i = 1; i < probe; i++) {
@@ -5169,17 +5169,17 @@ void HashTable::Swap(InternalIndex entry1, InternalIndex entry2,
                      WriteBarrierMode mode) {
   int index1 = EntryToIndex(entry1);
   int index2 = EntryToIndex(entry2);
-  Tagged temp[TodoShape::kEntrySize];
+  Tagged temp[Shape::kEntrySize];
   Derived* self = static_cast(this);
-  for (int j = 0; j < TodoShape::kEntrySize; j++) {
+  for (int j = 0; j < Shape::kEntrySize; j++) {
     temp[j] = get(index1 + j);
   }
   self->set_key(index1, get(index2), mode);
-  for (int j = 1; j < TodoShape::kEntrySize; j++) {
+  for (int j = 1; j < Shape::kEntrySize; j++) {
     set(index1 + j, get(index2 + j), mode);
   }
   self->set_key(index2, temp[0], mode);
-  for (int j = 1; j < TodoShape::kEntrySize; j++) {
+  for (int j = 1; j < Shape::kEntrySize; j++) {
     set(index2 + j, temp[j], mode);
   }
 }
@@ -5341,7 +5341,7 @@ GlobalDictionary::TryFindPropertyCellForConcurrentLookupIterator(
   DisallowGarbageCollection no_gc;
   PtrComprCageBase cage_base{isolate};
   ReadOnlyRoots roots(isolate);
-  const int32_t hash = TodoShape::Hash(roots, name);
+  const int32_t hash = ShapeT::Hash(roots, name);
   const uint32_t capacity = Capacity();
   uint32_t count = 1;
   Tagged undefined = roots.undefined_value();
@@ -5352,8 +5352,8 @@ GlobalDictionary::TryFindPropertyCellForConcurrentLookupIterator(
     Tagged element = KeyAt(cage_base, entry, kRelaxedLoad);
     if (isolate->heap()->IsPendingAllocation(element)) return {};
     if (element == undefined) return {};
-    if (TodoShape::kMatchNeedsHoleCheck && element == the_hole) continue;
-    if (!TodoShape::IsMatch(name, element)) continue;
+    if (ShapeT::kMatchNeedsHoleCheck && element == the_hole) continue;
+    if (!ShapeT::IsMatch(name, element)) continue;
    CHECK(IsPropertyCell(element, cage_base));
    return PropertyCell::cast(element);
  }
@@ -5367,7 +5367,7 @@ Handle StringSet::Add(Isolate* isolate, Handle stringset,
                       Handle name) {
   if (!stringset->Has(isolate, name)) {
     stringset = EnsureCapacity(isolate, stringset);
-    uint32_t hash = TodoShape::Hash(ReadOnlyRoots(isolate), *name);
+    uint32_t hash = ShapeT::Hash(ReadOnlyRoots(isolate), *name);
     InternalIndex entry = stringset->FindInsertionEntry(isolate, hash);
     stringset->set(EntryToIndex(entry), *name);
     stringset->ElementAdded();
@@ -5386,7 +5386,7 @@ Handle RegisteredSymbolTable::Add(
   SLOW_DCHECK(table->FindEntry(isolate, key).is_not_found());
 
   table = EnsureCapacity(isolate, table);
-  uint32_t hash = TodoShape::Hash(ReadOnlyRoots(isolate), key);
+  uint32_t hash = ShapeT::Hash(ReadOnlyRoots(isolate), key);
   InternalIndex entry = table->FindInsertionEntry(isolate, hash);
   table->set(EntryToIndex(entry), *key);
   table->set(EntryToValueIndex(entry), *symbol);
@@ -5455,7 +5455,7 @@ int BaseNameDictionary::NextEnumerationIndex(
 template 
 Handle Dictionary::DeleteEntry(
     Isolate* isolate, Handle dictionary, InternalIndex entry) {
-  DCHECK(TodoShape::kEntrySize != 3 ||
+  DCHECK(Shape::kEntrySize != 3 ||
          dictionary->DetailsAt(entry).IsConfigurable());
   dictionary->ClearEntry(entry);
   dictionary->ElementRemoved();
@@ -5476,7 +5476,7 @@ Handle Dictionary::AtPut(Isolate* isolate,
   // We don't need to copy over the enumeration index.
   dictionary->ValueAtPut(entry, *value);
-  if (TodoShape::kEntrySize == 3) dictionary->DetailsAtPut(entry, details);
+  if (Shape::kEntrySize == 3) dictionary->DetailsAtPut(entry, details);
   return dictionary;
 }
 
@@ -5493,7 +5493,7 @@ void Dictionary::UncheckedAtPut(Isolate* isolate,
   } else {
     // We don't need to copy over the enumeration index.
     dictionary->ValueAtPut(entry, *value);
-    if (TodoShape::kEntrySize == 3) dictionary->DetailsAtPut(entry, details);
+    if (Shape::kEntrySize == 3) dictionary->DetailsAtPut(entry, details);
   }
 }
 
@@ -5534,19 +5534,19 @@ Handle Dictionary::Add(IsolateT* isolate,
                                       PropertyDetails details,
                                       InternalIndex* entry_out) {
   ReadOnlyRoots roots(isolate);
-  uint32_t hash = TodoShape::Hash(roots, key);
+  uint32_t hash = Shape::Hash(roots, key);
   // Validate that the key is absent.
   SLOW_DCHECK(dictionary->FindEntry(isolate, key).is_not_found());
   // Check whether the dictionary should be extended.
   dictionary = Derived::EnsureCapacity(isolate, dictionary);
 
   // Compute the key object.
-  Handle k = TodoShape::template AsHandle(isolate, key);
+  Handle k = Shape::template AsHandle(isolate, key);
 
   InternalIndex entry = dictionary->FindInsertionEntry(isolate, roots, hash);
   dictionary->SetEntry(entry, *k, *value, details);
   DCHECK(IsNumber(dictionary->KeyAt(isolate, entry)) ||
-         IsUniqueName(TodoShape::Unwrap(dictionary->KeyAt(isolate, entry))));
+         IsUniqueName(Shape::Unwrap(dictionary->KeyAt(isolate, entry))));
   dictionary->ElementAdded();
   if (entry_out) *entry_out = entry;
   return dictionary;
@@ -5559,18 +5559,18 @@ void Dictionary::UncheckedAdd(IsolateT* isolate,
                               Key key, Handle value,
                               PropertyDetails details) {
   ReadOnlyRoots roots(isolate);
-  uint32_t hash = TodoShape::Hash(roots, key);
+  uint32_t hash = Shape::Hash(roots, key);
   // Validate that the key is absent and we capacity is sufficient.
   SLOW_DCHECK(dictionary->FindEntry(isolate, key).is_not_found());
   DCHECK(dictionary->HasSufficientCapacityToAdd(1));
 
   // Compute the key object.
-  Handle k = TodoShape::template AsHandle(isolate, key);
+  Handle k = Shape::template AsHandle(isolate, key);
 
   InternalIndex entry = dictionary->FindInsertionEntry(isolate, roots, hash);
   dictionary->SetEntry(entry, *k, *value, details);
 
   DCHECK(IsNumber(dictionary->KeyAt(isolate, entry)) ||
-         IsUniqueName(TodoShape::Unwrap(dictionary->KeyAt(isolate, entry))));
+         IsUniqueName(Shape::Unwrap(dictionary->KeyAt(isolate, entry))));
 }
 
 template 
diff --git a/deps/v8/src/objects/tagged-field.h b/deps/v8/src/objects/tagged-field.h
index 4c79657e200420..e4d7552a66c77d 100644
--- a/deps/v8/src/objects/tagged-field.h
+++ b/deps/v8/src/objects/tagged-field.h
@@ -100,12 +100,10 @@ static_assert(sizeof(UnalignedDoubleMember) == sizeof(double));
 #define FLEXIBLE_ARRAY_MEMBER(Type, name)                       \
   using FlexibleDataReturnType = Type[0];                       \
   FlexibleDataReturnType& name() {                              \
-    static_assert(alignof(Type) <= alignof(decltype(*this)));   \
     using ReturnType = Type[0];                                 \
     return reinterpret_cast(*(this + 1));                       \
   }                                                             \
   const FlexibleDataReturnType& name() const {                  \
-    static_assert(alignof(Type) <= alignof(decltype(*this)));   \
     using ReturnType = Type[0];                                 \
     return reinterpret_cast(*(this + 1));                       \
   }                                                             \
diff --git a/deps/v8/src/objects/template-objects.cc b/deps/v8/src/objects/template-objects.cc
index 003a02d301061d..d1146ad389c3fb 100644
--- a/deps/v8/src/objects/template-objects.cc
+++ b/deps/v8/src/objects/template-objects.cc
@@ -54,7 +54,7 @@ Handle TemplateObjectDescription::GetTemplateObject(
 
   // Check the template weakmap to see if the template object already exists.
   Handle