Commit 8c0318c

indutny authored and rvagg committed

deps: backport 8d6a228 from the v8's upstream

Original commit message:

    [heap] fix crash during the scavenge of ArrayBuffer

    Scavenger should not attempt to visit ArrayBuffer's storage, it is a
    user-supplied pointer that may have any alignment. Visiting it may
    result in a crash.

    BUG=
    R=jochen
    Review URL: https://codereview.chromium.org/1406133003
    Cr-Commit-Position: refs/heads/master@{#31611}

PR-URL: #3549
Reviewed-By: Trevor Norris <trev.norris@gmail.com>

Parent: 6e887cc
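For context on the failure mode: during a scavenge, promoted objects are scanned for pointers into new space, and any slot that looks like a tagged heap reference gets dereferenced. An ArrayBuffer's backing store is a raw, embedder-supplied address with no alignment guarantee, so visiting that slot as if it held a tagged pointer can dereference an arbitrary address. Below is a minimal standalone model of that hazard, assuming V8's low-bit pointer-tagging convention; the types and names are illustrative, not V8 internals.

// Minimal model of the crash mode (illustrative, not V8 code). Assumes the
// V8 convention that a slot with its low bit set holds a heap pointer and a
// slot with the low bit clear holds a small integer (Smi).
#include <cstdint>
#include <iostream>

struct FakeHeapObject {
  uintptr_t map_word;  // a real heap object begins with a map pointer
};

// A naive visitor that, like the pre-fix scavenge path, treats every slot
// of an object as a tagged value.
void VisitSlot(uintptr_t slot_value) {
  if (slot_value & 1) {  // low bit set: "looks like" a heap pointer
    auto* obj = reinterpret_cast<FakeHeapObject*>(slot_value - 1);
    // Only safe if slot_value really is a tagged heap pointer. A raw
    // user-supplied backing-store address that happens to be odd would be
    // dereferenced here, reading (or faulting on) arbitrary memory.
    std::cout << "map word: " << std::hex << obj->map_word << "\n";
  }
}

int main() {
  FakeHeapObject real{0x1234};
  VisitSlot(reinterpret_cast<uintptr_t>(&real) | 1);  // fine: a real object
  uintptr_t user_ptr = 0xdeadbeef;  // odd value, arbitrary alignment
  (void)user_ptr;
  // VisitSlot(user_ptr);  // would dereference 0xdeadbeee -> likely crash
  return 0;
}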

3 files changed: 93 additions, 36 deletions
deps/v8/src/heap/heap.cc (64 additions, 36 deletions)
@@ -2016,42 +2016,8 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
       // for pointers to from semispace instead of looking for pointers
       // to new space.
       DCHECK(!target->IsMap());
-      Address obj_address = target->address();
-
-      // We are not collecting slots on new space objects during mutation
-      // thus we have to scan for pointers to evacuation candidates when we
-      // promote objects. But we should not record any slots in non-black
-      // objects. Grey object's slots would be rescanned.
-      // White object might not survive until the end of collection
-      // it would be a violation of the invariant to record it's slots.
-      bool record_slots = false;
-      if (incremental_marking()->IsCompacting()) {
-        MarkBit mark_bit = Marking::MarkBitFrom(target);
-        record_slots = Marking::IsBlack(mark_bit);
-      }
-#if V8_DOUBLE_FIELDS_UNBOXING
-      LayoutDescriptorHelper helper(target->map());
-      bool has_only_tagged_fields = helper.all_fields_tagged();
-
-      if (!has_only_tagged_fields) {
-        for (int offset = 0; offset < size;) {
-          int end_of_region_offset;
-          if (helper.IsTagged(offset, size, &end_of_region_offset)) {
-            IterateAndMarkPointersToFromSpace(
-                target, obj_address + offset,
-                obj_address + end_of_region_offset, record_slots,
-                &ScavengeObject);
-          }
-          offset = end_of_region_offset;
-        }
-      } else {
-#endif
-        IterateAndMarkPointersToFromSpace(target, obj_address,
-                                          obj_address + size, record_slots,
-                                          &ScavengeObject);
-#if V8_DOUBLE_FIELDS_UNBOXING
-      }
-#endif
+
+      IteratePointersToFromSpace(target, size, &ScavengeObject);
     }
   }
 
@@ -5184,6 +5150,68 @@ void Heap::IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
 }
 
 
+void Heap::IteratePointersToFromSpace(HeapObject* target, int size,
+                                      ObjectSlotCallback callback) {
+  Address obj_address = target->address();
+
+  // We are not collecting slots on new space objects during mutation
+  // thus we have to scan for pointers to evacuation candidates when we
+  // promote objects. But we should not record any slots in non-black
+  // objects. Grey object's slots would be rescanned.
+  // White object might not survive until the end of collection
+  // it would be a violation of the invariant to record it's slots.
+  bool record_slots = false;
+  if (incremental_marking()->IsCompacting()) {
+    MarkBit mark_bit = Marking::MarkBitFrom(target);
+    record_slots = Marking::IsBlack(mark_bit);
+  }
+
+  // Do not scavenge JSArrayBuffer's contents
+  switch (target->ContentType()) {
+    case HeapObjectContents::kTaggedValues: {
+      IterateAndMarkPointersToFromSpace(target, obj_address, obj_address + size,
+                                        record_slots, callback);
+      break;
+    }
+    case HeapObjectContents::kMixedValues: {
+      if (target->IsFixedTypedArrayBase()) {
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + FixedTypedArrayBase::kBasePointerOffset,
+            obj_address + FixedTypedArrayBase::kHeaderSize, record_slots,
+            callback);
+      } else if (target->IsJSArrayBuffer()) {
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address,
+            obj_address + JSArrayBuffer::kByteLengthOffset + kPointerSize,
+            record_slots, callback);
+        IterateAndMarkPointersToFromSpace(
+            target, obj_address + JSArrayBuffer::kSize, obj_address + size,
+            record_slots, callback);
+#if V8_DOUBLE_FIELDS_UNBOXING
+      } else if (FLAG_unbox_double_fields) {
+        LayoutDescriptorHelper helper(target->map());
+        DCHECK(!helper.all_fields_tagged());
+
+        for (int offset = 0; offset < size;) {
+          int end_of_region_offset;
+          if (helper.IsTagged(offset, size, &end_of_region_offset)) {
+            IterateAndMarkPointersToFromSpace(
+                target, obj_address + offset,
+                obj_address + end_of_region_offset, record_slots, callback);
+          }
+          offset = end_of_region_offset;
+        }
+#endif
+      }
+      break;
+    }
+    case HeapObjectContents::kRawValues: {
+      break;
+    }
+  }
+}
+
+
 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
   IterateStrongRoots(v, mode);
   IterateWeakRoots(v, mode);
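The heart of the fix, as the diff above shows: IteratePointersToFromSpace dispatches on the object's content type and scans only the slot ranges known to hold tagged pointers. For JSArrayBuffer it scans up to and including the byte-length field, skips the raw backing-store field, and resumes at JSArrayBuffer::kSize. A minimal standalone sketch of that region-skipping pattern, with an assumed slot layout (illustrative names, not V8 internals):

// Region-skipping iteration sketch (assumed layout; not V8 code).
#include <cstddef>
#include <cstdint>
#include <functional>
#include <iostream>
#include <vector>

using Slot = uintptr_t;

// Half-open range [begin, end) of slot indices known to hold tagged values.
struct TaggedRegion {
  size_t begin;
  size_t end;
};

// Visit only the tagged regions; slots outside every region (e.g. a raw
// backing-store pointer) are never interpreted as heap references.
void IterateTaggedRegions(Slot* object,
                          const std::vector<TaggedRegion>& regions,
                          const std::function<void(Slot*)>& visit) {
  for (const TaggedRegion& r : regions) {
    for (size_t i = r.begin; i < r.end; ++i) visit(&object[i]);
  }
}

int main() {
  // Hypothetical buffer-like object: slots 0-1 tagged (map, byte length),
  // slot 2 raw (user-supplied backing store), slot 3 tagged again.
  Slot buffer_like[4] = {0, 0, 0xdeadbeef /* raw, must be skipped */, 0};
  IterateTaggedRegions(buffer_like, {{0, 2}, {3, 4}}, [](Slot* s) {
    std::cout << "visiting slot value " << std::hex << *s << "\n";
  });
  return 0;
}

The two regions here mirror the two IterateAndMarkPointersToFromSpace calls in the JSArrayBuffer branch of the actual patch: one before and one after the untagged backing-store field.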
deps/v8/src/heap/heap.h (3 additions, 0 deletions)
@@ -961,6 +961,9 @@ class Heap {
 
   // Iterate pointers to from semispace of new space found in memory interval
   // from start to end within |object|.
+  void IteratePointersToFromSpace(HeapObject* target, int size,
+                                  ObjectSlotCallback callback);
+
   void IterateAndMarkPointersToFromSpace(HeapObject* object, Address start,
                                          Address end, bool record_slots,
                                          ObjectSlotCallback callback);
deps/v8/test/cctest/test-api.cc (26 additions, 0 deletions)
@@ -14242,6 +14242,32 @@ THREADED_TEST(SkipArrayBufferBackingStoreDuringGC) {
 }
 
 
+THREADED_TEST(SkipArrayBufferDuringScavenge) {
+  LocalContext env;
+  v8::Isolate* isolate = env->GetIsolate();
+  v8::HandleScope handle_scope(isolate);
+
+  // Make sure the pointer looks like a heap object
+  Local<v8::Object> tmp = v8::Object::New(isolate);
+  uint8_t* store_ptr =
+      reinterpret_cast<uint8_t*>(*reinterpret_cast<uintptr_t*>(*tmp));
+
+  // Make `store_ptr` point to from space
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);
+
+  // Create ArrayBuffer with pointer-that-cannot-be-visited in the backing store
+  Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, store_ptr, 8);
+
+  // Should not crash,
+  // i.e. backing store pointer should not be treated as a heap object pointer
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in survivor space now
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
+
+  // Use `ab` to silence compiler warning
+  CHECK_EQ(ab->GetContents().Data(), store_ptr);
+}
+
+
 THREADED_TEST(SharedUint8Array) {
   i::FLAG_harmony_sharedarraybuffer = true;
   TypedArrayTestHelper<uint8_t, v8::Uint8Array, i::FixedUint8Array,
