Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Appearance settings

Commit cadee67

Browse files
indutny authored and jasnell committed
deps: backport 8d6a228 from the v8's upstream
Original commit message: [heap] fix crash during the scavenge of ArrayBuffer Scavenger should not attempt to visit ArrayBuffer's storage, it is a user-supplied pointer that may have any alignment. Visiting it, may result in a crash. BUG= R=jochen Review URL: https://codereview.chromium.org/1406133003 Cr-Commit-Position: refs/heads/master@{#31611} PR-URL: #3549 Reviewed-By: Trevor Norris <trev.norris@gmail.com>
1 parent 2109708 commit cadee67
Copy full SHA for cadee67

File tree

Expand file tree / Collapse file tree

3 files changed

+92
-34
lines changed
Open diff view settings
Filter options
Expand file tree / Collapse file tree

3 files changed

+92
-34
lines changed
Open diff view settings
Collapse file

‎deps/v8/src/heap/heap.cc‎

Copy file name to clipboard — Expand all lines: deps/v8/src/heap/heap.cc
+63 −34 — Lines changed: 63 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -2079,40 +2079,8 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
20792079
// for pointers to from semispace instead of looking for pointers
20802080
// to new space.
20812081
DCHECK(!target->IsMap());
2082-
Address obj_address = target->address();
2083-
2084-
// We are not collecting slots on new space objects during mutation
2085-
// thus we have to scan for pointers to evacuation candidates when we
2086-
// promote objects. But we should not record any slots in non-black
2087-
// objects. Grey object's slots would be rescanned.
2088-
// White object might not survive until the end of collection
2089-
// it would be a violation of the invariant to record it's slots.
2090-
bool record_slots = false;
2091-
if (incremental_marking()->IsCompacting()) {
2092-
MarkBit mark_bit = Marking::MarkBitFrom(target);
2093-
record_slots = Marking::IsBlack(mark_bit);
2094-
}
2095-
#if V8_DOUBLE_FIELDS_UNBOXING
2096-
LayoutDescriptorHelper helper(target->map());
2097-
bool has_only_tagged_fields = helper.all_fields_tagged();
2098-
2099-
if (!has_only_tagged_fields) {
2100-
for (int offset = 0; offset < size;) {
2101-
int end_of_region_offset;
2102-
if (helper.IsTagged(offset, size, &end_of_region_offset)) {
2103-
IterateAndMarkPointersToFromSpace(
2104-
record_slots, obj_address + offset,
2105-
obj_address + end_of_region_offset, &ScavengeObject);
2106-
}
2107-
offset = end_of_region_offset;
2108-
}
2109-
} else {
2110-
#endif
2111-
IterateAndMarkPointersToFromSpace(
2112-
record_slots, obj_address, obj_address + size, &ScavengeObject);
2113-
#if V8_DOUBLE_FIELDS_UNBOXING
2114-
}
2115-
#endif
2082+
2083+
IteratePointersToFromSpace(target, size, &ScavengeObject);
21162084
}
21172085
}
21182086

@@ -5263,6 +5231,67 @@ void Heap::IterateAndMarkPointersToFromSpace(bool record_slots, Address start,
52635231
}
52645232

52655233

5234+
void Heap::IteratePointersToFromSpace(HeapObject* target, int size,
5235+
ObjectSlotCallback callback) {
5236+
Address obj_address = target->address();
5237+
5238+
// We are not collecting slots on new space objects during mutation
5239+
// thus we have to scan for pointers to evacuation candidates when we
5240+
// promote objects. But we should not record any slots in non-black
5241+
// objects. Grey object's slots would be rescanned.
5242+
// White object might not survive until the end of collection
5243+
// it would be a violation of the invariant to record it's slots.
5244+
bool record_slots = false;
5245+
if (incremental_marking()->IsCompacting()) {
5246+
MarkBit mark_bit = Marking::MarkBitFrom(target);
5247+
record_slots = Marking::IsBlack(mark_bit);
5248+
}
5249+
5250+
// Do not scavenge JSArrayBuffer's contents
5251+
switch (target->ContentType()) {
5252+
case HeapObjectContents::kTaggedValues: {
5253+
IterateAndMarkPointersToFromSpace(record_slots, obj_address,
5254+
obj_address + size, callback);
5255+
break;
5256+
}
5257+
case HeapObjectContents::kMixedValues: {
5258+
if (target->IsFixedTypedArrayBase()) {
5259+
IterateAndMarkPointersToFromSpace(
5260+
record_slots, obj_address + FixedTypedArrayBase::kBasePointerOffset,
5261+
obj_address + FixedTypedArrayBase::kHeaderSize, callback);
5262+
} else if (target->IsJSArrayBuffer()) {
5263+
IterateAndMarkPointersToFromSpace(
5264+
record_slots, obj_address,
5265+
obj_address + JSArrayBuffer::kByteLengthOffset + kPointerSize,
5266+
callback);
5267+
IterateAndMarkPointersToFromSpace(
5268+
record_slots, obj_address + JSArrayBuffer::kSize,
5269+
obj_address + size, callback);
5270+
#if V8_DOUBLE_FIELDS_UNBOXING
5271+
} else if (FLAG_unbox_double_fields) {
5272+
LayoutDescriptorHelper helper(target->map());
5273+
DCHECK(!helper.all_fields_tagged());
5274+
5275+
for (int offset = 0; offset < size;) {
5276+
int end_of_region_offset;
5277+
if (helper.IsTagged(offset, size, &end_of_region_offset)) {
5278+
IterateAndMarkPointersToFromSpace(
5279+
record_slots, obj_address + offset,
5280+
obj_address + end_of_region_offset, callback);
5281+
}
5282+
offset = end_of_region_offset;
5283+
}
5284+
#endif
5285+
}
5286+
break;
5287+
}
5288+
case HeapObjectContents::kRawValues: {
5289+
break;
5290+
}
5291+
}
5292+
}
5293+
5294+
52665295
void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
52675296
IterateStrongRoots(v, mode);
52685297
IterateWeakRoots(v, mode);
Collapse file

‎deps/v8/src/heap/heap.h‎

Copy file name to clipboard — Expand all lines: deps/v8/src/heap/heap.h
+3 −0 — Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -953,6 +953,9 @@ class Heap {
953953

954954
// Iterate pointers to from semispace of new space found in memory interval
955955
// from start to end.
956+
void IteratePointersToFromSpace(HeapObject* target, int size,
957+
ObjectSlotCallback callback);
958+
956959
void IterateAndMarkPointersToFromSpace(bool record_slots, Address start,
957960
Address end,
958961
ObjectSlotCallback callback);
Collapse file

‎deps/v8/test/cctest/test-api.cc‎

Copy file name to clipboard — Expand all lines: deps/v8/test/cctest/test-api.cc
+26 −0 — Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14191,6 +14191,32 @@ THREADED_TEST(SkipArrayBufferBackingStoreDuringGC) {
1419114191
}
1419214192

1419314193

14194+
THREADED_TEST(SkipArrayBufferDuringScavenge) {
14195+
LocalContext env;
14196+
v8::Isolate* isolate = env->GetIsolate();
14197+
v8::HandleScope handle_scope(isolate);
14198+
14199+
// Make sure the pointer looks like a heap object
14200+
Local<v8::Object> tmp = v8::Object::New(isolate);
14201+
uint8_t* store_ptr =
14202+
reinterpret_cast<uint8_t*>(*reinterpret_cast<uintptr_t*>(*tmp));
14203+
14204+
// Make `store_ptr` point to from space
14205+
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
14206+
14207+
// Create ArrayBuffer with pointer-that-cannot-be-visited in the backing store
14208+
Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, store_ptr, 8);
14209+
14210+
// Should not crash,
14211+
// i.e. backing store pointer should not be treated as a heap object pointer
14212+
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in survivor space now
14213+
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in old gen now
14214+
14215+
// Use `ab` to silence compiler warning
14216+
CHECK_EQ(ab->GetContents().Data(), store_ptr);
14217+
}
14218+
14219+
1419414220
THREADED_TEST(SharedUint8Array) {
1419514221
i::FLAG_harmony_sharedarraybuffer = true;
1419614222
TypedArrayTestHelper<uint8_t, v8::Uint8Array, i::ExternalUint8Array,

0 commit comments

Comments
0 (0)
Morty Proxy This is a proxified and sanitized view of the page, visit original site.