Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 7 additions & 4 deletions src/api/environment.cc
Original file line number Diff line number Diff line change
Expand Up @@ -303,14 +303,17 @@ void SetIsolateUpForNode(v8::Isolate* isolate) {
SetIsolateUpForNode(isolate, settings);
}

//
// Returns the IsolateGroup new isolates should be created in.
IsolateGroup GetOrCreateIsolateGroup() {
#ifdef V8_ENABLE_SANDBOX
  // With the V8 sandbox enabled, every isolate must live in the default
  // group so that ArrayBuffer backing stores allocated through
  // NewDefaultAllocator() (which uses the default group's sandbox) remain
  // valid for all isolates. A fresh group would carry its own sandbox and
  // mismatch that allocator.
  return IsolateGroup::GetDefault();
#else
  // Without the sandbox, prefer a dedicated group when the build allows it;
  // when pointer compression is disabled, new groups cannot be created and
  // we fall back to the default group.
  if (IsolateGroup::CanCreateNewGroups()) {
    return IsolateGroup::Create();
  }
  return IsolateGroup::GetDefault();
#endif
}

Expand Down
23 changes: 21 additions & 2 deletions src/node_buffer.cc
Original file line number Diff line number Diff line change
Expand Up @@ -529,7 +529,7 @@ MaybeLocal<Object> New(Environment* env,
}
}

#if defined(V8_ENABLE_SANDBOX)
#ifdef V8_ENABLE_SANDBOX
// When v8 sandbox is enabled, external backing stores are not supported
// since all arraybuffer allocations are expected to be done by the isolate.
// Since this violates the contract of this function, let's free the data and
Expand Down Expand Up @@ -1453,7 +1453,7 @@ inline size_t CheckNumberToSize(Local<Value> number) {
CHECK(value >= 0 && value < maxSize);
size_t size = static_cast<size_t>(value);
#ifdef V8_ENABLE_SANDBOX
CHECK_LE(size, kMaxSafeBufferSizeForSandbox);
CHECK_LE(size, v8::internal::kMaxSafeBufferSizeForSandbox);
#endif
return size;
}
Expand All @@ -1476,6 +1476,24 @@ void CreateUnsafeArrayBuffer(const FunctionCallbackInfo<Value>& args) {
env->isolate_data()->is_building_snapshot()) {
buf = ArrayBuffer::New(isolate, size);
} else {
#ifdef V8_ENABLE_SANDBOX
std::unique_ptr<ArrayBuffer::Allocator> allocator(
ArrayBuffer::Allocator::NewDefaultAllocator());
void* data = allocator->AllocateUninitialized(size);
if (!data) [[unlikely]] {
THROW_ERR_MEMORY_ALLOCATION_FAILED(env);
return;
}
std::unique_ptr<BackingStore> store = ArrayBuffer::NewBackingStore(
data,
size,
[](void* data, size_t length, void*) {
std::unique_ptr<ArrayBuffer::Allocator> allocator(
ArrayBuffer::Allocator::NewDefaultAllocator());
allocator->Free(data, length);
},
nullptr);
#else
std::unique_ptr<BackingStore> store = ArrayBuffer::NewBackingStore(
isolate,
size,
Expand All @@ -1486,6 +1504,7 @@ void CreateUnsafeArrayBuffer(const FunctionCallbackInfo<Value>& args) {
THROW_ERR_MEMORY_ALLOCATION_FAILED(env);
return;
}
#endif

buf = ArrayBuffer::New(isolate, std::move(store));
}
Expand Down
61 changes: 59 additions & 2 deletions src/node_serdes.cc
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,26 @@ using v8::ValueSerializer;

namespace serdes {

// Lazily-created, process-lifetime ArrayBuffer allocator shared by the
// serdes buffer helpers. Intentionally never freed (lives until exit).
v8::ArrayBuffer::Allocator* GetAllocator() {
  static auto* const instance =
      v8::ArrayBuffer::Allocator::NewDefaultAllocator();
  return instance;
}

// Grows or shrinks a buffer previously obtained from GetAllocator().
// Copies min(old_length, new_length) bytes into a freshly allocated block,
// zero-fills any growth, and frees the old block. Returns nullptr (leaving
// `data` untouched) if the new allocation fails.
void* Reallocate(void* data, size_t old_length, size_t new_length) {
  if (old_length == new_length) return data;
  uint8_t* new_data = reinterpret_cast<uint8_t*>(
      GetAllocator()->AllocateUninitialized(new_length));
  if (new_data == nullptr) return nullptr;
  size_t bytes_to_copy = std::min(old_length, new_length);
  // Guard the copy: memcpy from a null pointer is UB even for 0 bytes,
  // and `data` may be null on the first (old_length == 0) call.
  if (bytes_to_copy > 0) {
    memcpy(new_data, data, bytes_to_copy);
  }
  if (new_length > bytes_to_copy) {
    memset(new_data + bytes_to_copy, 0, new_length - bytes_to_copy);
  }
  if (data != nullptr) {
    GetAllocator()->Free(data, old_length);
  }
  return new_data;
}

class SerializerContext : public BaseObject,
public ValueSerializer::Delegate {
public:
Expand All @@ -37,10 +57,15 @@ class SerializerContext : public BaseObject,

~SerializerContext() override = default;

// v8::ValueSerializer::Delegate
void ThrowDataCloneError(Local<String> message) override;
Maybe<bool> WriteHostObject(Isolate* isolate, Local<Object> object) override;
Maybe<uint32_t> GetSharedArrayBufferId(
Isolate* isolate, Local<SharedArrayBuffer> shared_array_buffer) override;
void* ReallocateBufferMemory(void* old_buffer,
size_t old_length,
size_t* new_length) override;
void FreeBufferMemory(void* buffer) override;

static void SetTreatArrayBufferViewsAsHostObjects(
const FunctionCallbackInfo<Value>& args);
Expand All @@ -61,6 +86,7 @@ class SerializerContext : public BaseObject,

private:
ValueSerializer serializer_;
size_t last_length_ = 0;
};

class DeserializerContext : public BaseObject,
Expand Down Expand Up @@ -145,6 +171,24 @@ Maybe<uint32_t> SerializerContext::GetSharedArrayBufferId(
return id->Uint32Value(env()->context());
}

// v8::ValueSerializer::Delegate hook: (re)allocates the serialization
// buffer. Rounds requests up to a 4 KiB minimum to limit realloc churn.
// `last_length_` tracks the size of the currently live buffer so that
// Reallocate()/FreeBufferMemory() can pass the correct old size to the
// allocator; it must therefore only be updated when an allocation
// actually succeeds, otherwise a later free would use the wrong length.
void* SerializerContext::ReallocateBufferMemory(void* old_buffer,
                                                size_t requested_size,
                                                size_t* new_length) {
  *new_length = std::max(static_cast<size_t>(4096), requested_size);
  void* ret;
  if (old_buffer == nullptr) {
    ret = GetAllocator()->Allocate(*new_length);
  } else {
    ret = Reallocate(old_buffer, last_length_, *new_length);
  }
  if (ret != nullptr) {
    last_length_ = *new_length;
  }
  return ret;
}

// v8::ValueSerializer::Delegate hook: releases the serialization buffer.
// Frees with `last_length_`, the size recorded by the most recent
// ReallocateBufferMemory() call — the allocator requires the original
// allocation size on Free.
// NOTE(review): this relies on V8 only ever freeing the buffer most
// recently returned by ReallocateBufferMemory; confirm against the
// ValueSerializer::Delegate contract.
void SerializerContext::FreeBufferMemory(void* buffer) {
  GetAllocator()->Free(buffer, last_length_);
}

Maybe<bool> SerializerContext::WriteHostObject(Isolate* isolate,
Local<Object> input) {
Local<Value> args[1] = { input };
Expand Down Expand Up @@ -208,14 +252,27 @@ void SerializerContext::ReleaseBuffer(const FunctionCallbackInfo<Value>& args) {
SerializerContext* ctx;
ASSIGN_OR_RETURN_UNWRAP(&ctx, args.This());

// Note: Both ValueSerializer and this Buffer::New() variant use malloc()
// as the underlying allocator.
std::pair<uint8_t*, size_t> ret = ctx->serializer_.Release();
#ifdef V8_ENABLE_SANDBOX
Local<Object> buf;
if (Buffer::New(ctx->env(), reinterpret_cast<char*>(ret.first), ret.second)
.ToLocal(&buf)) {
args.GetReturnValue().Set(buf);
}
#else
std::unique_ptr<v8::BackingStore> bs = v8::ArrayBuffer::NewBackingStore(
reinterpret_cast<char*>(ret.first),
ret.second,
[](void* data, size_t length, void* deleter_data) {
if (data) GetAllocator()->Free(reinterpret_cast<char*>(data), length);
},
nullptr);
Local<ArrayBuffer> ab =
v8::ArrayBuffer::New(ctx->env()->isolate(), std::move(bs));

auto buf = Buffer::New(ctx->env(), ab, 0, ret.second);
if (!buf.IsEmpty()) args.GetReturnValue().Set(buf.ToLocalChecked());
#endif
}

void SerializerContext::TransferArrayBuffer(
Expand Down
24 changes: 20 additions & 4 deletions src/node_trace_events.cc
Original file line number Diff line number Diff line change
Expand Up @@ -129,12 +129,28 @@ static void GetCategoryEnabledBuffer(const FunctionCallbackInfo<Value>& args) {
const uint8_t* enabled_pointer =
TRACE_EVENT_API_GET_CATEGORY_GROUP_ENABLED(category_name.out());
uint8_t* enabled_pointer_cast = const_cast<uint8_t*>(enabled_pointer);

uint8_t size = sizeof(*enabled_pointer_cast);

#ifdef V8_ENABLE_SANDBOX
std::unique_ptr<ArrayBuffer::Allocator> allocator(
ArrayBuffer::Allocator::NewDefaultAllocator());
void* v8_data = allocator->Allocate(size);
CHECK(v8_data);
memcpy(v8_data, enabled_pointer_cast, size);
std::unique_ptr<BackingStore> bs = ArrayBuffer::NewBackingStore(
enabled_pointer_cast,
sizeof(*enabled_pointer_cast),
[](void*, size_t, void*) {},
v8_data,
size,
[](void* data, size_t length, void*) {
std::unique_ptr<ArrayBuffer::Allocator> allocator(
ArrayBuffer::Allocator::NewDefaultAllocator());
allocator->Free(data, length);
},
nullptr);
#else
std::unique_ptr<BackingStore> bs = ArrayBuffer::NewBackingStore(
enabled_pointer_cast, size, [](void*, size_t, void*) {}, nullptr);
#endif

auto ab = ArrayBuffer::New(isolate, std::move(bs));
v8::Local<Uint8Array> u8 = v8::Uint8Array::New(ab, 0, 1);

Expand Down
Loading