2 changes: 1 addition & 1 deletion common.gypi
@@ -38,7 +38,7 @@

# Reset this number to 0 on major V8 upgrades.
# Increment by one for each non-official patch applied to deps/v8.
'v8_embedder_string': '-node.37',
'v8_embedder_string': '-node.41',

##### V8 defaults for Node.js #####

1 change: 1 addition & 0 deletions deps/v8/AUTHORS
@@ -334,3 +334,4 @@ Kotaro Ohsugi <dec4m4rk@gmail.com>
Jing Peiyang <jingpeiyang@eswincomputing.com>
magic-akari <akari.ccino@gmail.com>
Ryuhei Shima <shimaryuhei@gmail.com>
Domagoj Stolfa <domagoj.stolfa@gmail.com>
7 changes: 7 additions & 0 deletions deps/v8/include/v8-isolate.h
@@ -950,6 +950,13 @@ class V8_EXPORT Isolate {
*/
void GetHeapStatistics(HeapStatistics* heap_statistics);

/**
* Get the total number of bytes allocated since isolate creation.
* This should be used only by Node.js, since it is a temporary method
* added to avoid breaking the ABI of HeapStatistics.
*/
uint64_t GetTotalAllocatedBytes();

/**
* Returns the number of spaces in the heap.
*/
5 changes: 5 additions & 0 deletions deps/v8/src/api/api.cc
@@ -10363,6 +10363,11 @@ void Isolate::GetHeapStatistics(HeapStatistics* heap_statistics) {
#endif // V8_ENABLE_WEBASSEMBLY
}

uint64_t Isolate::GetTotalAllocatedBytes() {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(this);
return i_isolate->heap()->GetTotalAllocatedBytes();
}

size_t Isolate::NumberOfHeapSpaces() {
return i::LAST_SPACE - i::FIRST_SPACE + 1;
}
5 changes: 3 additions & 2 deletions deps/v8/src/codegen/riscv/assembler-riscv-inl.h
@@ -115,8 +115,9 @@ void Assembler::set_target_compressed_address_at(
Address pc, Address constant_pool, Tagged_t target,
WritableJitAllocation* jit_allocation, ICacheFlushMode icache_flush_mode) {
if (COMPRESS_POINTERS_BOOL) {
Assembler::set_uint32_constant_at(pc, constant_pool, target, jit_allocation,
icache_flush_mode);
Assembler::set_uint32_constant_at(pc, constant_pool,
static_cast<uint32_t>(target),
jit_allocation, icache_flush_mode);
} else {
UNREACHABLE();
}
4 changes: 2 additions & 2 deletions deps/v8/src/codegen/riscv/assembler-riscv.cc
@@ -720,8 +720,8 @@ void Assembler::bind_to(Label* L, int pos) {
trampoline_pos = get_trampoline_entry(fixup_pos);
CHECK_NE(trampoline_pos, kInvalidSlotPos);
}
CHECK((trampoline_pos - fixup_pos) <= kMaxBranchOffset);
DEBUG_PRINTF("\t\ttrampolining: %d\n", trampoline_pos);
CHECK((trampoline_pos - fixup_pos) <= kMaxBranchOffset);
target_at_put(fixup_pos, trampoline_pos, false);
fixup_pos = trampoline_pos;
}
@@ -1486,6 +1486,7 @@ void Assembler::BlockTrampolinePoolFor(int instructions) {
}

void Assembler::CheckTrampolinePool() {
if (trampoline_emitted_) return;
// Some small sequences of instructions must not be broken up by the
// insertion of a trampoline pool; such sequences are protected by setting
// either trampoline_pool_blocked_nesting_ or no_trampoline_pool_before_,
@@ -1507,7 +1508,6 @@ void Assembler::CheckTrampolinePool() {
return;
}

DCHECK(!trampoline_emitted_);
DCHECK_GE(unbound_labels_count_, 0);
if (unbound_labels_count_ > 0) {
// First we emit jump, then we emit trampoline pool.
15 changes: 13 additions & 2 deletions deps/v8/src/codegen/riscv/assembler-riscv.h
@@ -303,6 +303,8 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase,
// See Assembler::CheckConstPool for more info.
void EmitPoolGuard();

void FinishCode() { ForceConstantPoolEmissionWithoutJump(); }

#if defined(V8_TARGET_ARCH_RISCV64)
static void set_target_value_at(
Address pc, uint64_t target,
@@ -617,6 +619,8 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase,
}
}

inline int next_buffer_check() { return next_buffer_check_; }

friend class VectorUnit;
class VectorUnit {
public:
@@ -728,16 +732,19 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase,

// Block the emission of the trampoline pool before pc_offset.
void BlockTrampolinePoolBefore(int pc_offset) {
if (no_trampoline_pool_before_ < pc_offset)
if (no_trampoline_pool_before_ < pc_offset) {
DEBUG_PRINTF("\tBlockTrampolinePoolBefore %d\n", pc_offset);
no_trampoline_pool_before_ = pc_offset;
}
}

void StartBlockTrampolinePool() {
DEBUG_PRINTF("\tStartBlockTrampolinePool\n");
DEBUG_PRINTF("\tStartBlockTrampolinePool %d\n", pc_offset());
trampoline_pool_blocked_nesting_++;
}

void EndBlockTrampolinePool() {
DEBUG_PRINTF("\tEndBlockTrampolinePool\n");
trampoline_pool_blocked_nesting_--;
DEBUG_PRINTF("\ttrampoline_pool_blocked_nesting:%d\n",
trampoline_pool_blocked_nesting_);
@@ -767,6 +774,10 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase,

bool is_buffer_growth_blocked() const { return block_buffer_growth_; }

inline int ConstpoolComputesize() {
return constpool_.ComputeSize(Jump::kOmitted, Alignment::kOmitted);
}

private:
// Avoid overflows for displacements etc.
static const int kMaximalBufferSize = 512 * MB;
15 changes: 11 additions & 4 deletions deps/v8/src/codegen/riscv/macro-assembler-riscv.cc
@@ -4926,11 +4926,21 @@ void MacroAssembler::LoadRootRegisterOffset(Register destination,

void MacroAssembler::Jump(Register target, Condition cond, Register rs,
const Operand& rt) {
BlockTrampolinePoolScope block_trampoline_pool(this);
if (cond == cc_always) {
jr(target);
DEBUG_PRINTF("\tCheckTrampolinePool pc_offset:%d %d\n", pc_offset(),
next_buffer_check() - ConstpoolComputesize());
if (!is_trampoline_emitted() &&
pc_offset() >= (next_buffer_check() - ConstpoolComputesize())) {
// The trampoline pool must be checked before the constant pool, so the
// trampoline is emitted first here.
// Jump(ra, al) blocks the trampoline pool for one instruction.
nop();
CheckTrampolinePool();
}
ForceConstantPoolEmissionWithoutJump();
} else {
BlockTrampolinePoolScope block_trampoline_pool(this);
BRANCH_ARGS_CHECK(cond, rs, rt);
Branch(kInstrSize * 2, NegateCondition(cond), rs, rt);
jr(target);
@@ -5342,9 +5352,6 @@ void MacroAssembler::StoreReturnAddressAndCall(Register target) {

void MacroAssembler::Ret(Condition cond, Register rs, const Operand& rt) {
Jump(ra, cond, rs, rt);
if (cond == al) {
ForceConstantPoolEmissionWithoutJump();
}
}

void MacroAssembler::BranchLong(Label* L) {
1 change: 1 addition & 0 deletions deps/v8/src/execution/riscv/simulator-riscv.h
@@ -538,6 +538,7 @@ class Simulator : public SimulatorBase {
// Return central stack view, without additional safety margins.
// Users, for example wasm::StackMemory, can add their own.
base::Vector<uint8_t> GetCentralStackView() const;
static constexpr int JSStackLimitMargin() { return kAdditionalStackMargin; }

void IterateRegistersAndStack(::heap::base::StackVisitor* visitor);

30 changes: 24 additions & 6 deletions deps/v8/src/heap/heap-allocator.cc
@@ -65,24 +65,42 @@ AllocationResult HeapAllocator::AllocateRawLargeInternal(
int size_in_bytes, AllocationType allocation, AllocationOrigin origin,
AllocationAlignment alignment) {
DCHECK_GT(size_in_bytes, heap_->MaxRegularHeapObjectSize(allocation));
AllocationResult allocation_result;
switch (allocation) {
case AllocationType::kYoung:
return new_lo_space()->AllocateRaw(local_heap_, size_in_bytes);
allocation_result =
new_lo_space()->AllocateRaw(local_heap_, size_in_bytes);
break;
case AllocationType::kOld:
return lo_space()->AllocateRaw(local_heap_, size_in_bytes);
allocation_result =
lo_space()->AllocateRaw(local_heap_, size_in_bytes);
break;
case AllocationType::kCode:
return code_lo_space()->AllocateRaw(local_heap_, size_in_bytes);
allocation_result =
code_lo_space()->AllocateRaw(local_heap_, size_in_bytes);
break;
case AllocationType::kSharedOld:
return shared_lo_space()->AllocateRaw(local_heap_, size_in_bytes);
allocation_result =
shared_lo_space()->AllocateRaw(local_heap_, size_in_bytes);
break;
case AllocationType::kTrusted:
return trusted_lo_space()->AllocateRaw(local_heap_, size_in_bytes);
allocation_result =
trusted_lo_space()->AllocateRaw(local_heap_, size_in_bytes);
break;
case AllocationType::kSharedTrusted:
return shared_trusted_lo_space()->AllocateRaw(local_heap_, size_in_bytes);
allocation_result = shared_trusted_lo_space()->AllocateRaw(
local_heap_, size_in_bytes);
break;
case AllocationType::kMap:
case AllocationType::kReadOnly:
case AllocationType::kSharedMap:
UNREACHABLE();
}
if (!allocation_result.IsFailure()) {
int allocated_size = ALIGN_TO_ALLOCATION_ALIGNMENT(size_in_bytes);
heap_->AddTotalAllocatedBytes(allocated_size);
}
return allocation_result;
}

namespace {
4 changes: 4 additions & 0 deletions deps/v8/src/heap/heap.cc
@@ -7447,6 +7447,10 @@ int Heap::NextStackTraceId() {
return last_id;
}

uint64_t Heap::GetTotalAllocatedBytes() {
return total_allocated_bytes_.load(std::memory_order_relaxed);
}

EmbedderStackStateScope::EmbedderStackStateScope(
Heap* heap, EmbedderStackStateOrigin origin, StackState stack_state)
: heap_(heap),
7 changes: 7 additions & 0 deletions deps/v8/src/heap/heap.h
@@ -1635,6 +1635,11 @@ class Heap final {
bool ShouldUseBackgroundThreads() const;
bool ShouldUseIncrementalMarking() const;

void AddTotalAllocatedBytes(size_t size) {
total_allocated_bytes_.fetch_add(size, std::memory_order_relaxed);
}
uint64_t GetTotalAllocatedBytes();

HeapAllocator* allocator() { return heap_allocator_; }
const HeapAllocator* allocator() const { return heap_allocator_; }

@@ -2409,6 +2414,8 @@ class Heap final {
// actually finished.
bool is_full_gc_during_loading_ = false;

std::atomic<uint64_t> total_allocated_bytes_ = 0;

// Classes in "heap" can be friends.
friend class ActivateMemoryReducerTask;
friend class AlwaysAllocateScope;
6 changes: 6 additions & 0 deletions deps/v8/src/heap/main-allocator.cc
@@ -298,6 +298,12 @@ void MainAllocator::ResetLab(Address start, Address end, Address extended_end) {
MemoryChunkMetadata::UpdateHighWaterMark(top());
}

// This slightly overestimates the total allocated bytes, since the LAB has
// not been used yet. However, the leftover compared to the LAB itself is
// quite small, so it seems tolerable.
if (local_heap_) {
local_heap_->heap()->AddTotalAllocatedBytes(end - start);
}
allocation_info().Reset(start, end);

if (SupportsPendingAllocation()) {
34 changes: 34 additions & 0 deletions deps/v8/src/maglev/riscv/maglev-ir-riscv.cc
@@ -224,6 +224,40 @@ void CheckedIntPtrToInt32::GenerateCode(MaglevAssembler* masm,
Operand(std::numeric_limits<int32_t>::min()));
}

void CheckFloat64SameValue::SetValueLocationConstraints() {
UseRegister(target_input());
// We need two because LoadFPRImmediate needs to acquire one as well in the
// case where value() is not 0.0 or -0.0.
set_temporaries_needed((value().get_scalar() == 0) ? 1 : 2);
set_double_temporaries_needed(
value().is_nan() || (value().get_scalar() == 0) ? 0 : 1);
}

void CheckFloat64SameValue::GenerateCode(MaglevAssembler* masm,
const ProcessingState& state) {
Label* fail = __ GetDeoptLabel(this, deoptimize_reason());
MaglevAssembler::TemporaryRegisterScope temps(masm);
DoubleRegister target = ToDoubleRegister(target_input());
if (value().is_nan()) {
__ JumpIfNotNan(target, fail);
} else {
DoubleRegister double_scratch = temps.AcquireScratchDouble();
Register scratch = temps.AcquireScratch();
__ Move(double_scratch, value().get_scalar());
__ CompareF64(scratch, EQ, double_scratch, target);
__ BranchFalseF(scratch, fail);
if (value().get_scalar() == 0) { // +0.0 or -0.0.
__ MacroAssembler::Move(scratch, target);
__ And(scratch, scratch, Operand(1ULL << 63));
if (value().get_bits() == 0) {
__ BranchTrueF(scratch, fail);
} else {
__ BranchFalseF(scratch, fail);
}
}
}
}

void Int32AddWithOverflow::SetValueLocationConstraints() {
UseRegister(left_input());
UseRegister(right_input());
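For context on the new `CheckFloat64SameValue` lowering above: it implements SameValue-style comparison for doubles, which is why NaN gets a dedicated path and why the sign bit of zero is inspected explicitly. A small JavaScript sketch of those semantics (illustrative only, not part of the diff):

```js
// SameValue (Object.is) distinguishes +0 from -0 and treats NaN as equal to
// itself, unlike strict equality — exactly the cases handled above.
console.log(Object.is(+0, -0));    // false: the sign bit differs
console.log(+0 === -0);            // true:  strict equality ignores the sign bit
console.log(Object.is(NaN, NaN));  // true:  NaN is SameValue-equal to itself
console.log(NaN === NaN);          // false
```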
7 changes: 6 additions & 1 deletion doc/api/v8.md
@@ -197,6 +197,7 @@ Returns an object with the following properties:
* `total_global_handles_size` {number}
* `used_global_handles_size` {number}
* `external_memory` {number}
* `total_allocated_bytes` {number}

`total_heap_size` The value of total\_heap\_size is the number of bytes V8 has
allocated for the heap. This can grow if used\_heap needs more memory.
Expand Down Expand Up @@ -250,6 +251,9 @@ used memory size of V8 global handles.
`external_memory` The value of external\_memory is the memory size of array
buffers and external strings.

`total_allocated_bytes` The value of total\_allocated\_bytes is the total
number of bytes V8 has allocated since the isolate was created.

<!-- eslint-skip -->

```js
@@ -267,7 +271,8 @@ buffers and external strings.
number_of_detached_contexts: 0,
total_global_handles_size: 8192,
used_global_handles_size: 3296,
external_memory: 318824
external_memory: 318824,
total_allocated_bytes: 24970208
}
```

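A minimal usage sketch for the new field (illustrative, not part of the diff): since `total_allocated_bytes` only ever grows, sampling it twice gives an approximate allocation rate. The one-second interval below is an arbitrary choice.

```js
const v8 = require('node:v8');

const before = v8.getHeapStatistics().total_allocated_bytes;

setTimeout(() => {
  const after = v8.getHeapStatistics().total_allocated_bytes;
  // The counter is cumulative since isolate creation, so the difference
  // approximates the bytes allocated during the interval.
  console.log(`~${after - before} bytes allocated in the last second`);
}, 1000);
```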
2 changes: 2 additions & 0 deletions lib/v8.js
@@ -117,6 +117,7 @@ const {
stopCpuProfile: _stopCpuProfile,
isStringOneByteRepresentation: _isStringOneByteRepresentation,
updateHeapStatisticsBuffer,
getTotalAllocatedBytes,
updateHeapSpaceStatisticsBuffer,
updateHeapCodeStatisticsBuffer,
setHeapSnapshotNearHeapLimit: _setHeapSnapshotNearHeapLimit,
@@ -246,6 +247,7 @@ function getHeapStatistics() {
total_global_handles_size: buffer[kTotalGlobalHandlesSizeIndex],
used_global_handles_size: buffer[kUsedGlobalHandlesSizeIndex],
external_memory: buffer[kExternalMemoryIndex],
total_allocated_bytes: getTotalAllocatedBytes(),
};
}

12 changes: 12 additions & 0 deletions src/node_v8.cc
@@ -212,6 +212,12 @@ void UpdateHeapStatisticsBuffer(const FunctionCallbackInfo<Value>& args) {
#undef V
}

void GetTotalAllocatedBytes(const FunctionCallbackInfo<Value>& args) {
Isolate* isolate = args.GetIsolate();
uint64_t allocated_bytes = isolate->GetTotalAllocatedBytes();
args.GetReturnValue().Set(Number::New(isolate, allocated_bytes));
}


void UpdateHeapSpaceStatisticsBuffer(const FunctionCallbackInfo<Value>& args) {
BindingData* data = Realm::GetBindingData<BindingData>(args);
@@ -692,6 +698,11 @@ void Initialize(Local<Object> target,
"updateHeapStatisticsBuffer",
UpdateHeapStatisticsBuffer);

SetMethod(context,
target,
"getTotalAllocatedBytes",
GetTotalAllocatedBytes);

SetMethod(context,
target,
"updateHeapCodeStatisticsBuffer",
@@ -773,6 +784,7 @@ void Initialize(Local<Object> target,
void RegisterExternalReferences(ExternalReferenceRegistry* registry) {
registry->Register(CachedDataVersionTag);
registry->Register(UpdateHeapStatisticsBuffer);
registry->Register(GetTotalAllocatedBytes);
registry->Register(UpdateHeapCodeStatisticsBuffer);
registry->Register(UpdateHeapSpaceStatisticsBuffer);
registry->Register(SetFlagsFromString);
4 changes: 3 additions & 1 deletion src/node_worker.cc
@@ -1263,6 +1263,7 @@ void Worker::GetHeapStatistics(const FunctionCallbackInfo<Value>& args) {
"total_global_handles_size",
"used_global_handles_size",
"external_memory",
"total_allocated_bytes",
};
tmpl = DictionaryTemplate::New(isolate, heap_stats_names);
env->set_heap_statistics_template(tmpl);
@@ -1283,7 +1284,8 @@
Number::New(isolate, heap_stats->number_of_detached_contexts()),
Number::New(isolate, heap_stats->total_global_handles_size()),
Number::New(isolate, heap_stats->used_global_handles_size()),
Number::New(isolate, heap_stats->external_memory())};
Number::New(isolate, heap_stats->external_memory()),
Number::New(isolate, isolate->GetTotalAllocatedBytes())};

Local<Object> obj;
if (!NewDictionaryInstanceNullProto(
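The worker heap-statistics dictionary above gains the same entry. A sketch of reading it from the parent thread, assuming the public `worker.getHeapStatistics()` wrapper that resolves with this dictionary:

```js
const { Worker } = require('node:worker_threads');

// Keep the worker's event loop alive so it can answer the statistics request.
const worker = new Worker('setInterval(() => {}, 1000);', { eval: true });

worker.once('online', async () => {
  const stats = await worker.getHeapStatistics();
  console.log(stats.total_allocated_bytes);
  await worker.terminate();
});
```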