diff --git a/common.gypi b/common.gypi
index cbe454d9bb0bed..f9fe11388dc242 100644
--- a/common.gypi
+++ b/common.gypi
@@ -38,7 +38,7 @@
 
     # Reset this number to 0 on major V8 upgrades.
     # Increment by one for each non-official patch applied to deps/v8.
-    'v8_embedder_string': '-node.13',
+    'v8_embedder_string': '-node.15',
 
     ##### V8 defaults for Node.js #####
 
diff --git a/deps/v8/src/codegen/riscv/assembler-riscv.cc b/deps/v8/src/codegen/riscv/assembler-riscv.cc
index 21a651c7c03cc1..b14c7782442f59 100644
--- a/deps/v8/src/codegen/riscv/assembler-riscv.cc
+++ b/deps/v8/src/codegen/riscv/assembler-riscv.cc
@@ -720,8 +720,8 @@ void Assembler::bind_to(Label* L, int pos) {
             trampoline_pos = get_trampoline_entry(fixup_pos);
             CHECK_NE(trampoline_pos, kInvalidSlotPos);
           }
-          CHECK((trampoline_pos - fixup_pos) <= kMaxBranchOffset);
           DEBUG_PRINTF("\t\ttrampolining: %d\n", trampoline_pos);
+          CHECK((trampoline_pos - fixup_pos) <= kMaxBranchOffset);
           target_at_put(fixup_pos, trampoline_pos, false);
           fixup_pos = trampoline_pos;
         }
@@ -1498,6 +1498,7 @@ void Assembler::BlockTrampolinePoolFor(int instructions) {
 }
 
 void Assembler::CheckTrampolinePool() {
+  if (trampoline_emitted_) return;
   // Some small sequences of instructions must not be broken up by the
   // insertion of a trampoline pool; such sequences are protected by setting
   // either trampoline_pool_blocked_nesting_ or no_trampoline_pool_before_,
@@ -1519,7 +1520,6 @@ void Assembler::CheckTrampolinePool() {
     return;
   }
 
-  DCHECK(!trampoline_emitted_);
   DCHECK_GE(unbound_labels_count_, 0);
   if (unbound_labels_count_ > 0) {
     // First we emit jump, then we emit trampoline pool.
diff --git a/deps/v8/src/codegen/riscv/assembler-riscv.h b/deps/v8/src/codegen/riscv/assembler-riscv.h
index dd9db7290036c9..d72efda7e89c8f 100644
--- a/deps/v8/src/codegen/riscv/assembler-riscv.h
+++ b/deps/v8/src/codegen/riscv/assembler-riscv.h
@@ -303,6 +303,8 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase,
   // See Assembler::CheckConstPool for more info.
   void EmitPoolGuard();
 
+  void FinishCode() { ForceConstantPoolEmissionWithoutJump(); }
+
 #if defined(V8_TARGET_ARCH_RISCV64)
   static void set_target_value_at(
       Address pc, uint64_t target,
@@ -618,6 +620,8 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase,
     }
   }
 
+  inline int next_buffer_check() { return next_buffer_check_; }
+
   friend class VectorUnit;
   class VectorUnit {
    public:
@@ -729,16 +733,19 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase,
 
   // Block the emission of the trampoline pool before pc_offset.
   void BlockTrampolinePoolBefore(int pc_offset) {
-    if (no_trampoline_pool_before_ < pc_offset)
+    if (no_trampoline_pool_before_ < pc_offset) {
+      DEBUG_PRINTF("\tBlockTrampolinePoolBefore %d\n", pc_offset);
       no_trampoline_pool_before_ = pc_offset;
+    }
   }
 
   void StartBlockTrampolinePool() {
-    DEBUG_PRINTF("\tStartBlockTrampolinePool\n");
+    DEBUG_PRINTF("\tStartBlockTrampolinePool %d\n", pc_offset());
     trampoline_pool_blocked_nesting_++;
   }
 
   void EndBlockTrampolinePool() {
+    DEBUG_PRINTF("\tEndBlockTrampolinePool\n");
     trampoline_pool_blocked_nesting_--;
     DEBUG_PRINTF("\ttrampoline_pool_blocked_nesting:%d\n",
                  trampoline_pool_blocked_nesting_);
@@ -768,6 +775,10 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase,
 
   bool is_buffer_growth_blocked() const { return block_buffer_growth_; }
 
+  inline int ConstpoolComputesize() {
+    return constpool_.ComputeSize(Jump::kOmitted, Alignment::kOmitted);
+  }
+
  private:
   // Avoid overflows for displacements etc.
   static const int kMaximalBufferSize = 512 * MB;
diff --git a/deps/v8/src/codegen/riscv/macro-assembler-riscv.cc b/deps/v8/src/codegen/riscv/macro-assembler-riscv.cc
index 2c2c5ccc850f60..04ffd05c0b12f7 100644
--- a/deps/v8/src/codegen/riscv/macro-assembler-riscv.cc
+++ b/deps/v8/src/codegen/riscv/macro-assembler-riscv.cc
@@ -4937,11 +4937,21 @@ void MacroAssembler::LoadRootRegisterOffset(Register destination,
 
 void MacroAssembler::Jump(Register target, Condition cond, Register rs,
                           const Operand& rt) {
-  BlockTrampolinePoolScope block_trampoline_pool(this);
   if (cond == cc_always) {
     jr(target);
+    DEBUG_PRINTF("\tCheckTrampolinePool pc_offset:%d %d\n", pc_offset(),
+                 next_buffer_check() - ConstpoolComputesize());
+    if (!is_trampoline_emitted() &&
+        pc_offset() >= (next_buffer_check() - ConstpoolComputesize())) {
+      // We need to check trampoline pool before Constant pool.
+      // Here need to emit trampoline first.
+      // Jump(ra, al) will block trampoline pool for 1 instr.
+      nop();
+      CheckTrampolinePool();
+    }
     ForceConstantPoolEmissionWithoutJump();
   } else {
+    BlockTrampolinePoolScope block_trampoline_pool(this);
     BRANCH_ARGS_CHECK(cond, rs, rt);
     Branch(kInstrSize * 2, NegateCondition(cond), rs, rt);
     jr(target);
@@ -5353,9 +5363,6 @@ void MacroAssembler::StoreReturnAddressAndCall(Register target) {
 
 void MacroAssembler::Ret(Condition cond, Register rs, const Operand& rt) {
   Jump(ra, cond, rs, rt);
-  if (cond == al) {
-    ForceConstantPoolEmissionWithoutJump();
-  }
 }
 
 void MacroAssembler::BranchLong(Label* L) {
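
The behavioral core of the patch is the cc_always path of MacroAssembler::Jump: before force-emitting the constant pool, it now checks whether the trampoline-pool deadline would otherwise be overshot and, if so, emits the trampoline pool first. Below is a minimal standalone sketch of that ordering decision, not V8 source; PoolModel and all of its members are hypothetical stand-ins for the assembler state exposed by pc_offset(), next_buffer_check(), ConstpoolComputesize(), and is_trampoline_emitted().

// Standalone illustration (not V8 code) of the ordering decision added to
// MacroAssembler::Jump for cond == cc_always. All names here are invented
// stand-ins for the real assembler state.
#include <cstdio>

struct PoolModel {
  int pc_offset = 0;            // current offset into the code buffer
  int next_buffer_check = 100;  // deadline for the next trampoline check
  bool trampoline_emitted = false;

  int ConstantPoolSize() const { return 16; }  // pretend constant-pool footprint

  void EmitTrampolinePool() {
    trampoline_emitted = true;
    std::printf("trampoline pool emitted at pc %d\n", pc_offset);
  }

  void ForceConstantPoolEmission() {
    std::printf("constant pool emitted at pc %d\n", pc_offset);
  }

  // Models the patched ordering: when an unconditional register jump lands
  // close to the trampoline deadline, emit the trampoline pool first so the
  // forced constant pool cannot push the check past its deadline.
  void JumpAlways() {
    if (!trampoline_emitted &&
        pc_offset >= next_buffer_check - ConstantPoolSize()) {
      EmitTrampolinePool();
    }
    ForceConstantPoolEmission();
  }
};

int main() {
  PoolModel m;
  m.pc_offset = 90;  // inside the danger window: 90 >= 100 - 16
  m.JumpAlways();    // prints the trampoline pool line, then the constant pool line
  return 0;
}

Compiled with any C++11 compiler, the sketch prints the trampoline pool before the constant pool, mirroring the order the patch enforces in the real assembler.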