From 5d7b5f5b484b23ce60cbc8c5ca48d5a2ef9e93b5 Mon Sep 17 00:00:00 2001 From: Cong-Cong Date: Wed, 29 Oct 2025 17:30:04 +0800 Subject: [PATCH 01/22] perf: memory pool --- src/cached_source.rs | 5 +-- src/helpers.rs | 25 +++++++----- src/lib.rs | 1 + src/replace_source.rs | 3 +- src/source.rs | 30 +++++++++----- src/source_map_source.rs | 6 +-- src/with_indices.rs | 36 ++++++++++------- src/work_context.rs | 86 ++++++++++++++++++++++++++++++++++++++++ 8 files changed, 148 insertions(+), 44 deletions(-) create mode 100644 src/work_context.rs diff --git a/src/cached_source.rs b/src/cached_source.rs index a98c61c3..254c2458 100644 --- a/src/cached_source.rs +++ b/src/cached_source.rs @@ -301,10 +301,7 @@ mod tests { #[test] fn should_produce_correct_output_for_cached_raw_source() { - let map_options = MapOptions { - columns: true, - final_source: true, - }; + let map_options = MapOptions::new(true); let source = RawStringSource::from("Test\nTest\nTest\n"); let mut on_chunk_count = 0; diff --git a/src/helpers.rs b/src/helpers.rs index a096f708..4b5640bc 100644 --- a/src/helpers.rs +++ b/src/helpers.rs @@ -2,18 +2,13 @@ use std::{ borrow::{BorrowMut, Cow}, cell::{OnceCell, RefCell}, marker::PhantomData, - ops::Range, + ops::Range, rc::Rc, }; use rustc_hash::FxHashMap as HashMap; use crate::{ - decoder::MappingsDecoder, - encoder::create_encoder, - linear_map::LinearMap, - source::{Mapping, OriginalLocation}, - with_indices::WithIndices, - MapOptions, Rope, SourceMap, + decoder::MappingsDecoder, encoder::create_encoder, linear_map::LinearMap, source::{Mapping, OriginalLocation}, with_indices::WithIndices, work_context::{self, WorkContext}, MapOptions, Rope, SourceMap }; // Adding this type because sourceContentLine not happy @@ -33,6 +28,7 @@ pub fn get_map<'a, S: StreamChunks>( &MapOptions { columns: options.columns, final_source: true, + work_context: options.work_context.clone(), }, // on_chunk &mut |_, mapping| { @@ -317,24 +313,28 @@ where MapOptions { columns: true, 
final_source: true, + .. } => stream_chunks_of_source_map_final( source, source_map, on_chunk, on_source, on_name, ), MapOptions { columns: true, final_source: false, + work_context } => stream_chunks_of_source_map_full( - source, source_map, on_chunk, on_source, on_name, + work_context.clone(), source, source_map, on_chunk, on_source, on_name, ), MapOptions { columns: false, final_source: true, + .. } => stream_chunks_of_source_map_lines_final( source, source_map, on_chunk, on_source, on_name, ), MapOptions { columns: false, final_source: false, + .. } => stream_chunks_of_source_map_lines_full( source, source_map, on_chunk, on_source, on_name, ), @@ -413,6 +413,7 @@ where } fn stream_chunks_of_source_map_full<'a, S>( + work_context: Rc, source: S, source_map: &'a SourceMap, on_chunk: OnChunk<'_, 'a>, @@ -423,7 +424,7 @@ where S: SourceText<'a> + 'a, { let lines = split_into_lines(&source); - let line_with_indices_list = lines.map(WithIndices::new).collect::>(); + let line_with_indices_list = lines.map(|line| WithIndices::new(work_context.clone(), line)).collect::>(); if line_with_indices_list.is_empty() { return GeneratedInfo { @@ -710,6 +711,7 @@ type InnerSourceIndexValueMapping<'a> = #[allow(clippy::too_many_arguments)] pub fn stream_chunks_of_combined_source_map<'a, S>( + work_context: Rc, source: S, source_map: &'a SourceMap, inner_source_name: &'a str, @@ -830,7 +832,7 @@ where match inner_source_contents.get(&inner_source_index) { Some(Some(source_content)) => Some( split_into_lines(source_content) - .map(WithIndices::new) + .map(|line| WithIndices::new(work_context.clone(), line)) .collect(), ), _ => None, @@ -930,7 +932,7 @@ where match inner_source_contents.get(&inner_source_index) { Some(Some(source_content)) => Some( split_into_lines(source_content) - .map(WithIndices::new) + .map(|line| WithIndices::new(work_context.clone(), line)) .collect(), ), _ => None, @@ -1167,6 +1169,7 @@ where &MapOptions { columns: options.columns, final_source: false, + 
work_context: options.work_context.clone(), }, ); } else { diff --git a/src/lib.rs b/src/lib.rs index 4d78f3b2..c51a3860 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -14,6 +14,7 @@ mod rope; mod source; mod source_map_source; mod with_indices; +mod work_context; pub use cached_source::CachedSource; pub use concat_source::ConcatSource; diff --git a/src/replace_source.rs b/src/replace_source.rs index ad8f2008..301c9e26 100644 --- a/src/replace_source.rs +++ b/src/replace_source.rs @@ -391,7 +391,7 @@ impl StreamChunks for ReplaceSource { match source_content { SourceContent::Raw(source) => { let lines = split_into_lines(source) - .map(WithIndices::new) + .map(|line| WithIndices::new(options.work_context.clone(), line)) .collect::>(); let matched = check_content_at_position(&lines, line, column, expected_chunk); @@ -411,6 +411,7 @@ impl StreamChunks for ReplaceSource { &MapOptions { columns: options.columns, final_source: false, + work_context: options.work_context.clone() }, &mut |chunk, mut mapping| { // SAFETY: final_source is false in ReplaceSource diff --git a/src/source.rs b/src/source.rs index e60a0e09..9835fb1c 100644 --- a/src/source.rs +++ b/src/source.rs @@ -1,19 +1,12 @@ use std::{ - any::{Any, TypeId}, - borrow::Cow, - convert::{TryFrom, TryInto}, - fmt, - hash::{Hash, Hasher}, - sync::Arc, + any::{Any, TypeId}, borrow::Cow, collections::BinaryHeap, convert::{TryFrom, TryInto}, fmt, hash::{Hash, Hasher}, rc::Rc, sync::Arc }; use dyn_clone::DynClone; use serde::{Deserialize, Serialize}; use crate::{ - helpers::{decode_mappings, StreamChunks}, - rope::Rope, - Result, + helpers::{decode_mappings, StreamChunks}, rope::Rope, work_context::WorkContext, Result }; /// An alias for `Box`. @@ -250,12 +243,28 @@ impl SourceExt for T { } /// Options for [Source::map]. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone)] pub struct MapOptions { /// Whether have columns info in generated [SourceMap] mappings. 
pub columns: bool, /// Whether the source will have changes, internal used for `ReplaceSource`, etc. pub(crate) final_source: bool, + pub(crate) work_context: Rc, +} + +impl PartialEq for MapOptions { + fn eq(&self, other: &Self) -> bool { + self.columns == other.columns && self.final_source == other.final_source + } +} + +impl Eq for MapOptions {} + +impl Hash for MapOptions { + fn hash(&self, state: &mut H) { + self.columns.hash(state); + self.final_source.hash(state); + } } impl Default for MapOptions { @@ -263,6 +272,7 @@ impl Default for MapOptions { Self { columns: true, final_source: false, + work_context: Default::default() } } } diff --git a/src/source_map_source.rs b/src/source_map_source.rs index 428838ef..68c95aa3 100644 --- a/src/source_map_source.rs +++ b/src/source_map_source.rs @@ -191,6 +191,7 @@ impl StreamChunks for SourceMapSource { ) -> crate::helpers::GeneratedInfo { if let Some(inner_source_map) = &self.inner_source_map { stream_chunks_of_combined_source_map( + options.work_context.clone(), &*self.value, &self.source_map, &self.name, @@ -469,10 +470,7 @@ mod tests { .into_string_lossy() .into_owned()); test_cached!(source, |s: &dyn Source| s.map(&MapOptions::default())); - test_cached!(source, |s: &dyn Source| s.map(&MapOptions { - columns: false, - final_source: true - })); + test_cached!(source, |s: &dyn Source| s.map(&MapOptions::new(false))); } #[test] diff --git a/src/with_indices.rs b/src/with_indices.rs index e4912521..12b35116 100644 --- a/src/with_indices.rs +++ b/src/with_indices.rs @@ -1,8 +1,8 @@ -use std::{cell::OnceCell, marker::PhantomData}; +use std::{cell::OnceCell, marker::PhantomData, rc::Rc}; -use crate::helpers::SourceText; +use crate::{helpers::SourceText, work_context::{PooledVec, WorkContext}}; -#[derive(Debug, Clone)] +#[derive(Debug)] pub struct WithIndices<'a, S> where S: SourceText<'a>, @@ -10,7 +10,8 @@ where /// line is a string reference pub line: S, /// the byte position of each `char` in `line` string slice . 
- pub indices_indexes: OnceCell>, + pub indices_indexes: OnceCell, + work_context: Rc, data: PhantomData<&'a S>, } @@ -18,10 +19,11 @@ impl<'a, S> WithIndices<'a, S> where S: SourceText<'a>, { - pub fn new(line: S) -> Self { + pub fn new(work_context: Rc, line: S) -> Self { Self { indices_indexes: OnceCell::new(), line, + work_context, data: PhantomData, } } @@ -32,8 +34,12 @@ where return S::default(); } - let indices_indexes = self.indices_indexes.get_or_init(|| { - self.line.char_indices().map(|(i, _)| i).collect::>() + let indices_indexes = &*self.indices_indexes.get_or_init(|| { + let mut vec = PooledVec::new(self.work_context.clone(), self.line.len()); + for (i, _) in self.line.char_indices() { + vec.push(i); + } + vec }); let str_len = self.line.len(); @@ -53,13 +59,15 @@ where /// tests are just copy from `substring` crate #[cfg(test)] mod tests { - use crate::Rope; + use std::rc::Rc; + +use crate::{work_context::WorkContext, Rope}; use super::WithIndices; #[test] fn test_substring() { assert_eq!( - WithIndices::new(Rope::from("foobar")).substring(0, 3), + WithIndices::new(Rc::new(WorkContext::default()), Rope::from("foobar")).substring(0, 3), "foo" ); } @@ -67,26 +75,26 @@ mod tests { #[test] fn test_out_of_bounds() { assert_eq!( - WithIndices::new(Rope::from("foobar")).substring(0, 10), + WithIndices::new(Rc::new(WorkContext::default()), Rope::from("foobar")).substring(0, 10), "foobar" ); - assert_eq!(WithIndices::new(Rope::from("foobar")).substring(6, 10), ""); + assert_eq!(WithIndices::new(Rc::new(WorkContext::default()), Rope::from("foobar")).substring(6, 10), ""); } #[test] fn test_start_less_than_end() { - assert_eq!(WithIndices::new(Rope::from("foobar")).substring(3, 2), ""); + assert_eq!(WithIndices::new(Rc::new(WorkContext::default()), Rope::from("foobar")).substring(3, 2), ""); } #[test] fn test_start_and_end_equal() { - assert_eq!(WithIndices::new(Rope::from("foobar")).substring(3, 3), ""); + 
assert_eq!(WithIndices::new(Rc::new(WorkContext::default()), Rope::from("foobar")).substring(3, 3), ""); } #[test] fn test_multiple_byte_characters() { assert_eq!( - WithIndices::new(Rope::from("fõøbα®")).substring(2, 5), + WithIndices::new(Rc::new(WorkContext::default()), Rope::from("fõøbα®")).substring(2, 5), "øbα" ); } diff --git a/src/work_context.rs b/src/work_context.rs new file mode 100644 index 00000000..2c06fbcf --- /dev/null +++ b/src/work_context.rs @@ -0,0 +1,86 @@ +use std::{cell::RefCell, collections::BTreeMap, rc::Rc}; + +const MIN_POOL_CAPACITY: usize = 64; + +#[derive(Default, Debug)] +pub struct WorkContext { + usize_vec_pool: RefCell>>>, +} + +impl WorkContext { + pub fn new() -> Self { + Self { + usize_vec_pool: RefCell::new(BTreeMap::new()), + } + } + + pub fn pull_usize_vec(&self, requested_capacity: usize) -> Vec { + if requested_capacity < MIN_POOL_CAPACITY { + return Vec::with_capacity(requested_capacity); + } + let mut usize_vec_pool = self.usize_vec_pool.borrow_mut(); + if let Some((_, bucket)) = usize_vec_pool.range_mut(requested_capacity..).next() { + if let Some(mut v) = bucket.pop() { + v.clear(); + return v; + } + } + Vec::with_capacity(requested_capacity) + } + + pub fn return_usize_vec(&self, vec: Vec) { + if vec.capacity() < MIN_POOL_CAPACITY { + return; + } + let mut usize_vec_pool = self.usize_vec_pool.borrow_mut(); + let cap = vec.capacity(); + let bucket = usize_vec_pool.entry(cap).or_default(); + bucket.push(vec); + } +} + +#[derive(Debug)] +pub struct PooledVec { + vec: Option>, + context: Rc, +} + +impl PooledVec { + pub fn new(context: Rc, requested_capacity: usize) -> Self { + let vec = context.pull_usize_vec(requested_capacity); + Self { + vec: Some(vec), + context, + } + } + + pub fn as_mut(&mut self) -> &mut Vec { + self.vec.as_mut().unwrap() + } + + pub fn as_ref(&self) -> &Vec { + self.vec.as_ref().unwrap() + } +} + +impl Drop for PooledVec { + fn drop(&mut self) { + if let Some(vec) = self.vec.take() { + 
self.context.return_usize_vec(vec); + } + } +} + +impl std::ops::Deref for PooledVec { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + self.as_ref() + } +} + +impl std::ops::DerefMut for PooledVec { + fn deref_mut(&mut self) -> &mut Self::Target { + self.as_mut() + } +} From 8c24d2062f3999283a81213cabbb837a728ebf7f Mon Sep 17 00:00:00 2001 From: Cong-Cong Date: Wed, 29 Oct 2025 18:00:26 +0800 Subject: [PATCH 02/22] cargo fmt --- src/helpers.rs | 37 +++++++++++++++++++------- src/replace_source.rs | 16 ++++++----- src/source.rs | 15 ++++++++--- src/with_indices.rs | 62 ++++++++++++++++++++++++++----------------- src/work_context.rs | 37 +++++++++++++++----------- 5 files changed, 108 insertions(+), 59 deletions(-) diff --git a/src/helpers.rs b/src/helpers.rs index 4b5640bc..80fc0c2c 100644 --- a/src/helpers.rs +++ b/src/helpers.rs @@ -2,18 +2,26 @@ use std::{ borrow::{BorrowMut, Cow}, cell::{OnceCell, RefCell}, marker::PhantomData, - ops::Range, rc::Rc, + ops::Range, + rc::Rc, }; use rustc_hash::FxHashMap as HashMap; use crate::{ - decoder::MappingsDecoder, encoder::create_encoder, linear_map::LinearMap, source::{Mapping, OriginalLocation}, with_indices::WithIndices, work_context::{self, WorkContext}, MapOptions, Rope, SourceMap + decoder::MappingsDecoder, + encoder::create_encoder, + linear_map::LinearMap, + source::{Mapping, OriginalLocation}, + with_indices::WithIndices, + work_context::WorkContext, + MapOptions, Rope, SourceMap, }; // Adding this type because sourceContentLine not happy -type InnerSourceContentLine<'a, 'b> = - RefCell>>>>>>; +type InnerSourceContentLine<'context, 'text> = RefCell< + LinearMap>>>>>, +>; pub fn get_map<'a, S: StreamChunks>( stream: &'a S, @@ -320,9 +328,14 @@ where MapOptions { columns: true, final_source: false, - work_context + work_context, } => stream_chunks_of_source_map_full( - work_context.clone(), source, source_map, on_chunk, on_source, on_name, + work_context.clone(), + source, + source_map, + on_chunk, + 
on_source, + on_name, ), MapOptions { columns: false, @@ -424,7 +437,9 @@ where S: SourceText<'a> + 'a, { let lines = split_into_lines(&source); - let line_with_indices_list = lines.map(|line| WithIndices::new(work_context.clone(), line)).collect::>(); + let line_with_indices_list = lines + .map(|line| WithIndices::new(work_context.as_ref(), line)) + .collect::>(); if line_with_indices_list.is_empty() { return GeneratedInfo { @@ -832,7 +847,9 @@ where match inner_source_contents.get(&inner_source_index) { Some(Some(source_content)) => Some( split_into_lines(source_content) - .map(|line| WithIndices::new(work_context.clone(), line)) + .map(|line| { + WithIndices::new(work_context.as_ref(), line) + }) .collect(), ), _ => None, @@ -932,7 +949,9 @@ where match inner_source_contents.get(&inner_source_index) { Some(Some(source_content)) => Some( split_into_lines(source_content) - .map(|line| WithIndices::new(work_context.clone(), line)) + .map(|line| { + WithIndices::new(work_context.as_ref(), line) + }) .collect(), ), _ => None, diff --git a/src/replace_source.rs b/src/replace_source.rs index 301c9e26..2bbdc2a2 100644 --- a/src/replace_source.rs +++ b/src/replace_source.rs @@ -313,13 +313,13 @@ impl std::fmt::Debug for ReplaceSource { } } -enum SourceContent<'a> { - Raw(Rope<'a>), - Lines(Vec>>), +enum SourceContent<'context, 'text> { + Raw(Rope<'text>), + Lines(Vec>>), } -fn check_content_at_position<'a>( - lines: &[WithIndices<'a, Rope<'a>>], +fn check_content_at_position<'text>( + lines: &[WithIndices<'_, 'text, Rope<'text>>], line: u32, column: u32, expected: Rope, // FIXME: memory @@ -391,7 +391,9 @@ impl StreamChunks for ReplaceSource { match source_content { SourceContent::Raw(source) => { let lines = split_into_lines(source) - .map(|line| WithIndices::new(options.work_context.clone(), line)) + .map(|line| { + WithIndices::new(options.work_context.as_ref(), line) + }) .collect::>(); let matched = check_content_at_position(&lines, line, column, expected_chunk); @@ 
-411,7 +413,7 @@ impl StreamChunks for ReplaceSource { &MapOptions { columns: options.columns, final_source: false, - work_context: options.work_context.clone() + work_context: options.work_context.clone(), }, &mut |chunk, mut mapping| { // SAFETY: final_source is false in ReplaceSource diff --git a/src/source.rs b/src/source.rs index 9835fb1c..561e8fd0 100644 --- a/src/source.rs +++ b/src/source.rs @@ -1,12 +1,21 @@ use std::{ - any::{Any, TypeId}, borrow::Cow, collections::BinaryHeap, convert::{TryFrom, TryInto}, fmt, hash::{Hash, Hasher}, rc::Rc, sync::Arc + any::{Any, TypeId}, + borrow::Cow, + convert::{TryFrom, TryInto}, + fmt, + hash::{Hash, Hasher}, + rc::Rc, + sync::Arc, }; use dyn_clone::DynClone; use serde::{Deserialize, Serialize}; use crate::{ - helpers::{decode_mappings, StreamChunks}, rope::Rope, work_context::WorkContext, Result + helpers::{decode_mappings, StreamChunks}, + rope::Rope, + work_context::WorkContext, + Result, }; /// An alias for `Box`. @@ -272,7 +281,7 @@ impl Default for MapOptions { Self { columns: true, final_source: false, - work_context: Default::default() + work_context: Default::default(), } } } diff --git a/src/with_indices.rs b/src/with_indices.rs index 12b35116..bd44ff0c 100644 --- a/src/with_indices.rs +++ b/src/with_indices.rs @@ -1,25 +1,28 @@ -use std::{cell::OnceCell, marker::PhantomData, rc::Rc}; +use std::{cell::OnceCell, marker::PhantomData}; -use crate::{helpers::SourceText, work_context::{PooledVec, WorkContext}}; +use crate::{ + helpers::SourceText, + work_context::{PooledUsizeVec, WorkContext}, +}; #[derive(Debug)] -pub struct WithIndices<'a, S> +pub struct WithIndices<'context, 'text, S> where - S: SourceText<'a>, + S: SourceText<'text>, { /// line is a string reference pub line: S, /// the byte position of each `char` in `line` string slice . 
- pub indices_indexes: OnceCell, - work_context: Rc, - data: PhantomData<&'a S>, + pub indices_indexes: OnceCell>, + work_context: &'context WorkContext, + data: PhantomData<&'text S>, } -impl<'a, S> WithIndices<'a, S> +impl<'context, 'text, S> WithIndices<'context, 'text, S> where - S: SourceText<'a>, + S: SourceText<'text>, { - pub fn new(work_context: Rc, line: S) -> Self { + pub fn new(work_context: &'context WorkContext, line: S) -> Self { Self { indices_indexes: OnceCell::new(), line, @@ -34,11 +37,9 @@ where return S::default(); } - let indices_indexes = &*self.indices_indexes.get_or_init(|| { - let mut vec = PooledVec::new(self.work_context.clone(), self.line.len()); - for (i, _) in self.line.char_indices() { - vec.push(i); - } + let indices_indexes = self.indices_indexes.get_or_init(|| { + let mut vec = PooledUsizeVec::new(self.work_context, self.line.len()); + vec.extend(self.line.char_indices().map(|(i, _)| i)); vec }); @@ -59,15 +60,14 @@ where /// tests are just copy from `substring` crate #[cfg(test)] mod tests { - use std::rc::Rc; - -use crate::{work_context::WorkContext, Rope}; + use crate::{work_context::WorkContext, Rope}; use super::WithIndices; #[test] fn test_substring() { assert_eq!( - WithIndices::new(Rc::new(WorkContext::default()), Rope::from("foobar")).substring(0, 3), + WithIndices::new(&WorkContext::default(), Rope::from("foobar")) + .substring(0, 3), "foo" ); } @@ -75,26 +75,40 @@ use crate::{work_context::WorkContext, Rope}; #[test] fn test_out_of_bounds() { assert_eq!( - WithIndices::new(Rc::new(WorkContext::default()), Rope::from("foobar")).substring(0, 10), + WithIndices::new(&WorkContext::default(), Rope::from("foobar")) + .substring(0, 10), "foobar" ); - assert_eq!(WithIndices::new(Rc::new(WorkContext::default()), Rope::from("foobar")).substring(6, 10), ""); + assert_eq!( + WithIndices::new(&WorkContext::default(), Rope::from("foobar")) + .substring(6, 10), + "" + ); } #[test] fn test_start_less_than_end() { - 
assert_eq!(WithIndices::new(Rc::new(WorkContext::default()), Rope::from("foobar")).substring(3, 2), ""); + assert_eq!( + WithIndices::new(&WorkContext::default(), Rope::from("foobar")) + .substring(3, 2), + "" + ); } #[test] fn test_start_and_end_equal() { - assert_eq!(WithIndices::new(Rc::new(WorkContext::default()), Rope::from("foobar")).substring(3, 3), ""); + assert_eq!( + WithIndices::new(&WorkContext::default(), Rope::from("foobar")) + .substring(3, 3), + "" + ); } #[test] fn test_multiple_byte_characters() { assert_eq!( - WithIndices::new(Rc::new(WorkContext::default()), Rope::from("fõøbα®")).substring(2, 5), + WithIndices::new(&WorkContext::default(), Rope::from("fõøbα®")) + .substring(2, 5), "øbα" ); } diff --git a/src/work_context.rs b/src/work_context.rs index 2c06fbcf..210e26df 100644 --- a/src/work_context.rs +++ b/src/work_context.rs @@ -1,5 +1,12 @@ -use std::{cell::RefCell, collections::BTreeMap, rc::Rc}; +use std::{cell::RefCell, collections::BTreeMap}; +// Vector pooling minimum capacity threshold +// Recommended threshold: 64 +// Reasons: +// 1. Memory consideration: 64 * 8 bytes = 512 bytes, a reasonable memory block size +// 2. Allocation cost: Allocations smaller than 512 bytes are usually fast, pooling benefits are limited +// 3. Cache friendly: 512 bytes can typically utilize CPU cache well +// 4. 
Empirical value: 64 is a proven balance point in real projects const MIN_POOL_CAPACITY: usize = 64; #[derive(Default, Debug)] @@ -8,18 +15,16 @@ pub struct WorkContext { } impl WorkContext { - pub fn new() -> Self { - Self { - usize_vec_pool: RefCell::new(BTreeMap::new()), - } - } - pub fn pull_usize_vec(&self, requested_capacity: usize) -> Vec { - if requested_capacity < MIN_POOL_CAPACITY { + if requested_capacity < MIN_POOL_CAPACITY + || self.usize_vec_pool.borrow().len() == 0 + { return Vec::with_capacity(requested_capacity); } let mut usize_vec_pool = self.usize_vec_pool.borrow_mut(); - if let Some((_, bucket)) = usize_vec_pool.range_mut(requested_capacity..).next() { + if let Some((_, bucket)) = + usize_vec_pool.range_mut(requested_capacity..).next() + { if let Some(mut v) = bucket.pop() { v.clear(); return v; @@ -40,13 +45,13 @@ impl WorkContext { } #[derive(Debug)] -pub struct PooledVec { +pub struct PooledUsizeVec<'a> { vec: Option>, - context: Rc, + context: &'a WorkContext, } -impl PooledVec { - pub fn new(context: Rc, requested_capacity: usize) -> Self { +impl<'a> PooledUsizeVec<'a> { + pub fn new(context: &'a WorkContext, requested_capacity: usize) -> Self { let vec = context.pull_usize_vec(requested_capacity); Self { vec: Some(vec), @@ -63,7 +68,7 @@ impl PooledVec { } } -impl Drop for PooledVec { +impl Drop for PooledUsizeVec<'_> { fn drop(&mut self) { if let Some(vec) = self.vec.take() { self.context.return_usize_vec(vec); @@ -71,7 +76,7 @@ impl Drop for PooledVec { } } -impl std::ops::Deref for PooledVec { +impl std::ops::Deref for PooledUsizeVec<'_> { type Target = Vec; fn deref(&self) -> &Self::Target { @@ -79,7 +84,7 @@ impl std::ops::Deref for PooledVec { } } -impl std::ops::DerefMut for PooledVec { +impl std::ops::DerefMut for PooledUsizeVec<'_> { fn deref_mut(&mut self) -> &mut Self::Target { self.as_mut() } From 0ae1644ad1dbe1da73519a5398af77da78ac3218 Mon Sep 17 00:00:00 2001 From: Cong-Cong Date: Wed, 29 Oct 2025 19:23:09 +0800 Subject: 
[PATCH 03/22] refactor --- src/cached_source.rs | 15 +++++++++++++-- src/concat_source.rs | 16 ++++++++++++---- src/helpers.rs | 37 ++++++++++++++++++++++--------------- src/lib.rs | 1 + src/original_source.rs | 5 +++-- src/raw_source.rs | 4 +++- src/replace_source.rs | 11 +++++------ src/source.rs | 13 ++++++++----- src/source_map_source.rs | 7 +++++-- tests/compat_source.rs | 4 +++- 10 files changed, 75 insertions(+), 38 deletions(-) diff --git a/src/cached_source.rs b/src/cached_source.rs index 254c2458..4cef9059 100644 --- a/src/cached_source.rs +++ b/src/cached_source.rs @@ -13,7 +13,7 @@ use crate::{ }, rope::Rope, source::SourceValue, - BoxSource, MapOptions, Source, SourceExt, SourceMap, + BoxSource, MapOptions, Source, SourceExt, SourceMap, WorkContext, }; #[derive(Default)] @@ -123,6 +123,7 @@ impl StreamChunks for CachedSource { fn stream_chunks<'a>( &'a self, options: &MapOptions, + work_context: &'a WorkContext, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, on_name: crate::helpers::OnName<'_, 'a>, @@ -137,7 +138,13 @@ impl StreamChunks for CachedSource { let source = self.rope(); if let Some(map) = map { stream_chunks_of_source_map( - source, map, on_chunk, on_source, on_name, options, + work_context, + source, + map, + on_chunk, + on_source, + on_name, + options, ) } else { stream_chunks_of_raw_source( @@ -149,6 +156,7 @@ impl StreamChunks for CachedSource { let (generated_info, map) = stream_and_get_source_and_map( &self.inner, options, + work_context, on_chunk, on_source, on_name, @@ -309,6 +317,7 @@ mod tests { let mut on_name_count = 0; let generated_info = source.stream_chunks( &map_options, + &WorkContext::default(), &mut |_chunk, _mapping| { on_chunk_count += 1; }, @@ -323,6 +332,7 @@ mod tests { let cached_source = CachedSource::new(source); cached_source.stream_chunks( &map_options, + &WorkContext::default(), &mut |_chunk, _mapping| {}, &mut |_source_index, _source, _source_content| {}, &mut 
|_name_index, _name| {}, @@ -333,6 +343,7 @@ mod tests { let mut cached_on_name_count = 0; let cached_generated_info = cached_source.stream_chunks( &map_options, + &WorkContext::default(), &mut |_chunk, _mapping| { cached_on_chunk_count += 1; }, diff --git a/src/concat_source.rs b/src/concat_source.rs index e7048d4d..14919c79 100644 --- a/src/concat_source.rs +++ b/src/concat_source.rs @@ -11,8 +11,8 @@ use crate::{ helpers::{get_map, GeneratedInfo, OnChunk, OnName, OnSource, StreamChunks}, linear_map::LinearMap, source::{Mapping, OriginalLocation}, - BoxSource, MapOptions, RawStringSource, Rope, Source, SourceExt, SourceMap, - SourceValue, + work_context, BoxSource, MapOptions, RawStringSource, Rope, Source, + SourceExt, SourceMap, SourceValue, WorkContext, }; /// Concatenate multiple [Source]s to a single [Source]. @@ -200,7 +200,7 @@ impl Source for ConcatSource { } fn map(&self, options: &MapOptions) -> Option { - get_map(self, options) + get_map(&WorkContext::default(), self, options) } fn to_writer(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> { @@ -231,6 +231,7 @@ impl StreamChunks for ConcatSource { fn stream_chunks<'a>( &'a self, options: &MapOptions, + work_context: &'a WorkContext, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, @@ -238,7 +239,13 @@ impl StreamChunks for ConcatSource { let children = self.optimized_children(); if children.len() == 1 { - return children[0].stream_chunks(options, on_chunk, on_source, on_name); + return children[0].stream_chunks( + options, + work_context, + on_chunk, + on_source, + on_name, + ); } let mut current_line_offset = 0; let mut current_column_offset = 0; @@ -260,6 +267,7 @@ impl StreamChunks for ConcatSource { generated_column, } = item.stream_chunks( options, + work_context, &mut |chunk, mapping| { let line = mapping.generated_line + current_line_offset; let column = if mapping.generated_line == 1 { diff --git a/src/helpers.rs b/src/helpers.rs index 
80fc0c2c..984bfd14 100644 --- a/src/helpers.rs +++ b/src/helpers.rs @@ -24,6 +24,7 @@ type InnerSourceContentLine<'context, 'text> = RefCell< >; pub fn get_map<'a, S: StreamChunks>( + work_context: &'a WorkContext, stream: &'a S, options: &'a MapOptions, ) -> Option { @@ -36,8 +37,8 @@ pub fn get_map<'a, S: StreamChunks>( &MapOptions { columns: options.columns, final_source: true, - work_context: options.work_context.clone(), }, + work_context, // on_chunk &mut |_, mapping| { mappings_encoder.encode(&mapping); @@ -76,6 +77,7 @@ pub trait StreamChunks { fn stream_chunks<'a>( &'a self, options: &MapOptions, + context: &'a WorkContext, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, @@ -94,6 +96,7 @@ pub type OnName<'a, 'b> = &'a mut dyn FnMut(u32, Cow<'b, str>); /// Default stream chunks behavior impl, see [webpack-sources streamChunks](https://github.com/webpack/webpack-sources/blob/9f98066311d53a153fdc7c633422a1d086528027/lib/helpers/streamChunks.js#L15-L35). pub fn stream_chunks_default<'a, S>( + work_context: &'a WorkContext, source: S, source_map: Option<&'a SourceMap>, options: &MapOptions, @@ -106,7 +109,13 @@ where { if let Some(map) = source_map { stream_chunks_of_source_map( - source, map, on_chunk, on_source, on_name, options, + work_context, + source, + map, + on_chunk, + on_source, + on_name, + options, ) } else { stream_chunks_of_raw_source(source, options, on_chunk, on_source, on_name) @@ -307,6 +316,7 @@ where } pub fn stream_chunks_of_source_map<'a, S>( + work_context: &'a WorkContext, source: S, source_map: &'a SourceMap, on_chunk: OnChunk<'_, 'a>, @@ -321,16 +331,14 @@ where MapOptions { columns: true, final_source: true, - .. 
} => stream_chunks_of_source_map_final( source, source_map, on_chunk, on_source, on_name, ), MapOptions { columns: true, final_source: false, - work_context, } => stream_chunks_of_source_map_full( - work_context.clone(), + work_context, source, source_map, on_chunk, @@ -426,7 +434,7 @@ where } fn stream_chunks_of_source_map_full<'a, S>( - work_context: Rc, + work_context: &'a WorkContext, source: S, source_map: &'a SourceMap, on_chunk: OnChunk<'_, 'a>, @@ -438,7 +446,7 @@ where { let lines = split_into_lines(&source); let line_with_indices_list = lines - .map(|line| WithIndices::new(work_context.as_ref(), line)) + .map(|line| WithIndices::new(work_context, line)) .collect::>(); if line_with_indices_list.is_empty() { @@ -726,7 +734,7 @@ type InnerSourceIndexValueMapping<'a> = #[allow(clippy::too_many_arguments)] pub fn stream_chunks_of_combined_source_map<'a, S>( - work_context: Rc, + work_context: &'a WorkContext, source: S, source_map: &'a SourceMap, inner_source_name: &'a str, @@ -792,6 +800,7 @@ where }; stream_chunks_of_source_map( + work_context, source.clone(), source_map, &mut |chunk, mapping| { @@ -847,9 +856,7 @@ where match inner_source_contents.get(&inner_source_index) { Some(Some(source_content)) => Some( split_into_lines(source_content) - .map(|line| { - WithIndices::new(work_context.as_ref(), line) - }) + .map(|line| WithIndices::new(work_context, line)) .collect(), ), _ => None, @@ -949,9 +956,7 @@ where match inner_source_contents.get(&inner_source_index) { Some(Some(source_content)) => Some( split_into_lines(source_content) - .map(|line| { - WithIndices::new(work_context.as_ref(), line) - }) + .map(|line| WithIndices::new(work_context, line)) .collect(), ), _ => None, @@ -1116,6 +1121,7 @@ where } source_index_mapping.borrow_mut().insert(i, -2); stream_chunks_of_source_map( + work_context, source_content.unwrap(), inner_source_map, &mut |chunk, mapping| { @@ -1188,7 +1194,6 @@ where &MapOptions { columns: options.columns, final_source: false, - 
work_context: options.work_context.clone(), }, ); } else { @@ -1216,6 +1221,7 @@ where pub fn stream_and_get_source_and_map<'a, S: StreamChunks>( input_source: &'a S, options: &MapOptions, + work_context: &'a WorkContext, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, @@ -1227,6 +1233,7 @@ pub fn stream_and_get_source_and_map<'a, S: StreamChunks>( let generated_info = input_source.stream_chunks( options, + work_context, &mut |chunk, mapping| { mappings_encoder.encode(&mapping); on_chunk(chunk, mapping); diff --git a/src/lib.rs b/src/lib.rs index c51a3860..f970785f 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -30,6 +30,7 @@ pub use source::{ pub use source_map_source::{ SourceMapSource, SourceMapSourceOptions, WithoutOriginalOptions, }; +pub use work_context::WorkContext; /// Reexport `StreamChunks` related types. pub mod stream_chunks { diff --git a/src/original_source.rs b/src/original_source.rs index 47349abc..d99f418e 100644 --- a/src/original_source.rs +++ b/src/original_source.rs @@ -10,7 +10,7 @@ use crate::{ SourceText, StreamChunks, }, source::{Mapping, OriginalLocation}, - MapOptions, Rope, Source, SourceMap, SourceValue, + work_context, MapOptions, Rope, Source, SourceMap, SourceValue, WorkContext, }; /// Represents source code, it will create source map for the source code, @@ -68,7 +68,7 @@ impl Source for OriginalSource { } fn map(&self, options: &MapOptions) -> Option { - get_map(self, options) + get_map(&WorkContext::default(), self, options) } fn to_writer(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> { @@ -109,6 +109,7 @@ impl StreamChunks for OriginalSource { fn stream_chunks<'a>( &'a self, options: &MapOptions, + work_context: &'a WorkContext, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, _on_name: OnName, diff --git a/src/raw_source.rs b/src/raw_source.rs index f55856ba..347a314e 100644 --- a/src/raw_source.rs +++ b/src/raw_source.rs @@ -9,7 +9,7 @@ use crate::{ 
get_generated_source_info, stream_chunks_of_raw_source, OnChunk, OnName, OnSource, StreamChunks, }, - MapOptions, Rope, Source, SourceMap, SourceValue, + MapOptions, Rope, Source, SourceMap, SourceValue, WorkContext, }; /// A string variant of [RawStringSource]. @@ -110,6 +110,7 @@ impl StreamChunks for RawStringSource { fn stream_chunks<'a>( &'a self, options: &MapOptions, + work_context: &'a WorkContext, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, @@ -243,6 +244,7 @@ impl StreamChunks for RawBufferSource { fn stream_chunks<'a>( &'a self, options: &MapOptions, + work_context: &'a WorkContext, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, diff --git a/src/replace_source.rs b/src/replace_source.rs index 2bbdc2a2..9ec61799 100644 --- a/src/replace_source.rs +++ b/src/replace_source.rs @@ -19,7 +19,7 @@ use crate::{ rope::Rope, with_indices::WithIndices, BoxSource, MapOptions, Mapping, OriginalLocation, Source, SourceExt, - SourceMap, SourceValue, + SourceMap, SourceValue, WorkContext, }; /// Decorates a Source with replacements and insertions of source code, @@ -263,7 +263,7 @@ impl Source for ReplaceSource { if replacements.is_empty() { return self.inner.map(options); } - get_map(self, options) + get_map(&WorkContext::default(), self, options) } fn to_writer(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> { @@ -337,6 +337,7 @@ impl StreamChunks for ReplaceSource { fn stream_chunks<'a>( &'a self, options: &crate::MapOptions, + work_context: &'a WorkContext, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, on_name: crate::helpers::OnName<'_, 'a>, @@ -391,9 +392,7 @@ impl StreamChunks for ReplaceSource { match source_content { SourceContent::Raw(source) => { let lines = split_into_lines(source) - .map(|line| { - WithIndices::new(options.work_context.as_ref(), line) - }) + .map(|line| WithIndices::new(work_context, line)) .collect::>(); let 
matched = check_content_at_position(&lines, line, column, expected_chunk); @@ -413,8 +412,8 @@ impl StreamChunks for ReplaceSource { &MapOptions { columns: options.columns, final_source: false, - work_context: options.work_context.clone(), }, + work_context, &mut |chunk, mut mapping| { // SAFETY: final_source is false in ReplaceSource let chunk = chunk.unwrap(); diff --git a/src/source.rs b/src/source.rs index 561e8fd0..5fd26637 100644 --- a/src/source.rs +++ b/src/source.rs @@ -169,13 +169,18 @@ impl StreamChunks for BoxSource { fn stream_chunks<'a>( &'a self, options: &MapOptions, + work_context: &'a WorkContext, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, on_name: crate::helpers::OnName<'_, 'a>, ) -> crate::helpers::GeneratedInfo { - self - .as_ref() - .stream_chunks(options, on_chunk, on_source, on_name) + self.as_ref().stream_chunks( + options, + work_context, + on_chunk, + on_source, + on_name, + ) } } @@ -258,7 +263,6 @@ pub struct MapOptions { pub columns: bool, /// Whether the source will have changes, internal used for `ReplaceSource`, etc. 
pub(crate) final_source: bool, - pub(crate) work_context: Rc, } impl PartialEq for MapOptions { @@ -281,7 +285,6 @@ impl Default for MapOptions { Self { columns: true, final_source: false, - work_context: Default::default(), } } } diff --git a/src/source_map_source.rs b/src/source_map_source.rs index 68c95aa3..0b0d4bdb 100644 --- a/src/source_map_source.rs +++ b/src/source_map_source.rs @@ -8,6 +8,7 @@ use crate::{ get_map, stream_chunks_of_combined_source_map, stream_chunks_of_source_map, StreamChunks, }, + work_context::{self, WorkContext}, MapOptions, Rope, Source, SourceMap, SourceValue, }; @@ -108,7 +109,7 @@ impl Source for SourceMapSource { if self.inner_source_map.is_none() { return Some(self.source_map.clone()); } - get_map(self, options) + get_map(&WorkContext::default(), self, options) } fn to_writer(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> { @@ -185,13 +186,14 @@ impl StreamChunks for SourceMapSource { fn stream_chunks<'a>( &'a self, options: &MapOptions, + work_context: &'a WorkContext, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, on_name: crate::helpers::OnName<'_, 'a>, ) -> crate::helpers::GeneratedInfo { if let Some(inner_source_map) = &self.inner_source_map { stream_chunks_of_combined_source_map( - options.work_context.clone(), + work_context, &*self.value, &self.source_map, &self.name, @@ -205,6 +207,7 @@ impl StreamChunks for SourceMapSource { ) } else { stream_chunks_of_source_map( + work_context, self.value.as_str(), &self.source_map, on_chunk, diff --git a/tests/compat_source.rs b/tests/compat_source.rs index 5cc97576..50fdab42 100644 --- a/tests/compat_source.rs +++ b/tests/compat_source.rs @@ -7,7 +7,7 @@ use rspack_sources::stream_chunks::{ }; use rspack_sources::{ ConcatSource, MapOptions, RawStringSource, Rope, Source, SourceExt, - SourceMap, SourceValue, + SourceMap, SourceValue, WorkContext, }; #[derive(Debug, Eq)] @@ -43,11 +43,13 @@ impl StreamChunks for CompatSource { 
fn stream_chunks<'a>( &'a self, options: &MapOptions, + work_context: &'a WorkContext, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, ) -> GeneratedInfo { stream_chunks_default( + work_context, self.0, self.1.as_ref(), options, From e015025e801532ffafe78473402418dae48b1971 Mon Sep 17 00:00:00 2001 From: Cong-Cong Date: Wed, 29 Oct 2025 19:39:34 +0800 Subject: [PATCH 04/22] refactor --- src/cached_source.rs | 14 +++++----- src/concat_source.rs | 12 ++++----- src/helpers.rs | 36 ++++++++++++------------- src/lib.rs | 4 +-- src/{work_context.rs => memory_pool.rs} | 11 +++++--- src/original_source.rs | 6 ++--- src/raw_source.rs | 6 ++--- src/replace_source.rs | 12 ++++----- src/source.rs | 8 +++--- src/source_map_source.rs | 10 +++---- src/with_indices.rs | 27 +++++++++---------- tests/compat_source.rs | 8 +++--- 12 files changed, 75 insertions(+), 79 deletions(-) rename src/{work_context.rs => memory_pool.rs} (85%) diff --git a/src/cached_source.rs b/src/cached_source.rs index 4cef9059..ae93dfc9 100644 --- a/src/cached_source.rs +++ b/src/cached_source.rs @@ -13,7 +13,7 @@ use crate::{ }, rope::Rope, source::SourceValue, - BoxSource, MapOptions, Source, SourceExt, SourceMap, WorkContext, + BoxSource, MapOptions, MemoryPool, Source, SourceExt, SourceMap, }; #[derive(Default)] @@ -122,8 +122,8 @@ impl Source for CachedSource { impl StreamChunks for CachedSource { fn stream_chunks<'a>( &'a self, + memory_pool: &'a MemoryPool, options: &MapOptions, - work_context: &'a WorkContext, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, on_name: crate::helpers::OnName<'_, 'a>, @@ -138,7 +138,7 @@ impl StreamChunks for CachedSource { let source = self.rope(); if let Some(map) = map { stream_chunks_of_source_map( - work_context, + memory_pool, source, map, on_chunk, @@ -154,9 +154,9 @@ impl StreamChunks for CachedSource { } None => { let (generated_info, map) = stream_and_get_source_and_map( + memory_pool, 
&self.inner, options, - work_context, on_chunk, on_source, on_name, @@ -316,8 +316,8 @@ mod tests { let mut on_source_count = 0; let mut on_name_count = 0; let generated_info = source.stream_chunks( + &MemoryPool::default(), &map_options, - &WorkContext::default(), &mut |_chunk, _mapping| { on_chunk_count += 1; }, @@ -331,8 +331,8 @@ mod tests { let cached_source = CachedSource::new(source); cached_source.stream_chunks( + &MemoryPool::default(), &map_options, - &WorkContext::default(), &mut |_chunk, _mapping| {}, &mut |_source_index, _source, _source_content| {}, &mut |_name_index, _name| {}, @@ -342,8 +342,8 @@ mod tests { let mut cached_on_source_count = 0; let mut cached_on_name_count = 0; let cached_generated_info = cached_source.stream_chunks( + &MemoryPool::default(), &map_options, - &WorkContext::default(), &mut |_chunk, _mapping| { cached_on_chunk_count += 1; }, diff --git a/src/concat_source.rs b/src/concat_source.rs index 14919c79..42f777eb 100644 --- a/src/concat_source.rs +++ b/src/concat_source.rs @@ -11,8 +11,8 @@ use crate::{ helpers::{get_map, GeneratedInfo, OnChunk, OnName, OnSource, StreamChunks}, linear_map::LinearMap, source::{Mapping, OriginalLocation}, - work_context, BoxSource, MapOptions, RawStringSource, Rope, Source, - SourceExt, SourceMap, SourceValue, WorkContext, + BoxSource, MapOptions, MemoryPool, RawStringSource, Rope, Source, SourceExt, + SourceMap, SourceValue, }; /// Concatenate multiple [Source]s to a single [Source]. 
@@ -200,7 +200,7 @@ impl Source for ConcatSource { } fn map(&self, options: &MapOptions) -> Option { - get_map(&WorkContext::default(), self, options) + get_map(&MemoryPool::default(), self, options) } fn to_writer(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> { @@ -230,8 +230,8 @@ impl Eq for ConcatSource {} impl StreamChunks for ConcatSource { fn stream_chunks<'a>( &'a self, + memory_pool: &'a MemoryPool, options: &MapOptions, - work_context: &'a WorkContext, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, @@ -240,8 +240,8 @@ impl StreamChunks for ConcatSource { if children.len() == 1 { return children[0].stream_chunks( + memory_pool, options, - work_context, on_chunk, on_source, on_name, @@ -266,8 +266,8 @@ impl StreamChunks for ConcatSource { generated_line, generated_column, } = item.stream_chunks( + memory_pool, options, - work_context, &mut |chunk, mapping| { let line = mapping.generated_line + current_line_offset; let column = if mapping.generated_line == 1 { diff --git a/src/helpers.rs b/src/helpers.rs index 984bfd14..71878b7e 100644 --- a/src/helpers.rs +++ b/src/helpers.rs @@ -3,7 +3,6 @@ use std::{ cell::{OnceCell, RefCell}, marker::PhantomData, ops::Range, - rc::Rc, }; use rustc_hash::FxHashMap as HashMap; @@ -14,8 +13,7 @@ use crate::{ linear_map::LinearMap, source::{Mapping, OriginalLocation}, with_indices::WithIndices, - work_context::WorkContext, - MapOptions, Rope, SourceMap, + MapOptions, MemoryPool, Rope, SourceMap, }; // Adding this type because sourceContentLine not happy @@ -24,7 +22,7 @@ type InnerSourceContentLine<'context, 'text> = RefCell< >; pub fn get_map<'a, S: StreamChunks>( - work_context: &'a WorkContext, + memory_pool: &'a MemoryPool, stream: &'a S, options: &'a MapOptions, ) -> Option { @@ -34,11 +32,11 @@ pub fn get_map<'a, S: StreamChunks>( let mut names: Vec = Vec::new(); stream.stream_chunks( + memory_pool, &MapOptions { columns: options.columns, final_source: true, }, - 
work_context, // on_chunk &mut |_, mapping| { mappings_encoder.encode(&mapping); @@ -76,8 +74,8 @@ pub trait StreamChunks { /// [StreamChunks] abstraction fn stream_chunks<'a>( &'a self, + memory_pool: &'a MemoryPool, options: &MapOptions, - context: &'a WorkContext, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, @@ -96,7 +94,7 @@ pub type OnName<'a, 'b> = &'a mut dyn FnMut(u32, Cow<'b, str>); /// Default stream chunks behavior impl, see [webpack-sources streamChunks](https://github.com/webpack/webpack-sources/blob/9f98066311d53a153fdc7c633422a1d086528027/lib/helpers/streamChunks.js#L15-L35). pub fn stream_chunks_default<'a, S>( - work_context: &'a WorkContext, + memory_pool: &'a MemoryPool, source: S, source_map: Option<&'a SourceMap>, options: &MapOptions, @@ -109,7 +107,7 @@ where { if let Some(map) = source_map { stream_chunks_of_source_map( - work_context, + memory_pool, source, map, on_chunk, @@ -316,7 +314,7 @@ where } pub fn stream_chunks_of_source_map<'a, S>( - work_context: &'a WorkContext, + memory_pool: &'a MemoryPool, source: S, source_map: &'a SourceMap, on_chunk: OnChunk<'_, 'a>, @@ -338,7 +336,7 @@ where columns: true, final_source: false, } => stream_chunks_of_source_map_full( - work_context, + memory_pool, source, source_map, on_chunk, @@ -434,7 +432,7 @@ where } fn stream_chunks_of_source_map_full<'a, S>( - work_context: &'a WorkContext, + memory_pool: &'a MemoryPool, source: S, source_map: &'a SourceMap, on_chunk: OnChunk<'_, 'a>, @@ -446,7 +444,7 @@ where { let lines = split_into_lines(&source); let line_with_indices_list = lines - .map(|line| WithIndices::new(work_context, line)) + .map(|line| WithIndices::new(memory_pool, line)) .collect::>(); if line_with_indices_list.is_empty() { @@ -734,7 +732,7 @@ type InnerSourceIndexValueMapping<'a> = #[allow(clippy::too_many_arguments)] pub fn stream_chunks_of_combined_source_map<'a, S>( - work_context: &'a WorkContext, + memory_pool: &'a MemoryPool, source: S, 
source_map: &'a SourceMap, inner_source_name: &'a str, @@ -800,7 +798,7 @@ where }; stream_chunks_of_source_map( - work_context, + memory_pool, source.clone(), source_map, &mut |chunk, mapping| { @@ -856,7 +854,7 @@ where match inner_source_contents.get(&inner_source_index) { Some(Some(source_content)) => Some( split_into_lines(source_content) - .map(|line| WithIndices::new(work_context, line)) + .map(|line| WithIndices::new(memory_pool, line)) .collect(), ), _ => None, @@ -956,7 +954,7 @@ where match inner_source_contents.get(&inner_source_index) { Some(Some(source_content)) => Some( split_into_lines(source_content) - .map(|line| WithIndices::new(work_context, line)) + .map(|line| WithIndices::new(memory_pool, line)) .collect(), ), _ => None, @@ -1121,7 +1119,7 @@ where } source_index_mapping.borrow_mut().insert(i, -2); stream_chunks_of_source_map( - work_context, + memory_pool, source_content.unwrap(), inner_source_map, &mut |chunk, mapping| { @@ -1219,9 +1217,9 @@ where } pub fn stream_and_get_source_and_map<'a, S: StreamChunks>( + memory_pool: &'a MemoryPool, input_source: &'a S, options: &MapOptions, - work_context: &'a WorkContext, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, @@ -1232,8 +1230,8 @@ pub fn stream_and_get_source_and_map<'a, S: StreamChunks>( let mut names: Vec = Vec::new(); let generated_info = input_source.stream_chunks( + memory_pool, options, - work_context, &mut |chunk, mapping| { mappings_encoder.encode(&mapping); on_chunk(chunk, mapping); diff --git a/src/lib.rs b/src/lib.rs index f970785f..f2c657e2 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -7,6 +7,7 @@ mod encoder; mod error; mod helpers; mod linear_map; +mod memory_pool; mod original_source; mod raw_source; mod replace_source; @@ -14,11 +15,11 @@ mod rope; mod source; mod source_map_source; mod with_indices; -mod work_context; pub use cached_source::CachedSource; pub use concat_source::ConcatSource; pub use error::{Error, Result}; +pub use 
memory_pool::MemoryPool; pub use original_source::OriginalSource; pub use raw_source::{RawBufferSource, RawStringSource}; pub use replace_source::{ReplaceSource, ReplacementEnforce}; @@ -30,7 +31,6 @@ pub use source::{ pub use source_map_source::{ SourceMapSource, SourceMapSourceOptions, WithoutOriginalOptions, }; -pub use work_context::WorkContext; /// Reexport `StreamChunks` related types. pub mod stream_chunks { diff --git a/src/work_context.rs b/src/memory_pool.rs similarity index 85% rename from src/work_context.rs rename to src/memory_pool.rs index 210e26df..fca0c6f4 100644 --- a/src/work_context.rs +++ b/src/memory_pool.rs @@ -9,12 +9,14 @@ use std::{cell::RefCell, collections::BTreeMap}; // 4. Empirical value: 64 is a proven balance point in real projects const MIN_POOL_CAPACITY: usize = 64; +/// A memory pool for reusing `Vec` allocations to reduce memory allocation overhead. #[derive(Default, Debug)] -pub struct WorkContext { +pub struct MemoryPool { usize_vec_pool: RefCell>>>, } -impl WorkContext { +impl MemoryPool { + /// Retrieves a reusable `Vec` from the pool with at least the requested capacity. pub fn pull_usize_vec(&self, requested_capacity: usize) -> Vec { if requested_capacity < MIN_POOL_CAPACITY || self.usize_vec_pool.borrow().len() == 0 @@ -33,6 +35,7 @@ impl WorkContext { Vec::with_capacity(requested_capacity) } + /// Returns a `Vec` to the pool for future reuse. 
pub fn return_usize_vec(&self, vec: Vec) { if vec.capacity() < MIN_POOL_CAPACITY { return; @@ -47,11 +50,11 @@ impl WorkContext { #[derive(Debug)] pub struct PooledUsizeVec<'a> { vec: Option>, - context: &'a WorkContext, + context: &'a MemoryPool, } impl<'a> PooledUsizeVec<'a> { - pub fn new(context: &'a WorkContext, requested_capacity: usize) -> Self { + pub fn new(context: &'a MemoryPool, requested_capacity: usize) -> Self { let vec = context.pull_usize_vec(requested_capacity); Self { vec: Some(vec), diff --git a/src/original_source.rs b/src/original_source.rs index d99f418e..b9f1e6ea 100644 --- a/src/original_source.rs +++ b/src/original_source.rs @@ -10,7 +10,7 @@ use crate::{ SourceText, StreamChunks, }, source::{Mapping, OriginalLocation}, - work_context, MapOptions, Rope, Source, SourceMap, SourceValue, WorkContext, + MapOptions, MemoryPool, Rope, Source, SourceMap, SourceValue, }; /// Represents source code, it will create source map for the source code, @@ -68,7 +68,7 @@ impl Source for OriginalSource { } fn map(&self, options: &MapOptions) -> Option { - get_map(&WorkContext::default(), self, options) + get_map(&MemoryPool::default(), self, options) } fn to_writer(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> { @@ -108,8 +108,8 @@ impl std::fmt::Debug for OriginalSource { impl StreamChunks for OriginalSource { fn stream_chunks<'a>( &'a self, + _: &'a MemoryPool, options: &MapOptions, - work_context: &'a WorkContext, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, _on_name: OnName, diff --git a/src/raw_source.rs b/src/raw_source.rs index 347a314e..68773bf6 100644 --- a/src/raw_source.rs +++ b/src/raw_source.rs @@ -9,7 +9,7 @@ use crate::{ get_generated_source_info, stream_chunks_of_raw_source, OnChunk, OnName, OnSource, StreamChunks, }, - MapOptions, Rope, Source, SourceMap, SourceValue, WorkContext, + MapOptions, MemoryPool, Rope, Source, SourceMap, SourceValue, }; /// A string variant of [RawStringSource]. 
@@ -109,8 +109,8 @@ impl Hash for RawStringSource { impl StreamChunks for RawStringSource { fn stream_chunks<'a>( &'a self, + _: &'a MemoryPool, options: &MapOptions, - work_context: &'a WorkContext, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, @@ -243,8 +243,8 @@ impl Hash for RawBufferSource { impl StreamChunks for RawBufferSource { fn stream_chunks<'a>( &'a self, + _: &'a MemoryPool, options: &MapOptions, - work_context: &'a WorkContext, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, diff --git a/src/replace_source.rs b/src/replace_source.rs index 9ec61799..07ba7bf0 100644 --- a/src/replace_source.rs +++ b/src/replace_source.rs @@ -18,8 +18,8 @@ use crate::{ linear_map::LinearMap, rope::Rope, with_indices::WithIndices, - BoxSource, MapOptions, Mapping, OriginalLocation, Source, SourceExt, - SourceMap, SourceValue, WorkContext, + BoxSource, MapOptions, Mapping, MemoryPool, OriginalLocation, Source, + SourceExt, SourceMap, SourceValue, }; /// Decorates a Source with replacements and insertions of source code, @@ -263,7 +263,7 @@ impl Source for ReplaceSource { if replacements.is_empty() { return self.inner.map(options); } - get_map(&WorkContext::default(), self, options) + get_map(&MemoryPool::default(), self, options) } fn to_writer(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> { @@ -336,8 +336,8 @@ fn check_content_at_position<'text>( impl StreamChunks for ReplaceSource { fn stream_chunks<'a>( &'a self, + memory_pool: &'a MemoryPool, options: &crate::MapOptions, - work_context: &'a WorkContext, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, on_name: crate::helpers::OnName<'_, 'a>, @@ -392,7 +392,7 @@ impl StreamChunks for ReplaceSource { match source_content { SourceContent::Raw(source) => { let lines = split_into_lines(source) - .map(|line| WithIndices::new(work_context, line)) + .map(|line| WithIndices::new(memory_pool, line)) 
.collect::>(); let matched = check_content_at_position(&lines, line, column, expected_chunk); @@ -409,11 +409,11 @@ impl StreamChunks for ReplaceSource { }; let result = self.inner.stream_chunks( + memory_pool, &MapOptions { columns: options.columns, final_source: false, }, - work_context, &mut |chunk, mut mapping| { // SAFETY: final_source is false in ReplaceSource let chunk = chunk.unwrap(); diff --git a/src/source.rs b/src/source.rs index 5fd26637..5afc1435 100644 --- a/src/source.rs +++ b/src/source.rs @@ -4,7 +4,6 @@ use std::{ convert::{TryFrom, TryInto}, fmt, hash::{Hash, Hasher}, - rc::Rc, sync::Arc, }; @@ -14,8 +13,7 @@ use serde::{Deserialize, Serialize}; use crate::{ helpers::{decode_mappings, StreamChunks}, rope::Rope, - work_context::WorkContext, - Result, + MemoryPool, Result, }; /// An alias for `Box`. @@ -168,15 +166,15 @@ dyn_clone::clone_trait_object!(Source); impl StreamChunks for BoxSource { fn stream_chunks<'a>( &'a self, + memory_pool: &'a MemoryPool, options: &MapOptions, - work_context: &'a WorkContext, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, on_name: crate::helpers::OnName<'_, 'a>, ) -> crate::helpers::GeneratedInfo { self.as_ref().stream_chunks( + memory_pool, options, - work_context, on_chunk, on_source, on_name, diff --git a/src/source_map_source.rs b/src/source_map_source.rs index 0b0d4bdb..389dafc2 100644 --- a/src/source_map_source.rs +++ b/src/source_map_source.rs @@ -8,7 +8,7 @@ use crate::{ get_map, stream_chunks_of_combined_source_map, stream_chunks_of_source_map, StreamChunks, }, - work_context::{self, WorkContext}, + memory_pool::MemoryPool, MapOptions, Rope, Source, SourceMap, SourceValue, }; @@ -109,7 +109,7 @@ impl Source for SourceMapSource { if self.inner_source_map.is_none() { return Some(self.source_map.clone()); } - get_map(&WorkContext::default(), self, options) + get_map(&MemoryPool::default(), self, options) } fn to_writer(&self, writer: &mut dyn std::io::Write) -> 
std::io::Result<()> { @@ -185,15 +185,15 @@ impl std::fmt::Debug for SourceMapSource { impl StreamChunks for SourceMapSource { fn stream_chunks<'a>( &'a self, + memory_pool: &'a MemoryPool, options: &MapOptions, - work_context: &'a WorkContext, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, on_name: crate::helpers::OnName<'_, 'a>, ) -> crate::helpers::GeneratedInfo { if let Some(inner_source_map) = &self.inner_source_map { stream_chunks_of_combined_source_map( - work_context, + memory_pool, &*self.value, &self.source_map, &self.name, @@ -207,7 +207,7 @@ impl StreamChunks for SourceMapSource { ) } else { stream_chunks_of_source_map( - work_context, + memory_pool, self.value.as_str(), &self.source_map, on_chunk, diff --git a/src/with_indices.rs b/src/with_indices.rs index bd44ff0c..284b8926 100644 --- a/src/with_indices.rs +++ b/src/with_indices.rs @@ -1,9 +1,6 @@ use std::{cell::OnceCell, marker::PhantomData}; -use crate::{ - helpers::SourceText, - work_context::{PooledUsizeVec, WorkContext}, -}; +use crate::{helpers::SourceText, memory_pool::PooledUsizeVec, MemoryPool}; #[derive(Debug)] pub struct WithIndices<'context, 'text, S> @@ -14,7 +11,7 @@ where pub line: S, /// the byte position of each `char` in `line` string slice . 
pub indices_indexes: OnceCell>, - work_context: &'context WorkContext, + memory_pool: &'context MemoryPool, data: PhantomData<&'text S>, } @@ -22,11 +19,11 @@ impl<'context, 'text, S> WithIndices<'context, 'text, S> where S: SourceText<'text>, { - pub fn new(work_context: &'context WorkContext, line: S) -> Self { + pub fn new(memory_pool: &'context MemoryPool, line: S) -> Self { Self { indices_indexes: OnceCell::new(), line, - work_context, + memory_pool, data: PhantomData, } } @@ -38,7 +35,7 @@ where } let indices_indexes = self.indices_indexes.get_or_init(|| { - let mut vec = PooledUsizeVec::new(self.work_context, self.line.len()); + let mut vec = PooledUsizeVec::new(self.memory_pool, self.line.len()); vec.extend(self.line.char_indices().map(|(i, _)| i)); vec }); @@ -60,13 +57,13 @@ where /// tests are just copy from `substring` crate #[cfg(test)] mod tests { - use crate::{work_context::WorkContext, Rope}; + use crate::{MemoryPool, Rope}; use super::WithIndices; #[test] fn test_substring() { assert_eq!( - WithIndices::new(&WorkContext::default(), Rope::from("foobar")) + WithIndices::new(&MemoryPool::default(), Rope::from("foobar")) .substring(0, 3), "foo" ); @@ -75,12 +72,12 @@ mod tests { #[test] fn test_out_of_bounds() { assert_eq!( - WithIndices::new(&WorkContext::default(), Rope::from("foobar")) + WithIndices::new(&MemoryPool::default(), Rope::from("foobar")) .substring(0, 10), "foobar" ); assert_eq!( - WithIndices::new(&WorkContext::default(), Rope::from("foobar")) + WithIndices::new(&MemoryPool::default(), Rope::from("foobar")) .substring(6, 10), "" ); @@ -89,7 +86,7 @@ mod tests { #[test] fn test_start_less_than_end() { assert_eq!( - WithIndices::new(&WorkContext::default(), Rope::from("foobar")) + WithIndices::new(&MemoryPool::default(), Rope::from("foobar")) .substring(3, 2), "" ); @@ -98,7 +95,7 @@ mod tests { #[test] fn test_start_and_end_equal() { assert_eq!( - WithIndices::new(&WorkContext::default(), Rope::from("foobar")) + 
WithIndices::new(&MemoryPool::default(), Rope::from("foobar")) .substring(3, 3), "" ); @@ -107,7 +104,7 @@ mod tests { #[test] fn test_multiple_byte_characters() { assert_eq!( - WithIndices::new(&WorkContext::default(), Rope::from("fõøbα®")) + WithIndices::new(&MemoryPool::default(), Rope::from("fõøbα®")) .substring(2, 5), "øbα" ); diff --git a/tests/compat_source.rs b/tests/compat_source.rs index 50fdab42..b3257f3c 100644 --- a/tests/compat_source.rs +++ b/tests/compat_source.rs @@ -6,8 +6,8 @@ use rspack_sources::stream_chunks::{ stream_chunks_default, GeneratedInfo, OnChunk, OnName, OnSource, StreamChunks, }; use rspack_sources::{ - ConcatSource, MapOptions, RawStringSource, Rope, Source, SourceExt, - SourceMap, SourceValue, WorkContext, + ConcatSource, MapOptions, MemoryPool, RawStringSource, Rope, Source, + SourceExt, SourceMap, SourceValue, }; #[derive(Debug, Eq)] @@ -42,14 +42,14 @@ impl Source for CompatSource { impl StreamChunks for CompatSource { fn stream_chunks<'a>( &'a self, + memory_pool: &'a MemoryPool, options: &MapOptions, - work_context: &'a WorkContext, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, ) -> GeneratedInfo { stream_chunks_default( - work_context, + memory_pool, self.0, self.1.as_ref(), options, From 36d575d74a0169c9f5564cee095b6bb329fded7a Mon Sep 17 00:00:00 2001 From: Cong-Cong Date: Wed, 29 Oct 2025 19:56:12 +0800 Subject: [PATCH 05/22] refactor --- src/memory_pool.rs | 2 +- src/source.rs | 17 +---------------- 2 files changed, 2 insertions(+), 17 deletions(-) diff --git a/src/memory_pool.rs b/src/memory_pool.rs index fca0c6f4..d8532349 100644 --- a/src/memory_pool.rs +++ b/src/memory_pool.rs @@ -19,7 +19,7 @@ impl MemoryPool { /// Retrieves a reusable `Vec` from the pool with at least the requested capacity. 
pub fn pull_usize_vec(&self, requested_capacity: usize) -> Vec { if requested_capacity < MIN_POOL_CAPACITY - || self.usize_vec_pool.borrow().len() == 0 + || self.usize_vec_pool.borrow().is_empty() { return Vec::with_capacity(requested_capacity); } diff --git a/src/source.rs b/src/source.rs index 5afc1435..2060e003 100644 --- a/src/source.rs +++ b/src/source.rs @@ -255,7 +255,7 @@ impl SourceExt for T { } /// Options for [Source::map]. -#[derive(Debug, Clone)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct MapOptions { /// Whether have columns info in generated [SourceMap] mappings. pub columns: bool, @@ -263,21 +263,6 @@ pub struct MapOptions { pub(crate) final_source: bool, } -impl PartialEq for MapOptions { - fn eq(&self, other: &Self) -> bool { - self.columns == other.columns && self.final_source == other.final_source - } -} - -impl Eq for MapOptions {} - -impl Hash for MapOptions { - fn hash(&self, state: &mut H) { - self.columns.hash(state); - self.final_source.hash(state); - } -} - impl Default for MapOptions { fn default() -> Self { Self { From c31724180f0e39d73502ba90d3d1b2c6bd69b665 Mon Sep 17 00:00:00 2001 From: Cong-Cong Date: Thu, 30 Oct 2025 16:13:08 +0800 Subject: [PATCH 06/22] refactor --- src/cached_source.rs | 15 +----- src/concat_source.rs | 16 ++----- src/helpers.rs | 42 ++++------------- src/lib.rs | 1 - src/memory_pool.rs | 98 +++++++++++++++++++++++++++------------- src/original_source.rs | 5 +- src/raw_source.rs | 4 +- src/replace_source.rs | 16 +++---- src/source.rs | 13 ++---- src/source_map_source.rs | 6 +-- src/with_indices.rs | 41 +++++------------ 11 files changed, 108 insertions(+), 149 deletions(-) diff --git a/src/cached_source.rs b/src/cached_source.rs index ae93dfc9..254c2458 100644 --- a/src/cached_source.rs +++ b/src/cached_source.rs @@ -13,7 +13,7 @@ use crate::{ }, rope::Rope, source::SourceValue, - BoxSource, MapOptions, MemoryPool, Source, SourceExt, SourceMap, + BoxSource, MapOptions, Source, SourceExt, SourceMap, 
}; #[derive(Default)] @@ -122,7 +122,6 @@ impl Source for CachedSource { impl StreamChunks for CachedSource { fn stream_chunks<'a>( &'a self, - memory_pool: &'a MemoryPool, options: &MapOptions, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, @@ -138,13 +137,7 @@ impl StreamChunks for CachedSource { let source = self.rope(); if let Some(map) = map { stream_chunks_of_source_map( - memory_pool, - source, - map, - on_chunk, - on_source, - on_name, - options, + source, map, on_chunk, on_source, on_name, options, ) } else { stream_chunks_of_raw_source( @@ -154,7 +147,6 @@ impl StreamChunks for CachedSource { } None => { let (generated_info, map) = stream_and_get_source_and_map( - memory_pool, &self.inner, options, on_chunk, @@ -316,7 +308,6 @@ mod tests { let mut on_source_count = 0; let mut on_name_count = 0; let generated_info = source.stream_chunks( - &MemoryPool::default(), &map_options, &mut |_chunk, _mapping| { on_chunk_count += 1; @@ -331,7 +322,6 @@ mod tests { let cached_source = CachedSource::new(source); cached_source.stream_chunks( - &MemoryPool::default(), &map_options, &mut |_chunk, _mapping| {}, &mut |_source_index, _source, _source_content| {}, @@ -342,7 +332,6 @@ mod tests { let mut cached_on_source_count = 0; let mut cached_on_name_count = 0; let cached_generated_info = cached_source.stream_chunks( - &MemoryPool::default(), &map_options, &mut |_chunk, _mapping| { cached_on_chunk_count += 1; diff --git a/src/concat_source.rs b/src/concat_source.rs index 42f777eb..e7048d4d 100644 --- a/src/concat_source.rs +++ b/src/concat_source.rs @@ -11,8 +11,8 @@ use crate::{ helpers::{get_map, GeneratedInfo, OnChunk, OnName, OnSource, StreamChunks}, linear_map::LinearMap, source::{Mapping, OriginalLocation}, - BoxSource, MapOptions, MemoryPool, RawStringSource, Rope, Source, SourceExt, - SourceMap, SourceValue, + BoxSource, MapOptions, RawStringSource, Rope, Source, SourceExt, SourceMap, + SourceValue, }; /// Concatenate 
multiple [Source]s to a single [Source]. @@ -200,7 +200,7 @@ impl Source for ConcatSource { } fn map(&self, options: &MapOptions) -> Option { - get_map(&MemoryPool::default(), self, options) + get_map(self, options) } fn to_writer(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> { @@ -230,7 +230,6 @@ impl Eq for ConcatSource {} impl StreamChunks for ConcatSource { fn stream_chunks<'a>( &'a self, - memory_pool: &'a MemoryPool, options: &MapOptions, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, @@ -239,13 +238,7 @@ impl StreamChunks for ConcatSource { let children = self.optimized_children(); if children.len() == 1 { - return children[0].stream_chunks( - memory_pool, - options, - on_chunk, - on_source, - on_name, - ); + return children[0].stream_chunks(options, on_chunk, on_source, on_name); } let mut current_line_offset = 0; let mut current_column_offset = 0; @@ -266,7 +259,6 @@ impl StreamChunks for ConcatSource { generated_line, generated_column, } = item.stream_chunks( - memory_pool, options, &mut |chunk, mapping| { let line = mapping.generated_line + current_line_offset; diff --git a/src/helpers.rs b/src/helpers.rs index 71878b7e..89c94ac4 100644 --- a/src/helpers.rs +++ b/src/helpers.rs @@ -13,16 +13,14 @@ use crate::{ linear_map::LinearMap, source::{Mapping, OriginalLocation}, with_indices::WithIndices, - MapOptions, MemoryPool, Rope, SourceMap, + MapOptions, Rope, SourceMap, }; // Adding this type because sourceContentLine not happy -type InnerSourceContentLine<'context, 'text> = RefCell< - LinearMap>>>>>, ->; +type InnerSourceContentLine<'text> = + RefCell>>>>>>; pub fn get_map<'a, S: StreamChunks>( - memory_pool: &'a MemoryPool, stream: &'a S, options: &'a MapOptions, ) -> Option { @@ -32,7 +30,6 @@ pub fn get_map<'a, S: StreamChunks>( let mut names: Vec = Vec::new(); stream.stream_chunks( - memory_pool, &MapOptions { columns: options.columns, final_source: true, @@ -74,7 +71,6 @@ pub trait StreamChunks { /// [StreamChunks] 
abstraction fn stream_chunks<'a>( &'a self, - memory_pool: &'a MemoryPool, options: &MapOptions, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, @@ -94,7 +90,6 @@ pub type OnName<'a, 'b> = &'a mut dyn FnMut(u32, Cow<'b, str>); /// Default stream chunks behavior impl, see [webpack-sources streamChunks](https://github.com/webpack/webpack-sources/blob/9f98066311d53a153fdc7c633422a1d086528027/lib/helpers/streamChunks.js#L15-L35). pub fn stream_chunks_default<'a, S>( - memory_pool: &'a MemoryPool, source: S, source_map: Option<&'a SourceMap>, options: &MapOptions, @@ -107,13 +102,7 @@ where { if let Some(map) = source_map { stream_chunks_of_source_map( - memory_pool, - source, - map, - on_chunk, - on_source, - on_name, - options, + source, map, on_chunk, on_source, on_name, options, ) } else { stream_chunks_of_raw_source(source, options, on_chunk, on_source, on_name) @@ -314,7 +303,6 @@ where } pub fn stream_chunks_of_source_map<'a, S>( - memory_pool: &'a MemoryPool, source: S, source_map: &'a SourceMap, on_chunk: OnChunk<'_, 'a>, @@ -336,12 +324,7 @@ where columns: true, final_source: false, } => stream_chunks_of_source_map_full( - memory_pool, - source, - source_map, - on_chunk, - on_source, - on_name, + source, source_map, on_chunk, on_source, on_name, ), MapOptions { columns: false, @@ -432,7 +415,6 @@ where } fn stream_chunks_of_source_map_full<'a, S>( - memory_pool: &'a MemoryPool, source: S, source_map: &'a SourceMap, on_chunk: OnChunk<'_, 'a>, @@ -443,9 +425,8 @@ where S: SourceText<'a> + 'a, { let lines = split_into_lines(&source); - let line_with_indices_list = lines - .map(|line| WithIndices::new(memory_pool, line)) - .collect::>(); + let line_with_indices_list = + lines.map(|line| WithIndices::new(line)).collect::>(); if line_with_indices_list.is_empty() { return GeneratedInfo { @@ -732,7 +713,6 @@ type InnerSourceIndexValueMapping<'a> = #[allow(clippy::too_many_arguments)] pub fn stream_chunks_of_combined_source_map<'a, S>( - memory_pool: &'a 
MemoryPool, source: S, source_map: &'a SourceMap, inner_source_name: &'a str, @@ -798,7 +778,6 @@ where }; stream_chunks_of_source_map( - memory_pool, source.clone(), source_map, &mut |chunk, mapping| { @@ -854,7 +833,7 @@ where match inner_source_contents.get(&inner_source_index) { Some(Some(source_content)) => Some( split_into_lines(source_content) - .map(|line| WithIndices::new(memory_pool, line)) + .map(|line| WithIndices::new(line)) .collect(), ), _ => None, @@ -954,7 +933,7 @@ where match inner_source_contents.get(&inner_source_index) { Some(Some(source_content)) => Some( split_into_lines(source_content) - .map(|line| WithIndices::new(memory_pool, line)) + .map(|line| WithIndices::new(line)) .collect(), ), _ => None, @@ -1119,7 +1098,6 @@ where } source_index_mapping.borrow_mut().insert(i, -2); stream_chunks_of_source_map( - memory_pool, source_content.unwrap(), inner_source_map, &mut |chunk, mapping| { @@ -1217,7 +1195,6 @@ where } pub fn stream_and_get_source_and_map<'a, S: StreamChunks>( - memory_pool: &'a MemoryPool, input_source: &'a S, options: &MapOptions, on_chunk: OnChunk<'_, 'a>, @@ -1230,7 +1207,6 @@ pub fn stream_and_get_source_and_map<'a, S: StreamChunks>( let mut names: Vec = Vec::new(); let generated_info = input_source.stream_chunks( - memory_pool, options, &mut |chunk, mapping| { mappings_encoder.encode(&mapping); diff --git a/src/lib.rs b/src/lib.rs index f2c657e2..11ee1d07 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -19,7 +19,6 @@ mod with_indices; pub use cached_source::CachedSource; pub use concat_source::ConcatSource; pub use error::{Error, Result}; -pub use memory_pool::MemoryPool; pub use original_source::OriginalSource; pub use raw_source::{RawBufferSource, RawStringSource}; pub use replace_source::{ReplaceSource, ReplacementEnforce}; diff --git a/src/memory_pool.rs b/src/memory_pool.rs index d8532349..3a56d23d 100644 --- a/src/memory_pool.rs +++ b/src/memory_pool.rs @@ -1,4 +1,6 @@ -use std::{cell::RefCell, collections::BTreeMap}; 
+use std::{ + cell::RefCell, collections::BTreeMap, rc::Rc, sync::atomic::AtomicBool, +}; // Vector pooling minimum capacity threshold // Recommended threshold: 64 @@ -9,19 +11,39 @@ use std::{cell::RefCell, collections::BTreeMap}; // 4. Empirical value: 64 is a proven balance point in real projects const MIN_POOL_CAPACITY: usize = 64; -/// A memory pool for reusing `Vec` allocations to reduce memory allocation overhead. +trait Poolable { + fn with_capacity(capacity: usize) -> Self; + fn capacity(&self) -> usize; + fn clear(&mut self); +} + +impl Poolable for Vec { + fn with_capacity(capacity: usize) -> Self { + Vec::with_capacity(capacity) + } + + fn capacity(&self) -> usize { + self.capacity() + } + + fn clear(&mut self) { + self.clear(); + } +} + +/// A memory pool for reusing `T` allocations to reduce memory allocation overhead. #[derive(Default, Debug)] -pub struct MemoryPool { - usize_vec_pool: RefCell>>>, +pub struct ObjectPool { + usize_vec_pool: RefCell>>, } -impl MemoryPool { - /// Retrieves a reusable `Vec` from the pool with at least the requested capacity. - pub fn pull_usize_vec(&self, requested_capacity: usize) -> Vec { +impl ObjectPool { + /// Retrieves a reusable `T` from the pool with at least the requested capacity. + pub fn pull(&self, requested_capacity: usize) -> T { if requested_capacity < MIN_POOL_CAPACITY || self.usize_vec_pool.borrow().is_empty() { - return Vec::with_capacity(requested_capacity); + return T::with_capacity(requested_capacity); } let mut usize_vec_pool = self.usize_vec_pool.borrow_mut(); if let Some((_, bucket)) = @@ -32,63 +54,75 @@ impl MemoryPool { return v; } } - Vec::with_capacity(requested_capacity) + T::with_capacity(requested_capacity) } - /// Returns a `Vec` to the pool for future reuse. - pub fn return_usize_vec(&self, vec: Vec) { - if vec.capacity() < MIN_POOL_CAPACITY { + /// Returns a `T` to the pool for future reuse. 
+ pub fn ret(&self, object: T) { + if object.capacity() < MIN_POOL_CAPACITY { return; } let mut usize_vec_pool = self.usize_vec_pool.borrow_mut(); - let cap = vec.capacity(); + let cap = object.capacity(); let bucket = usize_vec_pool.entry(cap).or_default(); - bucket.push(vec); + bucket.push(object); } } #[derive(Debug)] -pub struct PooledUsizeVec<'a> { - vec: Option>, - context: &'a MemoryPool, +pub struct Pooled { + object: Option, + pool: Rc>, } -impl<'a> PooledUsizeVec<'a> { - pub fn new(context: &'a MemoryPool, requested_capacity: usize) -> Self { - let vec = context.pull_usize_vec(requested_capacity); +impl Pooled { + fn new(pool: Rc>, requested_capacity: usize) -> Self { + let object = pool.pull(requested_capacity); Self { - vec: Some(vec), - context, + object: Some(object), + pool, } } - pub fn as_mut(&mut self) -> &mut Vec { - self.vec.as_mut().unwrap() + pub fn as_mut(&mut self) -> &mut T { + self.object.as_mut().unwrap() } - pub fn as_ref(&self) -> &Vec { - self.vec.as_ref().unwrap() + pub fn as_ref(&self) -> &T { + self.object.as_ref().unwrap() } } -impl Drop for PooledUsizeVec<'_> { +impl Drop for Pooled { fn drop(&mut self) { - if let Some(vec) = self.vec.take() { - self.context.return_usize_vec(vec); + if let Some(object) = self.object.take() { + self.pool.ret(object); } } } -impl std::ops::Deref for PooledUsizeVec<'_> { - type Target = Vec; +impl std::ops::Deref for Pooled { + type Target = T; fn deref(&self) -> &Self::Target { self.as_ref() } } -impl std::ops::DerefMut for PooledUsizeVec<'_> { +impl std::ops::DerefMut for Pooled { fn deref_mut(&mut self) -> &mut Self::Target { self.as_mut() } } + +pub(crate) const USING_OBJECT_POOL: AtomicBool = AtomicBool::new(false); + +pub fn using_object_pool(f: F) -> R +where + F: FnOnce() -> R, +{ + USING_OBJECT_POOL.store(true, std::sync::atomic::Ordering::SeqCst); + let result = f(); + USING_OBJECT_POOL.store(false, std::sync::atomic::Ordering::SeqCst); + result +} diff --git a/src/original_source.rs 
b/src/original_source.rs index b9f1e6ea..47349abc 100644 --- a/src/original_source.rs +++ b/src/original_source.rs @@ -10,7 +10,7 @@ use crate::{ SourceText, StreamChunks, }, source::{Mapping, OriginalLocation}, - MapOptions, MemoryPool, Rope, Source, SourceMap, SourceValue, + MapOptions, Rope, Source, SourceMap, SourceValue, }; /// Represents source code, it will create source map for the source code, @@ -68,7 +68,7 @@ impl Source for OriginalSource { } fn map(&self, options: &MapOptions) -> Option { - get_map(&MemoryPool::default(), self, options) + get_map(self, options) } fn to_writer(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> { @@ -108,7 +108,6 @@ impl std::fmt::Debug for OriginalSource { impl StreamChunks for OriginalSource { fn stream_chunks<'a>( &'a self, - _: &'a MemoryPool, options: &MapOptions, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, diff --git a/src/raw_source.rs b/src/raw_source.rs index 68773bf6..f55856ba 100644 --- a/src/raw_source.rs +++ b/src/raw_source.rs @@ -9,7 +9,7 @@ use crate::{ get_generated_source_info, stream_chunks_of_raw_source, OnChunk, OnName, OnSource, StreamChunks, }, - MapOptions, MemoryPool, Rope, Source, SourceMap, SourceValue, + MapOptions, Rope, Source, SourceMap, SourceValue, }; /// A string variant of [RawStringSource]. 
@@ -109,7 +109,6 @@ impl Hash for RawStringSource { impl StreamChunks for RawStringSource { fn stream_chunks<'a>( &'a self, - _: &'a MemoryPool, options: &MapOptions, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, @@ -243,7 +242,6 @@ impl Hash for RawBufferSource { impl StreamChunks for RawBufferSource { fn stream_chunks<'a>( &'a self, - _: &'a MemoryPool, options: &MapOptions, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, diff --git a/src/replace_source.rs b/src/replace_source.rs index 07ba7bf0..87d1bd58 100644 --- a/src/replace_source.rs +++ b/src/replace_source.rs @@ -18,8 +18,8 @@ use crate::{ linear_map::LinearMap, rope::Rope, with_indices::WithIndices, - BoxSource, MapOptions, Mapping, MemoryPool, OriginalLocation, Source, - SourceExt, SourceMap, SourceValue, + BoxSource, MapOptions, Mapping, OriginalLocation, Source, SourceExt, + SourceMap, SourceValue, }; /// Decorates a Source with replacements and insertions of source code, @@ -263,7 +263,7 @@ impl Source for ReplaceSource { if replacements.is_empty() { return self.inner.map(options); } - get_map(&MemoryPool::default(), self, options) + get_map(self, options) } fn to_writer(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> { @@ -313,13 +313,13 @@ impl std::fmt::Debug for ReplaceSource { } } -enum SourceContent<'context, 'text> { +enum SourceContent<'text> { Raw(Rope<'text>), - Lines(Vec>>), + Lines(Vec>>), } fn check_content_at_position<'text>( - lines: &[WithIndices<'_, 'text, Rope<'text>>], + lines: &[WithIndices<'text, Rope<'text>>], line: u32, column: u32, expected: Rope, // FIXME: memory @@ -336,7 +336,6 @@ fn check_content_at_position<'text>( impl StreamChunks for ReplaceSource { fn stream_chunks<'a>( &'a self, - memory_pool: &'a MemoryPool, options: &crate::MapOptions, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, @@ -392,7 +391,7 @@ impl StreamChunks for ReplaceSource { match source_content { SourceContent::Raw(source) => { 
let lines = split_into_lines(source) - .map(|line| WithIndices::new(memory_pool, line)) + .map(|line| WithIndices::new(line)) .collect::>(); let matched = check_content_at_position(&lines, line, column, expected_chunk); @@ -409,7 +408,6 @@ impl StreamChunks for ReplaceSource { }; let result = self.inner.stream_chunks( - memory_pool, &MapOptions { columns: options.columns, final_source: false, diff --git a/src/source.rs b/src/source.rs index 2060e003..e60a0e09 100644 --- a/src/source.rs +++ b/src/source.rs @@ -13,7 +13,7 @@ use serde::{Deserialize, Serialize}; use crate::{ helpers::{decode_mappings, StreamChunks}, rope::Rope, - MemoryPool, Result, + Result, }; /// An alias for `Box`. @@ -166,19 +166,14 @@ dyn_clone::clone_trait_object!(Source); impl StreamChunks for BoxSource { fn stream_chunks<'a>( &'a self, - memory_pool: &'a MemoryPool, options: &MapOptions, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, on_name: crate::helpers::OnName<'_, 'a>, ) -> crate::helpers::GeneratedInfo { - self.as_ref().stream_chunks( - memory_pool, - options, - on_chunk, - on_source, - on_name, - ) + self + .as_ref() + .stream_chunks(options, on_chunk, on_source, on_name) } } diff --git a/src/source_map_source.rs b/src/source_map_source.rs index 389dafc2..c0747412 100644 --- a/src/source_map_source.rs +++ b/src/source_map_source.rs @@ -8,7 +8,6 @@ use crate::{ get_map, stream_chunks_of_combined_source_map, stream_chunks_of_source_map, StreamChunks, }, - memory_pool::MemoryPool, MapOptions, Rope, Source, SourceMap, SourceValue, }; @@ -109,7 +108,7 @@ impl Source for SourceMapSource { if self.inner_source_map.is_none() { return Some(self.source_map.clone()); } - get_map(&MemoryPool::default(), self, options) + get_map(self, options) } fn to_writer(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> { @@ -185,7 +184,6 @@ impl std::fmt::Debug for SourceMapSource { impl StreamChunks for SourceMapSource { fn stream_chunks<'a>( &'a self, - 
memory_pool: &'a MemoryPool, options: &MapOptions, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, @@ -193,7 +191,6 @@ impl StreamChunks for SourceMapSource { ) -> crate::helpers::GeneratedInfo { if let Some(inner_source_map) = &self.inner_source_map { stream_chunks_of_combined_source_map( - memory_pool, &*self.value, &self.source_map, &self.name, @@ -207,7 +204,6 @@ impl StreamChunks for SourceMapSource { ) } else { stream_chunks_of_source_map( - memory_pool, self.value.as_str(), &self.source_map, on_chunk, diff --git a/src/with_indices.rs b/src/with_indices.rs index 284b8926..e917474f 100644 --- a/src/with_indices.rs +++ b/src/with_indices.rs @@ -1,9 +1,9 @@ use std::{cell::OnceCell, marker::PhantomData}; -use crate::{helpers::SourceText, memory_pool::PooledUsizeVec, MemoryPool}; +use crate::helpers::SourceText; #[derive(Debug)] -pub struct WithIndices<'context, 'text, S> +pub struct WithIndices<'text, S> where S: SourceText<'text>, { @@ -11,19 +11,17 @@ where pub line: S, /// the byte position of each `char` in `line` string slice . 
pub indices_indexes: OnceCell>, - memory_pool: &'context MemoryPool, data: PhantomData<&'text S>, } -impl<'context, 'text, S> WithIndices<'context, 'text, S> +impl<'text, S> WithIndices<'text, S> where S: SourceText<'text>, { - pub fn new(memory_pool: &'context MemoryPool, line: S) -> Self { + pub fn new(line: S) -> Self { Self { indices_indexes: OnceCell::new(), line, - memory_pool, data: PhantomData, } } @@ -35,7 +33,7 @@ where } let indices_indexes = self.indices_indexes.get_or_init(|| { - let mut vec = PooledUsizeVec::new(self.memory_pool, self.line.len()); + let mut vec = PooledUsizeVec::new(self.line.len()); vec.extend(self.line.char_indices().map(|(i, _)| i)); vec }); @@ -57,14 +55,13 @@ where /// tests are just copy from `substring` crate #[cfg(test)] mod tests { - use crate::{MemoryPool, Rope}; + use crate::Rope; use super::WithIndices; #[test] fn test_substring() { assert_eq!( - WithIndices::new(&MemoryPool::default(), Rope::from("foobar")) - .substring(0, 3), + WithIndices::new(Rope::from("foobar")).substring(0, 3), "foo" ); } @@ -72,40 +69,26 @@ mod tests { #[test] fn test_out_of_bounds() { assert_eq!( - WithIndices::new(&MemoryPool::default(), Rope::from("foobar")) - .substring(0, 10), + WithIndices::new(Rope::from("foobar")).substring(0, 10), "foobar" ); - assert_eq!( - WithIndices::new(&MemoryPool::default(), Rope::from("foobar")) - .substring(6, 10), - "" - ); + assert_eq!(WithIndices::new(Rope::from("foobar")).substring(6, 10), ""); } #[test] fn test_start_less_than_end() { - assert_eq!( - WithIndices::new(&MemoryPool::default(), Rope::from("foobar")) - .substring(3, 2), - "" - ); + assert_eq!(WithIndices::new(Rope::from("foobar")).substring(3, 2), ""); } #[test] fn test_start_and_end_equal() { - assert_eq!( - WithIndices::new(&MemoryPool::default(), Rope::from("foobar")) - .substring(3, 3), - "" - ); + assert_eq!(WithIndices::new(Rope::from("foobar")).substring(3, 3), ""); } #[test] fn test_multiple_byte_characters() { assert_eq!( - 
WithIndices::new(&MemoryPool::default(), Rope::from("fõøbα®")) - .substring(2, 5), + WithIndices::new(Rope::from("fõøbα®")).substring(2, 5), "øbα" ); } From 2349f1534e28b394c020fa2b8c3a63d01426df7c Mon Sep 17 00:00:00 2001 From: Cong-Cong Date: Thu, 30 Oct 2025 16:48:42 +0800 Subject: [PATCH 07/22] refactor: object pool --- benches/bench_complex_replace_source.rs | 9 ++- .../benchmark_repetitive_react_components.rs | 12 +-- src/helpers.rs | 4 +- src/lib.rs | 4 +- src/{memory_pool.rs => object_pool.rs} | 79 +++++++++++++------ src/replace_source.rs | 2 +- src/with_indices.rs | 19 +++-- tests/compat_source.rs | 6 +- 8 files changed, 89 insertions(+), 46 deletions(-) rename src/{memory_pool.rs => object_pool.rs} (51%) diff --git a/benches/bench_complex_replace_source.rs b/benches/bench_complex_replace_source.rs index 60d8502f..a95a7204 100644 --- a/benches/bench_complex_replace_source.rs +++ b/benches/bench_complex_replace_source.rs @@ -9,7 +9,8 @@ pub use criterion::*; pub use codspeed_criterion_compat::*; use rspack_sources::{ - BoxSource, MapOptions, OriginalSource, ReplaceSource, SourceExt, + using_object_pool, BoxSource, MapOptions, OriginalSource, ReplaceSource, + SourceExt, }; static LARGE_REPLACE_SOURCE: LazyLock = LazyLock::new(|| { @@ -36723,8 +36724,10 @@ static LARGE_REPLACE_SOURCE: LazyLock = LazyLock::new(|| { pub fn benchmark_complex_replace_source_map(b: &mut Bencher) { let source = LARGE_REPLACE_SOURCE.clone(); - b.iter(|| { - black_box(source.map(&MapOptions::default())); + using_object_pool(|| { + b.iter(|| { + black_box(source.map(&MapOptions::default())); + }); }); } diff --git a/benches/benchmark_repetitive_react_components.rs b/benches/benchmark_repetitive_react_components.rs index 96875033..f73efa30 100644 --- a/benches/benchmark_repetitive_react_components.rs +++ b/benches/benchmark_repetitive_react_components.rs @@ -9,9 +9,9 @@ pub use criterion::*; pub use codspeed_criterion_compat::*; use rspack_sources::{ - BoxSource, ConcatSource, 
MapOptions, OriginalSource, RawStringSource, - ReplaceSource, ReplacementEnforce, Source, SourceExt, SourceMap, - SourceMapSource, SourceMapSourceOptions, + using_object_pool, BoxSource, ConcatSource, MapOptions, OriginalSource, + RawStringSource, ReplaceSource, ReplacementEnforce, Source, SourceExt, + SourceMap, SourceMapSource, SourceMapSourceOptions, }; static REPETITIVE_1K_REACT_COMPONENTS_SOURCE: LazyLock = @@ -3504,8 +3504,10 @@ static REPETITIVE_1K_REACT_COMPONENTS_SOURCE: LazyLock = pub fn benchmark_repetitive_react_components_map(b: &mut Bencher) { let source = REPETITIVE_1K_REACT_COMPONENTS_SOURCE.clone(); - b.iter(|| { - black_box(source.map(&MapOptions::default())); + using_object_pool(|| { + b.iter(|| { + black_box(source.map(&MapOptions::default())); + }); }); } diff --git a/src/helpers.rs b/src/helpers.rs index 89c94ac4..0215f933 100644 --- a/src/helpers.rs +++ b/src/helpers.rs @@ -833,7 +833,7 @@ where match inner_source_contents.get(&inner_source_index) { Some(Some(source_content)) => Some( split_into_lines(source_content) - .map(|line| WithIndices::new(line)) + .map(WithIndices::new) .collect(), ), _ => None, @@ -933,7 +933,7 @@ where match inner_source_contents.get(&inner_source_index) { Some(Some(source_content)) => Some( split_into_lines(source_content) - .map(|line| WithIndices::new(line)) + .map(WithIndices::new) .collect(), ), _ => None, diff --git a/src/lib.rs b/src/lib.rs index 11ee1d07..3dd711a6 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -7,7 +7,7 @@ mod encoder; mod error; mod helpers; mod linear_map; -mod memory_pool; +mod object_pool; mod original_source; mod raw_source; mod replace_source; @@ -40,3 +40,5 @@ pub mod stream_chunks { } pub use helpers::{decode_mappings, encode_mappings}; + +pub use object_pool::using_object_pool; diff --git a/src/memory_pool.rs b/src/object_pool.rs similarity index 51% rename from src/memory_pool.rs rename to src/object_pool.rs index 3a56d23d..e7287c81 100644 --- a/src/memory_pool.rs +++ 
b/src/object_pool.rs @@ -1,6 +1,4 @@ -use std::{ - cell::RefCell, collections::BTreeMap, rc::Rc, sync::atomic::AtomicBool, -}; +use std::{cell::RefCell, collections::BTreeMap, rc::Rc}; // Vector pooling minimum capacity threshold // Recommended threshold: 64 @@ -11,7 +9,7 @@ use std::{ // 4. Empirical value: 64 is a proven balance point in real projects const MIN_POOL_CAPACITY: usize = 64; -trait Poolable { +pub trait Poolable { fn with_capacity(capacity: usize) -> Self; fn capacity(&self) -> usize; fn clear(&mut self); @@ -34,50 +32,64 @@ impl Poolable for Vec { /// A memory pool for reusing `T` allocations to reduce memory allocation overhead. #[derive(Default, Debug)] pub struct ObjectPool { - usize_vec_pool: RefCell>>, + objects: Rc>>>, +} + +impl Clone for ObjectPool { + fn clone(&self) -> Self { + Self { + objects: self.objects.clone(), + } + } } impl ObjectPool { /// Retrieves a reusable `T` from the pool with at least the requested capacity. pub fn pull(&self, requested_capacity: usize) -> T { if requested_capacity < MIN_POOL_CAPACITY - || self.usize_vec_pool.borrow().is_empty() + || self.objects.borrow().is_empty() { return T::with_capacity(requested_capacity); } - let mut usize_vec_pool = self.usize_vec_pool.borrow_mut(); - if let Some((_, bucket)) = - usize_vec_pool.range_mut(requested_capacity..).next() - { - if let Some(mut v) = bucket.pop() { - v.clear(); - return v; + let mut objects = self.objects.borrow_mut(); + if let Some((_, bucket)) = objects.range_mut(requested_capacity..).next() { + if let Some(mut object) = bucket.pop() { + object.clear(); + return object; } } T::with_capacity(requested_capacity) } /// Returns a `T` to the pool for future reuse. 
- pub fn ret(&self, object: T) { + pub fn return_to_pool(&self, object: T) { if object.capacity() < MIN_POOL_CAPACITY { return; } - let mut usize_vec_pool = self.usize_vec_pool.borrow_mut(); + let mut objects = self.objects.borrow_mut(); let cap = object.capacity(); - let bucket = usize_vec_pool.entry(cap).or_default(); + let bucket = objects.entry(cap).or_default(); bucket.push(object); } } +/// A smart pointer that holds a pooled object and automatically returns it to the pool when dropped. +/// +/// `Pooled` implements RAII (Resource Acquisition Is Initialization) pattern to manage +/// pooled objects lifecycle. When the `Pooled` instance is dropped, the contained object +/// is automatically returned to its associated pool for future reuse. #[derive(Debug)] pub struct Pooled { object: Option, - pool: Rc>, + pool: Option>, } impl Pooled { - fn new(pool: Rc>, requested_capacity: usize) -> Self { - let object = pool.pull(requested_capacity); + pub fn new(pool: Option>, requested_capacity: usize) -> Self { + let object = match &pool { + Some(pool) => pool.pull(requested_capacity), + None => T::with_capacity(requested_capacity), + }; Self { object: Some(object), pool, @@ -96,7 +108,9 @@ impl Pooled { impl Drop for Pooled { fn drop(&mut self) { if let Some(object) = self.object.take() { - self.pool.ret(object); + if let Some(pool) = &self.pool { + pool.return_to_pool(object); + } } } } @@ -115,14 +129,33 @@ impl std::ops::DerefMut for Pooled { } } -pub(crate) const USING_OBJECT_POOL: AtomicBool = AtomicBool::new(false); +thread_local! { + pub static USIZE_VEC_POOL: RefCell>>> = RefCell::default(); +} +/// Executes a function with object pooling enabled for the current thread. +/// +/// This function temporarily enables a thread-local object pool for `Vec` allocations, +/// executes the provided closure, and then cleans up the pool to prevent memory leaks. 
pub fn using_object_pool(f: F) -> R where F: FnOnce() -> R, { - USING_OBJECT_POOL.store(true, std::sync::atomic::Ordering::SeqCst); + // Initialize the thread-local pool if needed + USIZE_VEC_POOL.with(|pool| { + let mut pool_ref = pool.borrow_mut(); + if pool_ref.is_none() { + *pool_ref = Some(ObjectPool::default()); + } + }); + let result = f(); - USING_OBJECT_POOL.store(false, std::sync::atomic::Ordering::SeqCst); + + // Clean up the pool to prevent memory retention + // This ensures no memory is held between different pooling sessions + USIZE_VEC_POOL.with(|pool| { + pool.borrow_mut().take(); + }); + result } diff --git a/src/replace_source.rs b/src/replace_source.rs index 87d1bd58..490d7ce5 100644 --- a/src/replace_source.rs +++ b/src/replace_source.rs @@ -391,7 +391,7 @@ impl StreamChunks for ReplaceSource { match source_content { SourceContent::Raw(source) => { let lines = split_into_lines(source) - .map(|line| WithIndices::new(line)) + .map(WithIndices::new) .collect::>(); let matched = check_content_at_position(&lines, line, column, expected_chunk); diff --git a/src/with_indices.rs b/src/with_indices.rs index e917474f..afdcceda 100644 --- a/src/with_indices.rs +++ b/src/with_indices.rs @@ -1,6 +1,9 @@ use std::{cell::OnceCell, marker::PhantomData}; -use crate::helpers::SourceText; +use crate::{ + helpers::SourceText, + object_pool::{Pooled, USIZE_VEC_POOL}, +}; #[derive(Debug)] pub struct WithIndices<'text, S> @@ -10,7 +13,7 @@ where /// line is a string reference pub line: S, /// the byte position of each `char` in `line` string slice . 
- pub indices_indexes: OnceCell>, + pub char_byte_indices: OnceCell>>, data: PhantomData<&'text S>, } @@ -20,7 +23,7 @@ where { pub fn new(line: S) -> Self { Self { - indices_indexes: OnceCell::new(), + char_byte_indices: OnceCell::new(), line, data: PhantomData, } @@ -32,15 +35,17 @@ where return S::default(); } - let indices_indexes = self.indices_indexes.get_or_init(|| { - let mut vec = PooledUsizeVec::new(self.line.len()); + let char_byte_indices = self.char_byte_indices.get_or_init(|| { + let mut vec = USIZE_VEC_POOL.with(|pool| { + Pooled::new(pool.borrow().as_ref().cloned(), self.line.len()) + }); vec.extend(self.line.char_indices().map(|(i, _)| i)); vec }); let str_len = self.line.len(); - let start = *indices_indexes.get(start_index).unwrap_or(&str_len); - let end = *indices_indexes.get(end_index).unwrap_or(&str_len); + let start = *char_byte_indices.get(start_index).unwrap_or(&str_len); + let end = *char_byte_indices.get(end_index).unwrap_or(&str_len); #[allow(unsafe_code)] unsafe { diff --git a/tests/compat_source.rs b/tests/compat_source.rs index b3257f3c..5cc97576 100644 --- a/tests/compat_source.rs +++ b/tests/compat_source.rs @@ -6,8 +6,8 @@ use rspack_sources::stream_chunks::{ stream_chunks_default, GeneratedInfo, OnChunk, OnName, OnSource, StreamChunks, }; use rspack_sources::{ - ConcatSource, MapOptions, MemoryPool, RawStringSource, Rope, Source, - SourceExt, SourceMap, SourceValue, + ConcatSource, MapOptions, RawStringSource, Rope, Source, SourceExt, + SourceMap, SourceValue, }; #[derive(Debug, Eq)] @@ -42,14 +42,12 @@ impl Source for CompatSource { impl StreamChunks for CompatSource { fn stream_chunks<'a>( &'a self, - memory_pool: &'a MemoryPool, options: &MapOptions, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, ) -> GeneratedInfo { stream_chunks_default( - memory_pool, self.0, self.1.as_ref(), options, From 5908bef3eba717c6f5df9b13348f47d8ab909e63 Mon Sep 17 00:00:00 2001 From: Cong-Cong Date: Thu, 30 Oct 
2025 17:44:18 +0800 Subject: [PATCH 08/22] refactor: using_object_pool --- benches/bench.rs | 18 ++++++- benches/bench_complex_replace_source.rs | 10 ++++ .../benchmark_repetitive_react_components.rs | 10 ++++ src/helpers.rs | 9 +++- src/object_pool.rs | 47 ++++++++++++++----- src/with_indices.rs | 7 +-- 6 files changed, 84 insertions(+), 17 deletions(-) diff --git a/benches/bench.rs b/benches/bench.rs index febed45a..3eda0282 100644 --- a/benches/bench.rs +++ b/benches/bench.rs @@ -30,7 +30,13 @@ use benchmark_repetitive_react_components::{ benchmark_repetitive_react_components_source, }; -use crate::bench_complex_replace_source::benchmark_complex_replace_source_size; +use crate::{ + bench_complex_replace_source::{ + benchmark_complex_replace_source_map_in_using_object_pool, + benchmark_complex_replace_source_size, + }, + benchmark_repetitive_react_components::benchmark_repetitive_react_components_map_in_using_object_pool, +}; const HELLOWORLD_JS: &str = include_str!(concat!( env!("CARGO_MANIFEST_DIR"), @@ -161,6 +167,11 @@ fn bench_rspack_sources(criterion: &mut Criterion) { benchmark_complex_replace_source_map, ); + group.bench_function( + "complex_replace_source_map_in_using_object_pool", + benchmark_complex_replace_source_map_in_using_object_pool, + ); + group.bench_function( "complex_replace_source_source", benchmark_complex_replace_source_source, @@ -188,6 +199,11 @@ fn bench_rspack_sources(criterion: &mut Criterion) { benchmark_repetitive_react_components_map, ); + group.bench_function( + "repetitive_react_components_map_in_using_object_pool", + benchmark_repetitive_react_components_map_in_using_object_pool, + ); + group.bench_function( "repetitive_react_components_source", benchmark_repetitive_react_components_source, diff --git a/benches/bench_complex_replace_source.rs b/benches/bench_complex_replace_source.rs index 7cd6a946..6d541712 100644 --- a/benches/bench_complex_replace_source.rs +++ b/benches/bench_complex_replace_source.rs @@ -36724,6 +36724,16 
@@ static LARGE_REPLACE_SOURCE: LazyLock = LazyLock::new(|| { pub fn benchmark_complex_replace_source_map(b: &mut Bencher) { let source = LARGE_REPLACE_SOURCE.clone(); + b.iter(|| { + black_box(source.map(&MapOptions::default())); + }); +} + +pub fn benchmark_complex_replace_source_map_in_using_object_pool( + b: &mut Bencher, +) { + let source = LARGE_REPLACE_SOURCE.clone(); + using_object_pool(|| { b.iter(|| { black_box(source.map(&MapOptions::default())); diff --git a/benches/benchmark_repetitive_react_components.rs b/benches/benchmark_repetitive_react_components.rs index f73efa30..5e3bb6a1 100644 --- a/benches/benchmark_repetitive_react_components.rs +++ b/benches/benchmark_repetitive_react_components.rs @@ -3504,6 +3504,16 @@ static REPETITIVE_1K_REACT_COMPONENTS_SOURCE: LazyLock = pub fn benchmark_repetitive_react_components_map(b: &mut Bencher) { let source = REPETITIVE_1K_REACT_COMPONENTS_SOURCE.clone(); + b.iter(|| { + black_box(source.map(&MapOptions::default())); + }); +} + +pub fn benchmark_repetitive_react_components_map_in_using_object_pool( + b: &mut Bencher, +) { + let source = REPETITIVE_1K_REACT_COMPONENTS_SOURCE.clone(); + using_object_pool(|| { b.iter(|| { black_box(source.map(&MapOptions::default())); diff --git a/src/helpers.rs b/src/helpers.rs index 0215f933..5f420cd8 100644 --- a/src/helpers.rs +++ b/src/helpers.rs @@ -11,6 +11,7 @@ use crate::{ decoder::MappingsDecoder, encoder::create_encoder, linear_map::LinearMap, + object_pool::cleanup_idle_object_pool, source::{Mapping, OriginalLocation}, with_indices::WithIndices, MapOptions, Rope, SourceMap, @@ -62,8 +63,12 @@ pub fn get_map<'a, S: StreamChunks>( }, ); let mappings = mappings_encoder.drain(); - (!mappings.is_empty()) - .then(|| SourceMap::new(mappings, sources, sources_content, names)) + let source_map = (!mappings.is_empty()) + .then(|| SourceMap::new(mappings, sources, sources_content, names)); + + cleanup_idle_object_pool(); + + source_map } /// [StreamChunks] abstraction, see 
[webpack-sources source.streamChunks](https://github.com/webpack/webpack-sources/blob/9f98066311d53a153fdc7c633422a1d086528027/lib/helpers/streamChunks.js#L13). diff --git a/src/object_pool.rs b/src/object_pool.rs index e7287c81..c375b278 100644 --- a/src/object_pool.rs +++ b/src/object_pool.rs @@ -1,4 +1,9 @@ -use std::{cell::RefCell, collections::BTreeMap, rc::Rc}; +use std::{ + cell::{OnceCell, RefCell}, + collections::BTreeMap, + rc::Rc, + sync::atomic::AtomicBool, +}; // Vector pooling minimum capacity threshold // Recommended threshold: 64 @@ -71,6 +76,10 @@ impl ObjectPool { let bucket = objects.entry(cap).or_default(); bucket.push(object); } + + pub fn clear(&self) { + self.objects.borrow_mut().clear(); + } } /// A smart pointer that holds a pooled object and automatically returns it to the pool when dropped. @@ -130,9 +139,11 @@ impl std::ops::DerefMut for Pooled { } thread_local! { - pub static USIZE_VEC_POOL: RefCell>>> = RefCell::default(); + pub static USIZE_VEC_POOL: OnceCell>> = OnceCell::default(); } +pub static IN_USING_OBJECT_POOL: AtomicBool = AtomicBool::new(false); + /// Executes a function with object pooling enabled for the current thread. 
/// /// This function temporarily enables a thread-local object pool for `Vec` allocations, @@ -141,21 +152,35 @@ pub fn using_object_pool(f: F) -> R where F: FnOnce() -> R, { + IN_USING_OBJECT_POOL.store(true, std::sync::atomic::Ordering::Relaxed); // Initialize the thread-local pool if needed - USIZE_VEC_POOL.with(|pool| { - let mut pool_ref = pool.borrow_mut(); - if pool_ref.is_none() { - *pool_ref = Some(ObjectPool::default()); - } + USIZE_VEC_POOL.with(|once_cell| { + once_cell.get_or_init(ObjectPool::default); }); let result = f(); - // Clean up the pool to prevent memory retention - // This ensures no memory is held between different pooling sessions - USIZE_VEC_POOL.with(|pool| { - pool.borrow_mut().take(); + IN_USING_OBJECT_POOL.store(false, std::sync::atomic::Ordering::Relaxed); + USIZE_VEC_POOL.with(|once_cell| { + if let Some(pool) = once_cell.get() { + pool.clear(); + } }); result } + +/// Cleans up the object pool when not in pooling mode to prevent memory retention. +/// +/// This function is called automatically after map operations complete to ensure +/// that memory is not retained unnecessarily outside of pooling contexts. 
+pub fn cleanup_idle_object_pool() { + // Only clear if we're not in an explicit pooling context + if !IN_USING_OBJECT_POOL.load(std::sync::atomic::Ordering::Relaxed) { + USIZE_VEC_POOL.with(|once_cell| { + if let Some(pool) = once_cell.get() { + pool.clear(); + } + }); + } +} diff --git a/src/with_indices.rs b/src/with_indices.rs index afdcceda..72dbbce1 100644 --- a/src/with_indices.rs +++ b/src/with_indices.rs @@ -2,7 +2,7 @@ use std::{cell::OnceCell, marker::PhantomData}; use crate::{ helpers::SourceText, - object_pool::{Pooled, USIZE_VEC_POOL}, + object_pool::{ObjectPool, Pooled, USIZE_VEC_POOL}, }; #[derive(Debug)] @@ -36,8 +36,9 @@ where } let char_byte_indices = self.char_byte_indices.get_or_init(|| { - let mut vec = USIZE_VEC_POOL.with(|pool| { - Pooled::new(pool.borrow().as_ref().cloned(), self.line.len()) + let mut vec = USIZE_VEC_POOL.with(|once_cell| { + let pool = once_cell.get_or_init(ObjectPool::default); + Pooled::new(Some(pool.clone()), self.line.len()) }); vec.extend(self.line.char_indices().map(|(i, _)| i)); vec From a3ea2f60f7d30ff6fe9fae376640ecdc5eb01b41 Mon Sep 17 00:00:00 2001 From: Cong-Cong Date: Thu, 30 Oct 2025 17:55:30 +0800 Subject: [PATCH 09/22] revert unused code --- src/helpers.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/helpers.rs b/src/helpers.rs index 5f420cd8..d2020699 100644 --- a/src/helpers.rs +++ b/src/helpers.rs @@ -334,14 +334,12 @@ where MapOptions { columns: false, final_source: true, - .. } => stream_chunks_of_source_map_lines_final( source, source_map, on_chunk, on_source, on_name, ), MapOptions { columns: false, final_source: false, - .. 
} => stream_chunks_of_source_map_lines_full( source, source_map, on_chunk, on_source, on_name, ), @@ -430,8 +428,7 @@ where S: SourceText<'a> + 'a, { let lines = split_into_lines(&source); - let line_with_indices_list = - lines.map(|line| WithIndices::new(line)).collect::>(); + let line_with_indices_list = lines.map(WithIndices::new).collect::>(); if line_with_indices_list.is_empty() { return GeneratedInfo { From dd3c27ac7edaabd36c4962bc9caa209a31ae8a90 Mon Sep 17 00:00:00 2001 From: Cong-Cong Date: Thu, 30 Oct 2025 19:31:17 +0800 Subject: [PATCH 10/22] fix: benchmark case --- benches/bench_complex_replace_source.rs | 3 +++ benches/benchmark_repetitive_react_components.rs | 3 +++ 2 files changed, 6 insertions(+) diff --git a/benches/bench_complex_replace_source.rs b/benches/bench_complex_replace_source.rs index 6d541712..9c6d8d82 100644 --- a/benches/bench_complex_replace_source.rs +++ b/benches/bench_complex_replace_source.rs @@ -36735,6 +36735,9 @@ pub fn benchmark_complex_replace_source_map_in_using_object_pool( let source = LARGE_REPLACE_SOURCE.clone(); using_object_pool(|| { + // Warm up object pool + black_box(source.map(&MapOptions::default())); + b.iter(|| { black_box(source.map(&MapOptions::default())); }); diff --git a/benches/benchmark_repetitive_react_components.rs b/benches/benchmark_repetitive_react_components.rs index 5e3bb6a1..83b1498b 100644 --- a/benches/benchmark_repetitive_react_components.rs +++ b/benches/benchmark_repetitive_react_components.rs @@ -3515,6 +3515,9 @@ pub fn benchmark_repetitive_react_components_map_in_using_object_pool( let source = REPETITIVE_1K_REACT_COMPONENTS_SOURCE.clone(); using_object_pool(|| { + // Warm up object pool + black_box(source.map(&MapOptions::default())); + b.iter(|| { black_box(source.map(&MapOptions::default())); }); From cbd72e638a5b299dc693ef90756dc2c823557f6e Mon Sep 17 00:00:00 2001 From: Cong-Cong Date: Thu, 30 Oct 2025 20:14:54 +0800 Subject: [PATCH 11/22] remove using object pool --- benches/bench.rs 
| 21 +--------- benches/bench_complex_replace_source.rs | 18 +-------- .../benchmark_repetitive_react_components.rs | 21 ++-------- src/helpers.rs | 4 +- src/lib.rs | 2 - src/object_pool.rs | 38 +------------------ 6 files changed, 10 insertions(+), 94 deletions(-) diff --git a/benches/bench.rs b/benches/bench.rs index 3eda0282..5600fb44 100644 --- a/benches/bench.rs +++ b/benches/bench.rs @@ -18,7 +18,8 @@ use rspack_sources::{ }; use bench_complex_replace_source::{ - benchmark_complex_replace_source_map, benchmark_complex_replace_source_source, + benchmark_complex_replace_source_map, benchmark_complex_replace_source_size, + benchmark_complex_replace_source_source, }; use bench_source_map::{ benchmark_parse_source_map_from_json, benchmark_source_map_clone, @@ -30,14 +31,6 @@ use benchmark_repetitive_react_components::{ benchmark_repetitive_react_components_source, }; -use crate::{ - bench_complex_replace_source::{ - benchmark_complex_replace_source_map_in_using_object_pool, - benchmark_complex_replace_source_size, - }, - benchmark_repetitive_react_components::benchmark_repetitive_react_components_map_in_using_object_pool, -}; - const HELLOWORLD_JS: &str = include_str!(concat!( env!("CARGO_MANIFEST_DIR"), "/benches/fixtures/transpile-minify/files/helloworld.js" @@ -167,11 +160,6 @@ fn bench_rspack_sources(criterion: &mut Criterion) { benchmark_complex_replace_source_map, ); - group.bench_function( - "complex_replace_source_map_in_using_object_pool", - benchmark_complex_replace_source_map_in_using_object_pool, - ); - group.bench_function( "complex_replace_source_source", benchmark_complex_replace_source_source, @@ -199,11 +187,6 @@ fn bench_rspack_sources(criterion: &mut Criterion) { benchmark_repetitive_react_components_map, ); - group.bench_function( - "repetitive_react_components_map_in_using_object_pool", - benchmark_repetitive_react_components_map_in_using_object_pool, - ); - group.bench_function( "repetitive_react_components_source", 
benchmark_repetitive_react_components_source, diff --git a/benches/bench_complex_replace_source.rs b/benches/bench_complex_replace_source.rs index 9c6d8d82..2439e66e 100644 --- a/benches/bench_complex_replace_source.rs +++ b/benches/bench_complex_replace_source.rs @@ -9,8 +9,7 @@ pub use criterion::*; pub use codspeed_criterion_compat::*; use rspack_sources::{ - using_object_pool, BoxSource, MapOptions, OriginalSource, ReplaceSource, - SourceExt, + BoxSource, MapOptions, OriginalSource, ReplaceSource, SourceExt, }; static LARGE_REPLACE_SOURCE: LazyLock = LazyLock::new(|| { @@ -36729,21 +36728,6 @@ pub fn benchmark_complex_replace_source_map(b: &mut Bencher) { }); } -pub fn benchmark_complex_replace_source_map_in_using_object_pool( - b: &mut Bencher, -) { - let source = LARGE_REPLACE_SOURCE.clone(); - - using_object_pool(|| { - // Warm up object pool - black_box(source.map(&MapOptions::default())); - - b.iter(|| { - black_box(source.map(&MapOptions::default())); - }); - }); -} - pub fn benchmark_complex_replace_source_source(b: &mut Bencher) { let source = LARGE_REPLACE_SOURCE.clone(); diff --git a/benches/benchmark_repetitive_react_components.rs b/benches/benchmark_repetitive_react_components.rs index 83b1498b..96875033 100644 --- a/benches/benchmark_repetitive_react_components.rs +++ b/benches/benchmark_repetitive_react_components.rs @@ -9,9 +9,9 @@ pub use criterion::*; pub use codspeed_criterion_compat::*; use rspack_sources::{ - using_object_pool, BoxSource, ConcatSource, MapOptions, OriginalSource, - RawStringSource, ReplaceSource, ReplacementEnforce, Source, SourceExt, - SourceMap, SourceMapSource, SourceMapSourceOptions, + BoxSource, ConcatSource, MapOptions, OriginalSource, RawStringSource, + ReplaceSource, ReplacementEnforce, Source, SourceExt, SourceMap, + SourceMapSource, SourceMapSourceOptions, }; static REPETITIVE_1K_REACT_COMPONENTS_SOURCE: LazyLock = @@ -3509,21 +3509,6 @@ pub fn benchmark_repetitive_react_components_map(b: &mut Bencher) { }); } -pub 
fn benchmark_repetitive_react_components_map_in_using_object_pool( - b: &mut Bencher, -) { - let source = REPETITIVE_1K_REACT_COMPONENTS_SOURCE.clone(); - - using_object_pool(|| { - // Warm up object pool - black_box(source.map(&MapOptions::default())); - - b.iter(|| { - black_box(source.map(&MapOptions::default())); - }); - }); -} - pub fn benchmark_repetitive_react_components_source(b: &mut Bencher) { let source = REPETITIVE_1K_REACT_COMPONENTS_SOURCE.clone(); diff --git a/src/helpers.rs b/src/helpers.rs index d2020699..5926c35c 100644 --- a/src/helpers.rs +++ b/src/helpers.rs @@ -11,7 +11,7 @@ use crate::{ decoder::MappingsDecoder, encoder::create_encoder, linear_map::LinearMap, - object_pool::cleanup_idle_object_pool, + object_pool::cleanup_object_pool, source::{Mapping, OriginalLocation}, with_indices::WithIndices, MapOptions, Rope, SourceMap, @@ -66,7 +66,7 @@ pub fn get_map<'a, S: StreamChunks>( let source_map = (!mappings.is_empty()) .then(|| SourceMap::new(mappings, sources, sources_content, names)); - cleanup_idle_object_pool(); + cleanup_object_pool(); source_map } diff --git a/src/lib.rs b/src/lib.rs index 3dd711a6..eb274fce 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -40,5 +40,3 @@ pub mod stream_chunks { } pub use helpers::{decode_mappings, encode_mappings}; - -pub use object_pool::using_object_pool; diff --git a/src/object_pool.rs b/src/object_pool.rs index c375b278..52c60545 100644 --- a/src/object_pool.rs +++ b/src/object_pool.rs @@ -142,45 +142,11 @@ thread_local! { pub static USIZE_VEC_POOL: OnceCell>> = OnceCell::default(); } -pub static IN_USING_OBJECT_POOL: AtomicBool = AtomicBool::new(false); - -/// Executes a function with object pooling enabled for the current thread. -/// -/// This function temporarily enables a thread-local object pool for `Vec` allocations, -/// executes the provided closure, and then cleans up the pool to prevent memory leaks. 
-pub fn using_object_pool(f: F) -> R -where - F: FnOnce() -> R, -{ - IN_USING_OBJECT_POOL.store(true, std::sync::atomic::Ordering::Relaxed); - // Initialize the thread-local pool if needed - USIZE_VEC_POOL.with(|once_cell| { - once_cell.get_or_init(ObjectPool::default); - }); - - let result = f(); - - IN_USING_OBJECT_POOL.store(false, std::sync::atomic::Ordering::Relaxed); +/// Cleans up the object pool when not in pooling mode to prevent memory retention. +pub fn cleanup_object_pool() { USIZE_VEC_POOL.with(|once_cell| { if let Some(pool) = once_cell.get() { pool.clear(); } }); - - result -} - -/// Cleans up the object pool when not in pooling mode to prevent memory retention. -/// -/// This function is called automatically after map operations complete to ensure -/// that memory is not retained unnecessarily outside of pooling contexts. -pub fn cleanup_idle_object_pool() { - // Only clear if we're not in an explicit pooling context - if !IN_USING_OBJECT_POOL.load(std::sync::atomic::Ordering::Relaxed) { - USIZE_VEC_POOL.with(|once_cell| { - if let Some(pool) = once_cell.get() { - pool.clear(); - } - }); - } } From d59ddfb5d9121ee5d249517cbabe9939ccdf170c Mon Sep 17 00:00:00 2001 From: Cong-Cong Date: Fri, 31 Oct 2025 11:58:01 +0800 Subject: [PATCH 12/22] fix cargo lint --- src/object_pool.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/object_pool.rs b/src/object_pool.rs index 52c60545..c77bd66b 100644 --- a/src/object_pool.rs +++ b/src/object_pool.rs @@ -2,7 +2,6 @@ use std::{ cell::{OnceCell, RefCell}, collections::BTreeMap, rc::Rc, - sync::atomic::AtomicBool, }; // Vector pooling minimum capacity threshold From b65b51a09f1bdfbf7818d0f5c2020346a876c62c Mon Sep 17 00:00:00 2001 From: Cong-Cong Date: Fri, 31 Oct 2025 16:54:06 +0800 Subject: [PATCH 13/22] refactor: by THREAD_ISOLATED_MAP --- Cargo.lock | 70 +++++++++++++++++++++++++++++++++++++++++++++ Cargo.toml | 1 + src/helpers.rs | 4 +-- src/object_pool.rs | 36 ++++++++++++++++------- src/with_indices.rs 
| 7 ++--- 5 files changed, 101 insertions(+), 17 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cbabaca0..d7bd5b08 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -195,6 +195,26 @@ dependencies = [ "itertools", ] +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "dashmap" +version = "6.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" +dependencies = [ + "cfg-if", + "crossbeam-utils", + "hashbrown", + "lock_api", + "once_cell", + "parking_lot_core", +] + [[package]] name = "dyn-clone" version = "1.0.17" @@ -324,6 +344,15 @@ version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "969488b55f8ac402214f3f5fd243ebb7206cf82de60d3172994707a4bcc2b829" +[[package]] +name = "lock_api" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" +dependencies = [ + "scopeguard", +] + [[package]] name = "log" version = "0.4.22" @@ -357,6 +386,19 @@ version = "11.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" +[[package]] +name = "parking_lot_core" +version = "0.9.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-link", +] + [[package]] name = "ppv-lite86" version = "0.2.20" @@ -414,6 +456,15 @@ dependencies = [ "getrandom", ] +[[package]] +name = "redox_syscall" +version = "0.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" +dependencies = [ + "bitflags", +] + [[package]] name = "ref-cast" version = "1.0.23" @@ -469,6 +520,7 @@ version = "0.4.13" dependencies = [ "codspeed-criterion-compat", "criterion", + "dashmap", "dyn-clone", "memchr", "regex", @@ -514,6 +566,12 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + [[package]] name = "serde" version = "1.0.216" @@ -567,6 +625,12 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f27f6278552951f1f2b8cf9da965d10969b2efdea95a6ec47987ab46edfe263a" +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + [[package]] name = "static_assertions" version = "1.1.0" @@ -729,6 +793,12 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + [[package]] name = "windows-sys" version = "0.48.0" diff --git a/Cargo.toml b/Cargo.toml index 1d4c1441..1888a641 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -40,6 +40,7 @@ memchr = "2.7.4" codspeed-criterion-compat = { version = "2.7.2", default-features = false, optional = true } static_assertions = "1.1.0" simd-json = "0.14.3" +dashmap = "6.1.0" [dev-dependencies] twox-hash = "2.1.0" diff --git a/src/helpers.rs b/src/helpers.rs index 5926c35c..4ae35cd7 100644 --- a/src/helpers.rs +++ b/src/helpers.rs @@ -11,7 +11,7 @@ use crate::{ decoder::MappingsDecoder, 
encoder::create_encoder, linear_map::LinearMap, - object_pool::cleanup_object_pool, + object_pool::clear_current_thread_object_pool, source::{Mapping, OriginalLocation}, with_indices::WithIndices, MapOptions, Rope, SourceMap, @@ -66,7 +66,7 @@ pub fn get_map<'a, S: StreamChunks>( let source_map = (!mappings.is_empty()) .then(|| SourceMap::new(mappings, sources, sources_content, names)); - cleanup_object_pool(); + clear_current_thread_object_pool(); source_map } diff --git a/src/object_pool.rs b/src/object_pool.rs index c77bd66b..9bf89850 100644 --- a/src/object_pool.rs +++ b/src/object_pool.rs @@ -1,9 +1,13 @@ use std::{ - cell::{OnceCell, RefCell}, + cell::RefCell, collections::BTreeMap, rc::Rc, + sync::{Arc, LazyLock}, + thread::ThreadId, }; +use dashmap::DashMap; + // Vector pooling minimum capacity threshold // Recommended threshold: 64 // Reasons: @@ -39,6 +43,13 @@ pub struct ObjectPool { objects: Rc>>>, } +// SAFETY: Each ObjectPool is only used within a single thread in rspack-sources, +// which is guaranteed by THREAD_ISOLATED_MAP. Therefore, it is safe to implement Send and Sync. +#[allow(unsafe_code)] +unsafe impl Send for ObjectPool {} +#[allow(unsafe_code)] +unsafe impl Sync for ObjectPool {} + impl Clone for ObjectPool { fn clone(&self) -> Self { Self { @@ -137,15 +148,20 @@ impl std::ops::DerefMut for Pooled { } } -thread_local! { - pub static USIZE_VEC_POOL: OnceCell>> = OnceCell::default(); -} +pub static THREAD_ISOLATED_MAP: LazyLock< + Arc>>>, +> = LazyLock::new(|| Arc::new(DashMap::new())); /// Cleans up the object pool when not in pooling mode to prevent memory retention. 
-pub fn cleanup_object_pool() { - USIZE_VEC_POOL.with(|once_cell| { - if let Some(pool) = once_cell.get() { - pool.clear(); - } - }); +pub fn clear_current_thread_object_pool() { + let thread_id = std::thread::current().id(); + if let Some(thread_isolated_map) = THREAD_ISOLATED_MAP.get(&thread_id) { + thread_isolated_map.value().clear(); + } +} + +pub fn pull_usize_vec(requested_capacity: usize) -> Pooled> { + let thread_id = std::thread::current().id(); + let pool = THREAD_ISOLATED_MAP.entry(thread_id).or_default(); + Pooled::new(Some(pool.clone()), requested_capacity) } diff --git a/src/with_indices.rs b/src/with_indices.rs index 72dbbce1..ece3058c 100644 --- a/src/with_indices.rs +++ b/src/with_indices.rs @@ -2,7 +2,7 @@ use std::{cell::OnceCell, marker::PhantomData}; use crate::{ helpers::SourceText, - object_pool::{ObjectPool, Pooled, USIZE_VEC_POOL}, + object_pool::{pull_usize_vec, Pooled}, }; #[derive(Debug)] @@ -36,10 +36,7 @@ where } let char_byte_indices = self.char_byte_indices.get_or_init(|| { - let mut vec = USIZE_VEC_POOL.with(|once_cell| { - let pool = once_cell.get_or_init(ObjectPool::default); - Pooled::new(Some(pool.clone()), self.line.len()) - }); + let mut vec = pull_usize_vec(self.line.len()); vec.extend(self.line.char_indices().map(|(i, _)| i)); vec }); From b6de355e1f5f360850c835c19dedbfe838a81609 Mon Sep 17 00:00:00 2001 From: Cong-Cong Date: Fri, 31 Oct 2025 17:22:56 +0800 Subject: [PATCH 14/22] refactor --- src/helpers.rs | 17 ++++++----- src/lib.rs | 2 ++ src/object_pool.rs | 72 +++++++++++++++++++++++++++++++++++++++------- 3 files changed, 74 insertions(+), 17 deletions(-) diff --git a/src/helpers.rs b/src/helpers.rs index 43f450a9..69c374c7 100644 --- a/src/helpers.rs +++ b/src/helpers.rs @@ -12,7 +12,7 @@ use crate::{ decoder::MappingsDecoder, encoder::create_encoder, linear_map::LinearMap, - object_pool::clear_current_thread_object_pool, + object_pool::with_current_thread_object_pool_scope, source::{Mapping, OriginalLocation}, 
source_content_lines::SourceContentLines, with_indices::WithIndices, @@ -22,6 +22,13 @@ use crate::{ pub fn get_map<'a, S: StreamChunks>( stream: &'a S, options: &'a MapOptions, +) -> Option { + with_current_thread_object_pool_scope(|| get_map_impl(stream, options)) +} + +pub fn get_map_impl<'a, S: StreamChunks>( + stream: &'a S, + options: &'a MapOptions, ) -> Option { let mut mappings_encoder = create_encoder(options.columns); let mut sources: Vec = Vec::new(); @@ -61,12 +68,8 @@ pub fn get_map<'a, S: StreamChunks>( }, ); let mappings = mappings_encoder.drain(); - let source_map = (!mappings.is_empty()) - .then(|| SourceMap::new(mappings, sources, sources_content, names)); - - clear_current_thread_object_pool(); - - source_map + (!mappings.is_empty()) + .then(|| SourceMap::new(mappings, sources, sources_content, names)) } /// [StreamChunks] abstraction, see [webpack-sources source.streamChunks](https://github.com/webpack/webpack-sources/blob/9f98066311d53a153fdc7c633422a1d086528027/lib/helpers/streamChunks.js#L13). diff --git a/src/lib.rs b/src/lib.rs index 189b04f9..7ae381cb 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -41,3 +41,5 @@ pub mod stream_chunks { } pub use helpers::{decode_mappings, encode_mappings}; + +pub use object_pool::with_object_pool_scope; diff --git a/src/object_pool.rs b/src/object_pool.rs index 9bf89850..c3354367 100644 --- a/src/object_pool.rs +++ b/src/object_pool.rs @@ -2,7 +2,7 @@ use std::{ cell::RefCell, collections::BTreeMap, rc::Rc, - sync::{Arc, LazyLock}, + sync::{atomic::AtomicBool, Arc, LazyLock}, thread::ThreadId, }; @@ -148,20 +148,72 @@ impl std::ops::DerefMut for Pooled { } } -pub static THREAD_ISOLATED_MAP: LazyLock< - Arc>>>, -> = LazyLock::new(|| Arc::new(DashMap::new())); +type ThreadIsolatedMap = Arc>)>>; -/// Cleans up the object pool when not in pooling mode to prevent memory retention. 
-pub fn clear_current_thread_object_pool() { +pub static THREAD_ISOLATED_MAP: LazyLock = + LazyLock::new(|| Arc::new(DashMap::new())); + +pub static IN_OBJECT_POOL_SCOPE: AtomicBool = AtomicBool::new(false); + +pub fn with_current_thread_object_pool_scope(f: F) -> R +where + F: FnOnce() -> R, +{ let thread_id = std::thread::current().id(); - if let Some(thread_isolated_map) = THREAD_ISOLATED_MAP.get(&thread_id) { - thread_isolated_map.value().clear(); + { + THREAD_ISOLATED_MAP + .entry(thread_id) + .or_default() + .value_mut() + .0 = true; } + + let result = f(); + { + if let Some(mut isolation) = THREAD_ISOLATED_MAP.get_mut(&thread_id) { + isolation.0 = false; + if !IN_OBJECT_POOL_SCOPE.load(std::sync::atomic::Ordering::Relaxed) { + isolation.1.clear(); + } + } + } + result } pub fn pull_usize_vec(requested_capacity: usize) -> Pooled> { let thread_id = std::thread::current().id(); - let pool = THREAD_ISOLATED_MAP.entry(thread_id).or_default(); - Pooled::new(Some(pool.clone()), requested_capacity) + let ref_multi = THREAD_ISOLATED_MAP.entry(thread_id).or_default(); + Pooled::new(Some(ref_multi.1.clone()), requested_capacity) +} + +/// Extends the lifetime of the object pool to the end of the provided closure, +/// instead of just the end of `source.map()`. This is primarily designed for +/// integration with parallel frameworks like rayon, ensuring the object pool +/// remains available throughout parallel tasks. 
+/// +/// # Example +/// ``` +/// with_object_pool_scope(|| { +/// sources.into_par_iter() +/// .map(|source| source.map(&MapOptions::default())) +/// .collect::>() +/// }); +/// ``` +pub fn with_object_pool_scope(f: F) -> R +where + F: FnOnce() -> R, +{ + IN_OBJECT_POOL_SCOPE.store(true, std::sync::atomic::Ordering::SeqCst); + + let result = f(); + + for ref_multi in THREAD_ISOLATED_MAP.iter() { + if !ref_multi.value().0 { + ref_multi.value().1.clear(); + } + } + + IN_OBJECT_POOL_SCOPE.store(false, std::sync::atomic::Ordering::SeqCst); + + result } From e454915294ab0055aaed27d42f7021c748414d49 Mon Sep 17 00:00:00 2001 From: Cong-Cong Date: Fri, 31 Oct 2025 17:25:33 +0800 Subject: [PATCH 15/22] benchmark_repetitive_react_components_map_with_object_pool_scope --- benches/bench.rs | 6 ++++++ .../benchmark_repetitive_react_components.rs | 21 ++++++++++++++++--- 2 files changed, 24 insertions(+), 3 deletions(-) diff --git a/benches/bench.rs b/benches/bench.rs index 0b8d96ae..8b708c4b 100644 --- a/benches/bench.rs +++ b/benches/bench.rs @@ -28,6 +28,7 @@ use bench_source_map::{ use benchmark_repetitive_react_components::{ benchmark_repetitive_react_components_map, + benchmark_repetitive_react_components_map_with_object_pool_scope, benchmark_repetitive_react_components_source, }; @@ -187,6 +188,11 @@ fn bench_rspack_sources(criterion: &mut Criterion) { benchmark_repetitive_react_components_map, ); + group.bench_function( + "repetitive_react_components_map_with_object_pool_scope", + benchmark_repetitive_react_components_map_with_object_pool_scope, + ); + group.bench_function( "repetitive_react_components_source", benchmark_repetitive_react_components_source, diff --git a/benches/benchmark_repetitive_react_components.rs b/benches/benchmark_repetitive_react_components.rs index 96875033..247bfd90 100644 --- a/benches/benchmark_repetitive_react_components.rs +++ b/benches/benchmark_repetitive_react_components.rs @@ -9,9 +9,9 @@ pub use criterion::*; pub use 
codspeed_criterion_compat::*; use rspack_sources::{ - BoxSource, ConcatSource, MapOptions, OriginalSource, RawStringSource, - ReplaceSource, ReplacementEnforce, Source, SourceExt, SourceMap, - SourceMapSource, SourceMapSourceOptions, + with_object_pool_scope, BoxSource, ConcatSource, MapOptions, OriginalSource, + RawStringSource, ReplaceSource, ReplacementEnforce, Source, SourceExt, + SourceMap, SourceMapSource, SourceMapSourceOptions, }; static REPETITIVE_1K_REACT_COMPONENTS_SOURCE: LazyLock = @@ -3516,3 +3516,18 @@ pub fn benchmark_repetitive_react_components_source(b: &mut Bencher) { black_box(source.source()); }); } + +pub fn benchmark_repetitive_react_components_map_with_object_pool_scope( + b: &mut Bencher, +) { + let source = REPETITIVE_1K_REACT_COMPONENTS_SOURCE.clone(); + + with_object_pool_scope(|| { + // Warm up the object pool + black_box(source.map(&MapOptions::default())); + + b.iter(|| { + black_box(source.map(&MapOptions::default())); + }); + }) +} From c95cb8388aed7d5ede18b261b64038716d2029f0 Mon Sep 17 00:00:00 2001 From: Cong-Cong Date: Fri, 31 Oct 2025 17:28:20 +0800 Subject: [PATCH 16/22] fix: cargo test --- Cargo.lock | 40 ++++++++++++++++++++++++++++++++++++++++ Cargo.toml | 1 + src/object_pool.rs | 5 +++++ 3 files changed, 46 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index 12b10e8b..02d1d030 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -195,6 +195,25 @@ dependencies = [ "itertools", ] +[[package]] +name = "crossbeam-deque" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "crossbeam-utils" 
version = "0.8.21" @@ -456,6 +475,26 @@ dependencies = [ "getrandom", ] +[[package]] +name = "rayon" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + [[package]] name = "redox_syscall" version = "0.5.18" @@ -523,6 +562,7 @@ dependencies = [ "dashmap", "dyn-clone", "memchr", + "rayon", "regex", "rustc-hash", "self_cell", diff --git a/Cargo.toml b/Cargo.toml index 67618ec0..62d3e31c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -47,6 +47,7 @@ self_cell = "1.2.1" twox-hash = "2.1.0" regex = "1.11.1" criterion = { version = "0.5.1", default-features = false } +rayon = "1.11.0" [features] codspeed = ["codspeed-criterion-compat"] diff --git a/src/object_pool.rs b/src/object_pool.rs index c3354367..8246cc67 100644 --- a/src/object_pool.rs +++ b/src/object_pool.rs @@ -193,6 +193,11 @@ pub fn pull_usize_vec(requested_capacity: usize) -> Pooled> { /// /// # Example /// ``` +/// use rspack_sources::{BoxSource, MapOptions, with_object_pool_scope}; +/// use rayon::prelude::*; +/// +/// let sources: Vec = vec![/* ... 
*/]; +/// /// with_object_pool_scope(|| { /// sources.into_par_iter() /// .map(|source| source.map(&MapOptions::default())) From 6ddd2b1cb8e90cc42127c1e1d951e79a164e9230 Mon Sep 17 00:00:00 2001 From: Cong-Cong Date: Sun, 2 Nov 2025 14:41:30 +0800 Subject: [PATCH 17/22] refactor --- benches/bench.rs | 6 - .../benchmark_repetitive_react_components.rs | 21 +-- src/cached_source.rs | 16 +- src/concat_source.rs | 11 +- src/helpers.rs | 70 +++++--- src/lib.rs | 2 +- src/object_pool.rs | 166 +++--------------- src/original_source.rs | 2 + src/raw_source.rs | 3 + src/replace_source.rs | 9 +- src/source.rs | 14 +- src/source_content_lines.rs | 32 ++-- src/source_map_source.rs | 8 +- src/with_indices.rs | 39 ++-- tests/compat_source.rs | 10 +- 15 files changed, 181 insertions(+), 228 deletions(-) diff --git a/benches/bench.rs b/benches/bench.rs index 8b708c4b..0b8d96ae 100644 --- a/benches/bench.rs +++ b/benches/bench.rs @@ -28,7 +28,6 @@ use bench_source_map::{ use benchmark_repetitive_react_components::{ benchmark_repetitive_react_components_map, - benchmark_repetitive_react_components_map_with_object_pool_scope, benchmark_repetitive_react_components_source, }; @@ -188,11 +187,6 @@ fn bench_rspack_sources(criterion: &mut Criterion) { benchmark_repetitive_react_components_map, ); - group.bench_function( - "repetitive_react_components_map_with_object_pool_scope", - benchmark_repetitive_react_components_map_with_object_pool_scope, - ); - group.bench_function( "repetitive_react_components_source", benchmark_repetitive_react_components_source, diff --git a/benches/benchmark_repetitive_react_components.rs b/benches/benchmark_repetitive_react_components.rs index 247bfd90..96875033 100644 --- a/benches/benchmark_repetitive_react_components.rs +++ b/benches/benchmark_repetitive_react_components.rs @@ -9,9 +9,9 @@ pub use criterion::*; pub use codspeed_criterion_compat::*; use rspack_sources::{ - with_object_pool_scope, BoxSource, ConcatSource, MapOptions, OriginalSource, - 
RawStringSource, ReplaceSource, ReplacementEnforce, Source, SourceExt, - SourceMap, SourceMapSource, SourceMapSourceOptions, + BoxSource, ConcatSource, MapOptions, OriginalSource, RawStringSource, + ReplaceSource, ReplacementEnforce, Source, SourceExt, SourceMap, + SourceMapSource, SourceMapSourceOptions, }; static REPETITIVE_1K_REACT_COMPONENTS_SOURCE: LazyLock = @@ -3516,18 +3516,3 @@ pub fn benchmark_repetitive_react_components_source(b: &mut Bencher) { black_box(source.source()); }); } - -pub fn benchmark_repetitive_react_components_map_with_object_pool_scope( - b: &mut Bencher, -) { - let source = REPETITIVE_1K_REACT_COMPONENTS_SOURCE.clone(); - - with_object_pool_scope(|| { - // Warm up the object pool - black_box(source.map(&MapOptions::default())); - - b.iter(|| { - black_box(source.map(&MapOptions::default())); - }); - }) -} diff --git a/src/cached_source.rs b/src/cached_source.rs index 8147bd36..4a52d2c6 100644 --- a/src/cached_source.rs +++ b/src/cached_source.rs @@ -11,6 +11,7 @@ use crate::{ stream_and_get_source_and_map, stream_chunks_of_raw_source, stream_chunks_of_source_map, StreamChunks, }, + object_pool::ObjectPool, rope::Rope, source::SourceValue, BoxSource, MapOptions, Source, SourceExt, SourceMap, @@ -124,6 +125,7 @@ impl StreamChunks for CachedSource { fn stream_chunks<'a>( &'a self, options: &MapOptions, + object_pool: &'a ObjectPool, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, on_name: crate::helpers::OnName<'_, 'a>, @@ -138,7 +140,13 @@ impl StreamChunks for CachedSource { let source = self.rope(); if let Some(map) = map { stream_chunks_of_source_map( - source, map, on_chunk, on_source, on_name, options, + options, + object_pool, + source, + map, + on_chunk, + on_source, + on_name, ) } else { stream_chunks_of_raw_source( @@ -148,8 +156,9 @@ impl StreamChunks for CachedSource { } None => { let (generated_info, map) = stream_and_get_source_and_map( - &self.inner, options, + object_pool, + 
&self.inner, on_chunk, on_source, on_name, @@ -310,6 +319,7 @@ mod tests { let mut on_name_count = 0; let generated_info = source.stream_chunks( &map_options, + &ObjectPool::default(), &mut |_chunk, _mapping| { on_chunk_count += 1; }, @@ -324,6 +334,7 @@ mod tests { let cached_source = CachedSource::new(source); cached_source.stream_chunks( &map_options, + &ObjectPool::default(), &mut |_chunk, _mapping| {}, &mut |_source_index, _source, _source_content| {}, &mut |_name_index, _name| {}, @@ -334,6 +345,7 @@ mod tests { let mut cached_on_name_count = 0; let cached_generated_info = cached_source.stream_chunks( &map_options, + &ObjectPool::default(), &mut |_chunk, _mapping| { cached_on_chunk_count += 1; }, diff --git a/src/concat_source.rs b/src/concat_source.rs index e7048d4d..04679a1c 100644 --- a/src/concat_source.rs +++ b/src/concat_source.rs @@ -10,6 +10,7 @@ use rustc_hash::FxHashMap as HashMap; use crate::{ helpers::{get_map, GeneratedInfo, OnChunk, OnName, OnSource, StreamChunks}, linear_map::LinearMap, + object_pool::ObjectPool, source::{Mapping, OriginalLocation}, BoxSource, MapOptions, RawStringSource, Rope, Source, SourceExt, SourceMap, SourceValue, @@ -231,6 +232,7 @@ impl StreamChunks for ConcatSource { fn stream_chunks<'a>( &'a self, options: &MapOptions, + object_pool: &'a ObjectPool, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, @@ -238,7 +240,13 @@ impl StreamChunks for ConcatSource { let children = self.optimized_children(); if children.len() == 1 { - return children[0].stream_chunks(options, on_chunk, on_source, on_name); + return children[0].stream_chunks( + options, + object_pool, + on_chunk, + on_source, + on_name, + ); } let mut current_line_offset = 0; let mut current_column_offset = 0; @@ -260,6 +268,7 @@ impl StreamChunks for ConcatSource { generated_column, } = item.stream_chunks( options, + object_pool, &mut |chunk, mapping| { let line = mapping.generated_line + current_line_offset; let column = if 
mapping.generated_line == 1 { diff --git a/src/helpers.rs b/src/helpers.rs index 69c374c7..204447ca 100644 --- a/src/helpers.rs +++ b/src/helpers.rs @@ -12,7 +12,7 @@ use crate::{ decoder::MappingsDecoder, encoder::create_encoder, linear_map::LinearMap, - object_pool::with_current_thread_object_pool_scope, + object_pool::ObjectPool, source::{Mapping, OriginalLocation}, source_content_lines::SourceContentLines, with_indices::WithIndices, @@ -22,13 +22,6 @@ use crate::{ pub fn get_map<'a, S: StreamChunks>( stream: &'a S, options: &'a MapOptions, -) -> Option { - with_current_thread_object_pool_scope(|| get_map_impl(stream, options)) -} - -pub fn get_map_impl<'a, S: StreamChunks>( - stream: &'a S, - options: &'a MapOptions, ) -> Option { let mut mappings_encoder = create_encoder(options.columns); let mut sources: Vec = Vec::new(); @@ -40,6 +33,7 @@ pub fn get_map_impl<'a, S: StreamChunks>( columns: options.columns, final_source: true, }, + &ObjectPool::default(), // on_chunk &mut |_, mapping| { mappings_encoder.encode(&mapping); @@ -78,6 +72,7 @@ pub trait StreamChunks { fn stream_chunks<'a>( &'a self, options: &MapOptions, + object_pool: &'a ObjectPool, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, @@ -97,9 +92,10 @@ pub type OnName<'a, 'b> = &'a mut dyn FnMut(u32, Cow<'b, str>); /// Default stream chunks behavior impl, see [webpack-sources streamChunks](https://github.com/webpack/webpack-sources/blob/9f98066311d53a153fdc7c633422a1d086528027/lib/helpers/streamChunks.js#L15-L35). 
pub fn stream_chunks_default<'a, S>( + options: &MapOptions, + object_pool: &'a ObjectPool, source: S, source_map: Option<&'a SourceMap>, - options: &MapOptions, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, @@ -109,7 +105,13 @@ where { if let Some(map) = source_map { stream_chunks_of_source_map( - source, map, on_chunk, on_source, on_name, options, + options, + object_pool, + source, + map, + on_chunk, + on_source, + on_name, ) } else { stream_chunks_of_raw_source(source, options, on_chunk, on_source, on_name) @@ -308,12 +310,13 @@ where } pub fn stream_chunks_of_source_map<'a, S>( + options: &MapOptions, + object_pool: &'a ObjectPool, source: S, source_map: &'a SourceMap, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, - options: &MapOptions, ) -> GeneratedInfo where S: SourceText<'a> + 'a, @@ -322,24 +325,33 @@ where MapOptions { columns: true, final_source: true, + .. } => stream_chunks_of_source_map_final( source, source_map, on_chunk, on_source, on_name, ), MapOptions { columns: true, final_source: false, + .. } => stream_chunks_of_source_map_full( - source, source_map, on_chunk, on_source, on_name, + object_pool, + source, + source_map, + on_chunk, + on_source, + on_name, ), MapOptions { columns: false, final_source: true, + .. } => stream_chunks_of_source_map_lines_final( source, source_map, on_chunk, on_source, on_name, ), MapOptions { columns: false, final_source: false, + .. 
} => stream_chunks_of_source_map_lines_full( source, source_map, on_chunk, on_source, on_name, ), @@ -418,6 +430,7 @@ where } fn stream_chunks_of_source_map_full<'a, S>( + object_pool: &'a ObjectPool, source: S, source_map: &'a SourceMap, on_chunk: OnChunk<'_, 'a>, @@ -428,7 +441,9 @@ where S: SourceText<'a> + 'a, { let lines = split_into_lines(&source); - let line_with_indices_list = lines.map(WithIndices::new).collect::>(); + let line_with_indices_list = lines + .map(|line| WithIndices::new(object_pool, line)) + .collect::>(); if line_with_indices_list.is_empty() { return GeneratedInfo { @@ -715,6 +730,8 @@ type InnerSourceIndexValueMapping<'a> = #[allow(clippy::too_many_arguments)] pub fn stream_chunks_of_combined_source_map<'a, S>( + options: &MapOptions, + object_pool: &'a ObjectPool, source: S, source_map: &'a SourceMap, inner_source_name: &'a str, @@ -724,7 +741,6 @@ pub fn stream_chunks_of_combined_source_map<'a, S>( on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, - options: &MapOptions, ) -> GeneratedInfo where S: SourceText<'a> + 'a, @@ -781,6 +797,8 @@ where }; stream_chunks_of_source_map( + options, + object_pool, source.clone(), source_map, &mut |chunk, mapping| { @@ -835,7 +853,10 @@ where let inner_source_contents = inner_source_contents.borrow(); match inner_source_contents.get(&inner_source_index) { Some(Some(source_content)) => { - Some(SourceContentLines::from(source_content.clone())) + Some(SourceContentLines::from( + object_pool, + source_content.clone(), + )) } _ => None, } @@ -934,7 +955,10 @@ where let inner_source_contents = inner_source_contents.borrow(); match inner_source_contents.get(&inner_source_index) { Some(Some(source_content)) => { - Some(SourceContentLines::from(source_content.clone())) + Some(SourceContentLines::from( + object_pool, + source_content.clone(), + )) } _ => None, } @@ -1098,6 +1122,11 @@ where } source_index_mapping.borrow_mut().insert(i, -2); stream_chunks_of_source_map( + 
&MapOptions { + columns: options.columns, + final_source: false, + }, + object_pool, source_content.unwrap().as_ref(), inner_source_map, &mut |chunk, mapping| { @@ -1167,10 +1196,6 @@ where inner_name_index_mapping.borrow_mut().insert(i, -2); inner_name_index_value_mapping.borrow_mut().insert(i, name); }, - &MapOptions { - columns: options.columns, - final_source: false, - }, ); } else { let mut source_mapping = source_mapping.borrow_mut(); @@ -1190,13 +1215,13 @@ where name_index_mapping.borrow_mut().insert(i, -2); name_index_value_mapping.borrow_mut().insert(i, name); }, - options, ) } pub fn stream_and_get_source_and_map<'a, S: StreamChunks>( - input_source: &'a S, options: &MapOptions, + object_pool: &'a ObjectPool, + input_source: &'a S, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, @@ -1208,6 +1233,7 @@ pub fn stream_and_get_source_and_map<'a, S: StreamChunks>( let generated_info = input_source.stream_chunks( options, + object_pool, &mut |chunk, mapping| { mappings_encoder.encode(&mapping); on_chunk(chunk, mapping); diff --git a/src/lib.rs b/src/lib.rs index 7ae381cb..02c53d53 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -42,4 +42,4 @@ pub mod stream_chunks { pub use helpers::{decode_mappings, encode_mappings}; -pub use object_pool::with_object_pool_scope; +pub use object_pool::ObjectPool; diff --git a/src/object_pool.rs b/src/object_pool.rs index 8246cc67..df860a8c 100644 --- a/src/object_pool.rs +++ b/src/object_pool.rs @@ -1,12 +1,4 @@ -use std::{ - cell::RefCell, - collections::BTreeMap, - rc::Rc, - sync::{atomic::AtomicBool, Arc, LazyLock}, - thread::ThreadId, -}; - -use dashmap::DashMap; +use std::{cell::RefCell, collections::BTreeMap, rc::Rc}; // Vector pooling minimum capacity threshold // Recommended threshold: 64 @@ -17,40 +9,13 @@ use dashmap::DashMap; // 4. 
Empirical value: 64 is a proven balance point in real projects const MIN_POOL_CAPACITY: usize = 64; -pub trait Poolable { - fn with_capacity(capacity: usize) -> Self; - fn capacity(&self) -> usize; - fn clear(&mut self); -} - -impl Poolable for Vec { - fn with_capacity(capacity: usize) -> Self { - Vec::with_capacity(capacity) - } - - fn capacity(&self) -> usize { - self.capacity() - } - - fn clear(&mut self) { - self.clear(); - } -} - /// A memory pool for reusing `T` allocations to reduce memory allocation overhead. #[derive(Default, Debug)] -pub struct ObjectPool { - objects: Rc>>>, +pub struct ObjectPool { + objects: Rc>>>>, } -// SAFETY: Each ObjectPool is only used within a single thread in rspack-sources, -// which is guaranteed by THREAD_ISOLATED_MAP. Therefore, it is safe to implement Send and Sync. -#[allow(unsafe_code)] -unsafe impl Send for ObjectPool {} -#[allow(unsafe_code)] -unsafe impl Sync for ObjectPool {} - -impl Clone for ObjectPool { +impl Clone for ObjectPool { fn clone(&self) -> Self { Self { objects: self.objects.clone(), @@ -58,26 +23,26 @@ impl Clone for ObjectPool { } } -impl ObjectPool { +impl ObjectPool { /// Retrieves a reusable `T` from the pool with at least the requested capacity. - pub fn pull(&self, requested_capacity: usize) -> T { + pub fn pull(&self, requested_capacity: usize) -> Pooled { if requested_capacity < MIN_POOL_CAPACITY || self.objects.borrow().is_empty() { - return T::with_capacity(requested_capacity); + return Pooled::new(self.clone(), Vec::with_capacity(requested_capacity)); } let mut objects = self.objects.borrow_mut(); if let Some((_, bucket)) = objects.range_mut(requested_capacity..).next() { if let Some(mut object) = bucket.pop() { object.clear(); - return object; + return Pooled::new(self.clone(), object); } } - T::with_capacity(requested_capacity) + Pooled::new(self.clone(), Vec::with_capacity(requested_capacity)) } /// Returns a `T` to the pool for future reuse. 
- pub fn return_to_pool(&self, object: T) { + fn return_to_pool(&self, object: Vec) { if object.capacity() < MIN_POOL_CAPACITY { return; } @@ -86,10 +51,6 @@ impl ObjectPool { let bucket = objects.entry(cap).or_default(); bucket.push(object); } - - pub fn clear(&self) { - self.objects.borrow_mut().clear(); - } } /// A smart pointer that holds a pooled object and automatically returns it to the pool when dropped. @@ -98,127 +59,46 @@ impl ObjectPool { /// pooled objects lifecycle. When the `Pooled` instance is dropped, the contained object /// is automatically returned to its associated pool for future reuse. #[derive(Debug)] -pub struct Pooled { - object: Option, - pool: Option>, +pub struct Pooled { + object: Option>, + pool: ObjectPool, } -impl Pooled { - pub fn new(pool: Option>, requested_capacity: usize) -> Self { - let object = match &pool { - Some(pool) => pool.pull(requested_capacity), - None => T::with_capacity(requested_capacity), - }; - Self { +impl Pooled { + fn new(pool: ObjectPool, object: Vec) -> Self { + Pooled { object: Some(object), pool, } } - pub fn as_mut(&mut self) -> &mut T { + pub fn as_mut(&mut self) -> &mut Vec { self.object.as_mut().unwrap() } - pub fn as_ref(&self) -> &T { + pub fn as_ref(&self) -> &Vec { self.object.as_ref().unwrap() } } -impl Drop for Pooled { +impl Drop for Pooled { fn drop(&mut self) { if let Some(object) = self.object.take() { - if let Some(pool) = &self.pool { - pool.return_to_pool(object); - } + self.pool.return_to_pool(object); } } } -impl std::ops::Deref for Pooled { - type Target = T; +impl std::ops::Deref for Pooled { + type Target = Vec; fn deref(&self) -> &Self::Target { self.as_ref() } } -impl std::ops::DerefMut for Pooled { +impl std::ops::DerefMut for Pooled { fn deref_mut(&mut self) -> &mut Self::Target { self.as_mut() } } - -type ThreadIsolatedMap = Arc>)>>; - -pub static THREAD_ISOLATED_MAP: LazyLock = - LazyLock::new(|| Arc::new(DashMap::new())); - -pub static IN_OBJECT_POOL_SCOPE: AtomicBool = 
AtomicBool::new(false); - -pub fn with_current_thread_object_pool_scope(f: F) -> R -where - F: FnOnce() -> R, -{ - let thread_id = std::thread::current().id(); - { - THREAD_ISOLATED_MAP - .entry(thread_id) - .or_default() - .value_mut() - .0 = true; - } - - let result = f(); - { - if let Some(mut isolation) = THREAD_ISOLATED_MAP.get_mut(&thread_id) { - isolation.0 = false; - if !IN_OBJECT_POOL_SCOPE.load(std::sync::atomic::Ordering::Relaxed) { - isolation.1.clear(); - } - } - } - result -} - -pub fn pull_usize_vec(requested_capacity: usize) -> Pooled> { - let thread_id = std::thread::current().id(); - let ref_multi = THREAD_ISOLATED_MAP.entry(thread_id).or_default(); - Pooled::new(Some(ref_multi.1.clone()), requested_capacity) -} - -/// Extends the lifetime of the object pool to the end of the provided closure, -/// instead of just the end of `source.map()`. This is primarily designed for -/// integration with parallel frameworks like rayon, ensuring the object pool -/// remains available throughout parallel tasks. -/// -/// # Example -/// ``` -/// use rspack_sources::{BoxSource, MapOptions, with_object_pool_scope}; -/// use rayon::prelude::*; -/// -/// let sources: Vec = vec![/* ... 
*/]; -/// -/// with_object_pool_scope(|| { -/// sources.into_par_iter() -/// .map(|source| source.map(&MapOptions::default())) -/// .collect::>() -/// }); -/// ``` -pub fn with_object_pool_scope(f: F) -> R -where - F: FnOnce() -> R, -{ - IN_OBJECT_POOL_SCOPE.store(true, std::sync::atomic::Ordering::SeqCst); - - let result = f(); - - for ref_multi in THREAD_ISOLATED_MAP.iter() { - if !ref_multi.value().0 { - ref_multi.value().1.clear(); - } - } - - IN_OBJECT_POOL_SCOPE.store(false, std::sync::atomic::Ordering::SeqCst); - - result -} diff --git a/src/original_source.rs b/src/original_source.rs index cbed9585..be90b938 100644 --- a/src/original_source.rs +++ b/src/original_source.rs @@ -10,6 +10,7 @@ use crate::{ split_into_potential_tokens, GeneratedInfo, OnChunk, OnName, OnSource, SourceText, StreamChunks, }, + object_pool::ObjectPool, source::{Mapping, OriginalLocation}, MapOptions, Rope, Source, SourceMap, SourceValue, }; @@ -110,6 +111,7 @@ impl StreamChunks for OriginalSource { fn stream_chunks<'a>( &'a self, options: &MapOptions, + _: &'a ObjectPool, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, _on_name: OnName, diff --git a/src/raw_source.rs b/src/raw_source.rs index f55856ba..e7895217 100644 --- a/src/raw_source.rs +++ b/src/raw_source.rs @@ -9,6 +9,7 @@ use crate::{ get_generated_source_info, stream_chunks_of_raw_source, OnChunk, OnName, OnSource, StreamChunks, }, + object_pool::ObjectPool, MapOptions, Rope, Source, SourceMap, SourceValue, }; @@ -110,6 +111,7 @@ impl StreamChunks for RawStringSource { fn stream_chunks<'a>( &'a self, options: &MapOptions, + _: &'a ObjectPool, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, @@ -243,6 +245,7 @@ impl StreamChunks for RawBufferSource { fn stream_chunks<'a>( &'a self, options: &MapOptions, + _: &'a ObjectPool, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, diff --git a/src/replace_source.rs b/src/replace_source.rs index 
aaf66e43..8142f7c8 100644 --- a/src/replace_source.rs +++ b/src/replace_source.rs @@ -12,6 +12,7 @@ use crate::{ get_map, split_into_lines, GeneratedInfo, SourceText, StreamChunks, }, linear_map::LinearMap, + object_pool::ObjectPool, rope::Rope, source_content_lines::SourceContentLines, BoxSource, MapOptions, Mapping, OriginalLocation, Source, SourceExt, @@ -319,9 +320,9 @@ impl std::fmt::Debug for ReplaceSource { } } -enum SourceContent { +enum SourceContent<'object_pool> { Raw(Arc), - Lines(SourceContentLines), + Lines(SourceContentLines<'object_pool>), } fn check_content_at_position( @@ -344,6 +345,7 @@ impl StreamChunks for ReplaceSource { fn stream_chunks<'a>( &'a self, options: &crate::MapOptions, + object_pool: &'a ObjectPool, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, on_name: crate::helpers::OnName<'_, 'a>, @@ -397,7 +399,7 @@ impl StreamChunks for ReplaceSource { { match source_content { SourceContent::Raw(source) => { - let lines = SourceContentLines::from(source.clone()); + let lines = SourceContentLines::from(object_pool, source.clone()); let matched = check_content_at_position(&lines, line, column, expected_chunk); *source_content = SourceContent::Lines(lines); @@ -417,6 +419,7 @@ impl StreamChunks for ReplaceSource { columns: options.columns, final_source: false, }, + object_pool, &mut |chunk, mut mapping| { // SAFETY: final_source is false in ReplaceSource let chunk = chunk.unwrap(); diff --git a/src/source.rs b/src/source.rs index a56f922e..584d6739 100644 --- a/src/source.rs +++ b/src/source.rs @@ -12,6 +12,7 @@ use serde::{Deserialize, Serialize}; use crate::{ helpers::{decode_mappings, StreamChunks}, + object_pool::ObjectPool, rope::Rope, Result, }; @@ -167,13 +168,18 @@ impl StreamChunks for BoxSource { fn stream_chunks<'a>( &'a self, options: &MapOptions, + object_pool: &'a ObjectPool, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, on_name: 
crate::helpers::OnName<'_, 'a>, ) -> crate::helpers::GeneratedInfo { - self - .as_ref() - .stream_chunks(options, on_chunk, on_source, on_name) + self.as_ref().stream_chunks( + options, + object_pool, + on_chunk, + on_source, + on_name, + ) } } @@ -250,7 +256,7 @@ impl SourceExt for T { } /// Options for [Source::map]. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone)] pub struct MapOptions { /// Whether have columns info in generated [SourceMap] mappings. pub columns: bool, diff --git a/src/source_content_lines.rs b/src/source_content_lines.rs index 656a37bf..5c0bc046 100644 --- a/src/source_content_lines.rs +++ b/src/source_content_lines.rs @@ -1,30 +1,38 @@ use std::sync::Arc; -use crate::{helpers::split_into_lines, with_indices::WithIndices}; +use crate::{ + helpers::split_into_lines, object_pool::ObjectPool, with_indices::WithIndices, +}; -type Owner = Arc; +struct Owner<'object_pool> { + text: Arc, + object_pool: &'object_pool ObjectPool, +} -type BorrowedValue<'a> = Vec>; +type BorrowedValue<'text> = Vec>; self_cell::self_cell!( - pub struct SourceContentLines { - owner: Owner, + pub struct SourceContentLines<'object_pool> { + owner: Owner<'object_pool>, #[covariant] dependent: BorrowedValue, } ); -impl SourceContentLines { +impl<'object_pool> SourceContentLines<'object_pool> { pub fn get(&self, line: usize) -> Option<&WithIndices<'_, &str>> { self.borrow_dependent().get(line) } -} -impl From> for SourceContentLines { - fn from(value: Arc) -> Self { - SourceContentLines::new(value, |owner| { - split_into_lines(&owner.as_ref()) - .map(WithIndices::new) + pub fn from(object_pool: &'object_pool ObjectPool, value: Arc) -> Self { + let owner = Owner { + text: value.clone(), + object_pool, + }; + + SourceContentLines::new(owner, |owner| { + split_into_lines(&owner.text.as_ref()) + .map(|line| WithIndices::new(owner.object_pool, line)) .collect::>() }) } diff --git a/src/source_map_source.rs b/src/source_map_source.rs index 63c4f169..f0511213 
100644 --- a/src/source_map_source.rs +++ b/src/source_map_source.rs @@ -9,6 +9,7 @@ use crate::{ get_map, stream_chunks_of_combined_source_map, stream_chunks_of_source_map, StreamChunks, }, + object_pool::ObjectPool, MapOptions, Rope, Source, SourceMap, SourceValue, }; @@ -186,12 +187,15 @@ impl StreamChunks for SourceMapSource { fn stream_chunks<'a>( &'a self, options: &MapOptions, + object_pool: &'a ObjectPool, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, on_name: crate::helpers::OnName<'_, 'a>, ) -> crate::helpers::GeneratedInfo { if let Some(inner_source_map) = &self.inner_source_map { stream_chunks_of_combined_source_map( + options, + object_pool, &*self.value, &self.source_map, &self.name, @@ -201,16 +205,16 @@ impl StreamChunks for SourceMapSource { on_chunk, on_source, on_name, - options, ) } else { stream_chunks_of_source_map( + options, + object_pool, self.value.as_str(), &self.source_map, on_chunk, on_source, on_name, - options, ) } } diff --git a/src/with_indices.rs b/src/with_indices.rs index ece3058c..90d41c1f 100644 --- a/src/with_indices.rs +++ b/src/with_indices.rs @@ -2,7 +2,7 @@ use std::{cell::OnceCell, marker::PhantomData}; use crate::{ helpers::SourceText, - object_pool::{pull_usize_vec, Pooled}, + object_pool::{ObjectPool, Pooled}, }; #[derive(Debug)] @@ -13,18 +13,20 @@ where /// line is a string reference pub line: S, /// the byte position of each `char` in `line` string slice . 
- pub char_byte_indices: OnceCell>>, + pub char_byte_indices: OnceCell, data: PhantomData<&'text S>, + object_pool: &'text ObjectPool, } impl<'text, S> WithIndices<'text, S> where S: SourceText<'text>, { - pub fn new(line: S) -> Self { + pub fn new(object_pool: &'text ObjectPool, line: S) -> Self { Self { char_byte_indices: OnceCell::new(), line, + object_pool, data: PhantomData, } } @@ -36,7 +38,7 @@ where } let char_byte_indices = self.char_byte_indices.get_or_init(|| { - let mut vec = pull_usize_vec(self.line.len()); + let mut vec = self.object_pool.pull(self.line.len()); vec.extend(self.line.char_indices().map(|(i, _)| i)); vec }); @@ -58,13 +60,14 @@ where /// tests are just copy from `substring` crate #[cfg(test)] mod tests { - use crate::Rope; + use crate::{object_pool::ObjectPool, Rope}; use super::WithIndices; #[test] fn test_substring() { assert_eq!( - WithIndices::new(Rope::from("foobar")).substring(0, 3), + WithIndices::new(&ObjectPool::default(), Rope::from("foobar")) + .substring(0, 3), "foo" ); } @@ -72,26 +75,40 @@ mod tests { #[test] fn test_out_of_bounds() { assert_eq!( - WithIndices::new(Rope::from("foobar")).substring(0, 10), + WithIndices::new(&ObjectPool::default(), Rope::from("foobar")) + .substring(0, 10), "foobar" ); - assert_eq!(WithIndices::new(Rope::from("foobar")).substring(6, 10), ""); + assert_eq!( + WithIndices::new(&ObjectPool::default(), Rope::from("foobar")) + .substring(6, 10), + "" + ); } #[test] fn test_start_less_than_end() { - assert_eq!(WithIndices::new(Rope::from("foobar")).substring(3, 2), ""); + assert_eq!( + WithIndices::new(&ObjectPool::default(), Rope::from("foobar")) + .substring(3, 2), + "" + ); } #[test] fn test_start_and_end_equal() { - assert_eq!(WithIndices::new(Rope::from("foobar")).substring(3, 3), ""); + assert_eq!( + WithIndices::new(&ObjectPool::default(), Rope::from("foobar")) + .substring(3, 3), + "" + ); } #[test] fn test_multiple_byte_characters() { assert_eq!( - 
WithIndices::new(Rope::from("fõøbα®")).substring(2, 5), + WithIndices::new(&ObjectPool::default(), Rope::from("fõøbα®")) + .substring(2, 5), "øbα" ); } } diff --git a/tests/compat_source.rs b/tests/compat_source.rs index 5cc97576..d30b492d 100644 --- a/tests/compat_source.rs +++ b/tests/compat_source.rs @@ -6,9 +6,9 @@ use rspack_sources::stream_chunks::{ stream_chunks_default, GeneratedInfo, OnChunk, OnName, OnSource, StreamChunks, }; use rspack_sources::{ - ConcatSource, MapOptions, RawStringSource, Rope, Source, SourceExt, - SourceMap, SourceValue, + ConcatSource, MapOptions, ObjectPool, RawStringSource, Rope, Source, + SourceExt, SourceMap, SourceValue, }; #[derive(Debug, Eq)] struct CompatSource(&'static str, Option); @@ -43,14 +45,16 @@ impl StreamChunks for CompatSource { fn stream_chunks<'a>( &'a self, options: &MapOptions, + object_pool: &'a ObjectPool, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, ) -> GeneratedInfo { stream_chunks_default( + options, + object_pool, self.0, self.1.as_ref(), - options, on_chunk, on_source, on_name, From 7e2869d252499d9eaa9e6be7fd26243b6db51b40 Mon Sep 17 00:00:00 2001 From: Cong-Cong Date: Mon, 3 Nov 2025 11:20:02 +0800 Subject: [PATCH 18/22] pass object pool to map --- benches/bench.rs | 8 +- benches/bench_complex_replace_source.rs | 5 +- .../benchmark_repetitive_react_components.rs | 8 +- src/cached_source.rs | 26 +++--- src/concat_source.rs | 55 +++++++++---- src/helpers.rs | 9 ++- src/original_source.rs | 41 +++++++--- src/raw_source.rs | 12 +-- src/replace_source.rs | 77 +++++++++++++----- src/source.rs | 18 +++-- src/source_map_source.rs | 79 +++++++++++++------ tests/compat_source.rs | 19 +++-- 12 files changed, 247 insertions(+), 110 deletions(-) diff --git a/benches/bench.rs b/benches/bench.rs index 0b8d96ae..04e9e863 100644 --- a/benches/bench.rs +++ b/benches/bench.rs @@ -13,8 +13,8 @@ pub use criterion::*; pub use 
codspeed_criterion_compat::*; use rspack_sources::{ - BoxSource, CachedSource, ConcatSource, MapOptions, Source, SourceExt, - SourceMap, SourceMapSource, SourceMapSourceOptions, + BoxSource, CachedSource, ConcatSource, MapOptions, ObjectPool, Source, + SourceExt, SourceMap, SourceMapSource, SourceMapSourceOptions, }; use bench_complex_replace_source::{ @@ -79,7 +79,7 @@ fn benchmark_concat_generate_string(b: &mut Bencher) { b.iter(|| { concat - .map(&MapOptions::default()) + .map(&ObjectPool::default(), &MapOptions::default()) .unwrap() .to_json() .unwrap(); @@ -108,7 +108,7 @@ fn benchmark_concat_generate_string_with_cache(b: &mut Bencher) { b.iter(|| { cached - .map(&MapOptions::default()) + .map(&ObjectPool::default(), &MapOptions::default()) .unwrap() .to_json() .unwrap(); diff --git a/benches/bench_complex_replace_source.rs b/benches/bench_complex_replace_source.rs index 2439e66e..65a13c77 100644 --- a/benches/bench_complex_replace_source.rs +++ b/benches/bench_complex_replace_source.rs @@ -9,8 +9,8 @@ pub use criterion::*; pub use codspeed_criterion_compat::*; use rspack_sources::{ - BoxSource, MapOptions, OriginalSource, ReplaceSource, SourceExt, + BoxSource, MapOptions, ObjectPool, OriginalSource, ReplaceSource, SourceExt, }; static LARGE_REPLACE_SOURCE: LazyLock = LazyLock::new(|| { let mut source = ReplaceSource::new( @@ -36724,7 +36725,7 @@ pub fn benchmark_complex_replace_source_map(b: &mut Bencher) { let source = LARGE_REPLACE_SOURCE.clone(); b.iter(|| { - black_box(source.map(&MapOptions::default())); + black_box(source.map(&ObjectPool::default(), &MapOptions::default())); }); } diff --git a/benches/benchmark_repetitive_react_components.rs b/benches/benchmark_repetitive_react_components.rs index 96875033..602282ee 100644 --- a/benches/benchmark_repetitive_react_components.rs +++ b/benches/benchmark_repetitive_react_components.rs @@ -9,9 +9,9 @@ pub use criterion::*; pub use codspeed_criterion_compat::*; use 
rspack_sources::{ - BoxSource, ConcatSource, MapOptions, OriginalSource, RawStringSource, - ReplaceSource, ReplacementEnforce, Source, SourceExt, SourceMap, - SourceMapSource, SourceMapSourceOptions, + BoxSource, ConcatSource, MapOptions, ObjectPool, OriginalSource, + RawStringSource, ReplaceSource, ReplacementEnforce, Source, SourceExt, + SourceMap, SourceMapSource, SourceMapSourceOptions, }; static REPETITIVE_1K_REACT_COMPONENTS_SOURCE: LazyLock = @@ -3505,7 +3505,7 @@ pub fn benchmark_repetitive_react_components_map(b: &mut Bencher) { let source = REPETITIVE_1K_REACT_COMPONENTS_SOURCE.clone(); b.iter(|| { - black_box(source.map(&MapOptions::default())); + black_box(source.map(&ObjectPool::default(), &MapOptions::default())); }); } diff --git a/src/cached_source.rs b/src/cached_source.rs index 4a52d2c6..41ab3df5 100644 --- a/src/cached_source.rs +++ b/src/cached_source.rs @@ -100,18 +100,22 @@ impl Source for CachedSource { *self.cache.size.get_or_init(|| self.inner.size()) } - fn map(&self, options: &MapOptions) -> Option { + fn map( + &self, + object_pool: &ObjectPool, + options: &MapOptions, + ) -> Option { if options.columns { self .cache .columns_map - .get_or_init(|| self.inner.map(options)) + .get_or_init(|| self.inner.map(object_pool, options)) .clone() } else { self .cache .line_only_map - .get_or_init(|| self.inner.map(options)) + .get_or_init(|| self.inner.map(object_pool, options)) .clone() } } @@ -124,8 +128,8 @@ impl Source for CachedSource { impl StreamChunks for CachedSource { fn stream_chunks<'a>( &'a self, - options: &MapOptions, object_pool: &'a ObjectPool, + options: &MapOptions, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, on_name: crate::helpers::OnName<'_, 'a>, @@ -242,7 +246,9 @@ mod tests { }) .boxed(), ]); - let map = source.map(&Default::default()).unwrap(); + let map = source + .map(&ObjectPool::default(), &Default::default()) + .unwrap(); assert_eq!(map.mappings(), ";;AACA"); } @@ -257,11 
+263,11 @@ mod tests { source.source(); source.buffer(); source.size(); - source.map(&map_options); + source.map(&ObjectPool::default(), &map_options); assert_eq!( *clone.cache.columns_map.get().unwrap(), - source.map(&map_options) + source.map(&ObjectPool::default(), &map_options) ); } @@ -318,8 +324,8 @@ mod tests { let mut on_source_count = 0; let mut on_name_count = 0; let generated_info = source.stream_chunks( - &map_options, &ObjectPool::default(), + &map_options, &mut |_chunk, _mapping| { on_chunk_count += 1; }, @@ -333,8 +339,8 @@ mod tests { let cached_source = CachedSource::new(source); cached_source.stream_chunks( - &map_options, &ObjectPool::default(), + &map_options, &mut |_chunk, _mapping| {}, &mut |_source_index, _source, _source_content| {}, &mut |_name_index, _name| {}, @@ -344,8 +350,8 @@ mod tests { let mut cached_on_source_count = 0; let mut cached_on_name_count = 0; let cached_generated_info = cached_source.stream_chunks( - &map_options, &ObjectPool::default(), + &map_options, &mut |_chunk, _mapping| { cached_on_chunk_count += 1; }, diff --git a/src/concat_source.rs b/src/concat_source.rs index 04679a1c..64a09916 100644 --- a/src/concat_source.rs +++ b/src/concat_source.rs @@ -200,8 +200,12 @@ impl Source for ConcatSource { .sum() } - fn map(&self, options: &MapOptions) -> Option { - get_map(self, options) + fn map( + &self, + object_pool: &ObjectPool, + options: &MapOptions, + ) -> Option { + get_map(object_pool, self, options) } fn to_writer(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> { @@ -231,8 +235,8 @@ impl Eq for ConcatSource {} impl StreamChunks for ConcatSource { fn stream_chunks<'a>( &'a self, - options: &MapOptions, object_pool: &'a ObjectPool, + options: &MapOptions, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, @@ -241,8 +245,8 @@ impl StreamChunks for ConcatSource { if children.len() == 1 { return children[0].stream_chunks( - options, object_pool, + options, on_chunk, 
on_source, on_name, @@ -267,8 +271,8 @@ impl StreamChunks for ConcatSource { generated_line, generated_column, } = item.stream_chunks( - options, object_pool, + options, &mut |chunk, mapping| { let line = mapping.generated_line + current_line_offset; let column = if mapping.generated_line == 1 { @@ -479,7 +483,9 @@ mod tests { assert_eq!(source.size(), 62); assert_eq!(source.source().into_string_lossy(), expected_source); assert_eq!( - source.map(&MapOptions::new(false)).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(), SourceMap::from_json( r#"{ "version": 3, @@ -495,7 +501,9 @@ mod tests { .unwrap() ); assert_eq!( - source.map(&MapOptions::default()).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(), SourceMap::from_json( r#"{ "version": 3, @@ -529,7 +537,9 @@ mod tests { assert_eq!(source.size(), 62); assert_eq!(source.source().into_string_lossy(), expected_source); assert_eq!( - source.map(&MapOptions::new(false)).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(), SourceMap::from_json( r#"{ "version": 3, @@ -545,7 +555,9 @@ mod tests { .unwrap() ); assert_eq!( - source.map(&MapOptions::default()).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(), SourceMap::from_json( r#"{ "version": 3, @@ -579,7 +591,9 @@ mod tests { assert_eq!(source.size(), 62); assert_eq!(source.source().into_string_lossy(), expected_source); assert_eq!( - source.map(&MapOptions::new(false)).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(), SourceMap::from_json( r#"{ "version": 3, @@ -595,7 +609,9 @@ mod tests { .unwrap() ); assert_eq!( - source.map(&MapOptions::default()).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(), SourceMap::from_json( r#"{ "version": 3, @@ -647,7 +663,9 @@ mod tests { assert_eq!(source.source().into_string_lossy(), expected_source); 
assert_eq!(source.buffer(), expected_source.as_bytes()); - let map = source.map(&MapOptions::new(false)).unwrap(); + let map = source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(); assert_eq!(map, expected_map1); // TODO: test hash @@ -662,8 +680,9 @@ mod tests { ]); let result_text = source.source(); - let result_map = source.map(&MapOptions::default()); - let result_list_map = source.map(&MapOptions::new(false)); + let result_map = source.map(&ObjectPool::default(), &MapOptions::default()); + let result_list_map = + source.map(&ObjectPool::default(), &MapOptions::new(false)); assert_eq!( result_text.into_string_lossy(), @@ -687,7 +706,9 @@ mod tests { ]); assert_eq!( - source.map(&MapOptions::default()).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(), SourceMap::from_json( r#"{ "mappings": "AAAA,K,CCAA,M;ADAA;;ACAA", @@ -713,7 +734,9 @@ mod tests { RawStringSource::from("c"), ]); assert_eq!(source.source().into_string_lossy(), "abc"); - assert!(source.map(&MapOptions::default()).is_none()); + assert!(source + .map(&ObjectPool::default(), &MapOptions::default()) + .is_none()); } #[test] diff --git a/src/helpers.rs b/src/helpers.rs index 204447ca..ed01b13d 100644 --- a/src/helpers.rs +++ b/src/helpers.rs @@ -20,8 +20,9 @@ use crate::{ }; pub fn get_map<'a, S: StreamChunks>( + object_pool: &'a ObjectPool, stream: &'a S, - options: &'a MapOptions, + options: &MapOptions, ) -> Option { let mut mappings_encoder = create_encoder(options.columns); let mut sources: Vec = Vec::new(); @@ -29,11 +30,11 @@ pub fn get_map<'a, S: StreamChunks>( let mut names: Vec = Vec::new(); stream.stream_chunks( + object_pool, &MapOptions { columns: options.columns, final_source: true, }, - &ObjectPool::default(), // on_chunk &mut |_, mapping| { mappings_encoder.encode(&mapping); @@ -71,8 +72,8 @@ pub trait StreamChunks { /// [StreamChunks] abstraction fn stream_chunks<'a>( &'a self, - options: &MapOptions, object_pool: &'a 
ObjectPool, + options: &MapOptions, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, @@ -1232,8 +1233,8 @@ pub fn stream_and_get_source_and_map<'a, S: StreamChunks>( let mut names: Vec = Vec::new(); let generated_info = input_source.stream_chunks( - options, object_pool, + options, &mut |chunk, mapping| { mappings_encoder.encode(&mapping); on_chunk(chunk, mapping); diff --git a/src/original_source.rs b/src/original_source.rs index be90b938..8b868925 100644 --- a/src/original_source.rs +++ b/src/original_source.rs @@ -69,8 +69,12 @@ impl Source for OriginalSource { self.value.len() } - fn map(&self, options: &MapOptions) -> Option { - get_map(self, options) + fn map( + &self, + object_pool: &ObjectPool, + options: &MapOptions, + ) -> Option { + get_map(object_pool, self, options) } fn to_writer(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> { @@ -110,8 +114,8 @@ impl std::fmt::Debug for OriginalSource { impl StreamChunks for OriginalSource { fn stream_chunks<'a>( &'a self, - options: &MapOptions, _: &'a ObjectPool, + options: &MapOptions, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, _on_name: OnName, @@ -247,8 +251,12 @@ mod tests { fn should_handle_multiline_string() { let source = OriginalSource::new("Line1\n\nLine3\n", "file.js"); let result_text = source.source(); - let result_map = source.map(&MapOptions::default()).unwrap(); - let result_list_map = source.map(&MapOptions::new(false)).unwrap(); + let result_map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); + let result_list_map = source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(); assert_eq!(result_text.into_string_lossy(), "Line1\n\nLine3\n"); assert_eq!(result_map.sources(), &["file.js".to_string()]); @@ -266,8 +274,9 @@ mod tests { fn should_handle_empty_string() { let source = OriginalSource::new("", "file.js"); let result_text = source.source(); - let result_map = 
source.map(&MapOptions::default()); - let result_list_map = source.map(&MapOptions::new(false)); + let result_map = source.map(&ObjectPool::default(), &MapOptions::default()); + let result_list_map = + source.map(&ObjectPool::default(), &MapOptions::new(false)); assert_eq!(result_text.into_string_lossy(), ""); assert!(result_map.is_none()); @@ -277,7 +286,9 @@ mod tests { #[test] fn should_omit_mappings_for_columns_with_node() { let source = OriginalSource::new("Line1\n\nLine3\n", "file.js"); - let result_map = source.map(&MapOptions::new(false)).unwrap(); + let result_map = source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(); assert_eq!(result_map.mappings(), "AAAA;AACA;AACA"); } @@ -302,11 +313,17 @@ mod tests { let source = OriginalSource::new(input, "file.js"); assert_eq!(source.source().into_string_lossy(), input); assert_eq!( - source.map(&MapOptions::default()).unwrap().mappings(), + source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap() + .mappings(), "AAAA,eAAe,SAAS,MAAM,WAAW;AACzC,eAAe,SAAS,MAAM,WAAW", ); assert_eq!( - source.map(&MapOptions::new(false)).unwrap().mappings(), + source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap() + .mappings(), "AAAA;AACA", ); } @@ -322,7 +339,9 @@ mod tests { let source2 = OriginalSource::new(code2, "world.txt"); let concat = ConcatSource::new([source1.boxed(), source2.boxed()]); - let map = concat.map(&MapOptions::new(false)).unwrap(); + let map = concat + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(); assert_eq!(map.mappings(), "AAAA;AACA;ACDA",); } } diff --git a/src/raw_source.rs b/src/raw_source.rs index e7895217..28e89a8f 100644 --- a/src/raw_source.rs +++ b/src/raw_source.rs @@ -76,7 +76,7 @@ impl Source for RawStringSource { self.0.len() } - fn map(&self, _: &MapOptions) -> Option { + fn map(&self, _: &ObjectPool, _: &MapOptions) -> Option { None } @@ -110,8 +110,8 @@ impl Hash for RawStringSource { impl StreamChunks for 
RawStringSource { fn stream_chunks<'a>( &'a self, - options: &MapOptions, _: &'a ObjectPool, + options: &MapOptions, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, @@ -210,7 +210,7 @@ impl Source for RawBufferSource { self.value.len() } - fn map(&self, _: &MapOptions) -> Option { + fn map(&self, _: &ObjectPool, _: &MapOptions) -> Option { None } @@ -244,8 +244,8 @@ impl Hash for RawBufferSource { impl StreamChunks for RawBufferSource { fn stream_chunks<'a>( &'a self, - options: &MapOptions, _: &'a ObjectPool, + options: &MapOptions, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, @@ -277,7 +277,9 @@ mod tests { let source1 = ReplaceSource::new(source1); let source2 = OriginalSource::new("world".to_string(), "world.txt"); let concat = ConcatSource::new([source1.boxed(), source2.boxed()]); - let map = concat.map(&MapOptions::new(false)).unwrap(); + let map = concat + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(); assert_eq!(map.mappings(), ";;AAAA",); } } diff --git a/src/replace_source.rs b/src/replace_source.rs index 8142f7c8..cdd3b100 100644 --- a/src/replace_source.rs +++ b/src/replace_source.rs @@ -265,12 +265,16 @@ impl Source for ReplaceSource { size } - fn map(&self, options: &crate::MapOptions) -> Option { + fn map( + &self, + _: &ObjectPool, + options: &crate::MapOptions, + ) -> Option { let replacements = &self.replacements; if replacements.is_empty() { - return self.inner.map(options); + return self.inner.map(&ObjectPool::default(), options); } - get_map(self, options) + get_map(&ObjectPool::default(), self, options) } fn to_writer(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> { @@ -344,8 +348,8 @@ fn check_content_at_position( impl StreamChunks for ReplaceSource { fn stream_chunks<'a>( &'a self, - options: &crate::MapOptions, object_pool: &'a ObjectPool, + options: &crate::MapOptions, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: 
crate::helpers::OnSource<'_, 'a>, on_name: crate::helpers::OnName<'_, 'a>, @@ -415,11 +419,11 @@ impl StreamChunks for ReplaceSource { }; let result = self.inner.stream_chunks( + object_pool, &MapOptions { columns: options.columns, final_source: false, }, - object_pool, &mut |chunk, mut mapping| { // SAFETY: final_source is false in ReplaceSource let chunk = chunk.unwrap(); @@ -869,7 +873,9 @@ mod tests { source.replace(start_line6 + 4, start_line6 + 5, " ", None); let result = source.source(); - let result_map = source.map(&MapOptions::default()).unwrap(); + let result_map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); assert_eq!( code, @@ -901,7 +907,9 @@ Last Line"# 5:0 -> [file.txt] 6:0, :4 -> [file.txt] 6:4, :5 -> [file.txt] 7:0"# ); - let result_list_map = source.map(&MapOptions::new(false)).unwrap(); + let result_list_map = source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(); assert_eq!( with_readable_mappings(&result_list_map), r#" @@ -924,8 +932,12 @@ Last Line"# source.insert(0, "Message: ", None); source.replace(2, (line1.len() + 5) as u32, "y A", None); let result_text = source.source(); - let result_map = source.map(&MapOptions::default()).unwrap(); - let result_list_map = source.map(&MapOptions::new(false)).unwrap(); + let result_map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); + let result_list_map = source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(); assert_eq!( original_code, @@ -955,8 +967,12 @@ World!"# source.insert(0, "Line 0\n", None); let result_text = source.source(); - let result_map = source.map(&MapOptions::default()).unwrap(); - let result_list_map = source.map(&MapOptions::new(false)).unwrap(); + let result_map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); + let result_list_map = source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(); assert_eq!(result_text.into_string_lossy(), 
"Line -1\nLine 0\nLine 1"); assert_eq!( @@ -984,8 +1000,12 @@ World!"# source.insert(0, "Line 0\n", None); source.replace(0, 6, "Hello", None); let result_text = source.source(); - let result_map = source.map(&MapOptions::default()).unwrap(); - let result_list_map = source.map(&MapOptions::new(false)).unwrap(); + let result_map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); + let result_list_map = source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(); assert_eq!( result_text.into_string_lossy(), @@ -1009,8 +1029,12 @@ Line 2"# let mut source = ReplaceSource::new(OriginalSource::new(line1, "file.txt")); source.insert((line1.len() + 1) as u32, "Line 2\n", None); let result_text = source.source(); - let result_map = source.map(&MapOptions::default()).unwrap(); - let result_list_map = source.map(&MapOptions::new(false)).unwrap(); + let result_map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); + let result_list_map = source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(); assert_eq!(result_text.into_string_lossy(), "Line 1\nLine 2\n"); assert_eq!( @@ -1030,7 +1054,9 @@ Line 2"# ReplaceSource::new(OriginalSource::new(bootstrap_code, "file.js")); source.replace(7, 12, "h", Some("hello")); source.replace(20, 25, "w", Some("world")); - let result_map = source.map(&MapOptions::default()).expect("failed"); + let result_map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .expect("failed"); let target_code = source.source(); assert_eq!(target_code.into_string_lossy(), " var h\n var w\n"); @@ -1095,7 +1121,9 @@ export default function StaticPage(_ref) { ); let target_code = source.source(); - let source_map = source.map(&MapOptions::default()).unwrap(); + let source_map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); assert_eq!( target_code.into_string_lossy(), @@ -1146,7 +1174,9 @@ return
{data.foo}
source.replace(12, 24, "", None); let target_code = source.source(); - let source_map = source.map(&MapOptions::default()).unwrap(); + let source_map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); assert_eq!(target_code.into_string_lossy(), "if (false) {}"); assert_eq!( @@ -1172,7 +1202,9 @@ return
{data.foo}
source.replace(0, 999, "replaced!\n", Some("whole")); let result_text = source.source(); - let result_map = source.map(&MapOptions::default()).unwrap(); + let result_map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); assert_eq!( result_text.into_string_lossy(), @@ -1193,7 +1225,10 @@ return
{data.foo}
source.replace(3, 5, "", None); assert_eq!(source.size(), 3); assert_eq!(source.source().into_string_lossy(), "box"); - assert_eq!(source.map(&MapOptions::default()), None); + assert_eq!( + source.map(&ObjectPool::default(), &MapOptions::default()), + None + ); let mut hasher = twox_hash::XxHash64::default(); source.hash(&mut hasher); assert_eq!(format!("{:x}", hasher.finish()), "96abdb94c6fd5aba"); @@ -1228,7 +1263,7 @@ return
{data.foo}
); assert_eq!( source - .map(&MapOptions::default()) + .map(&ObjectPool::default(), &MapOptions::default()) .unwrap() .to_json() .unwrap(), diff --git a/src/source.rs b/src/source.rs index 584d6739..f015d24e 100644 --- a/src/source.rs +++ b/src/source.rs @@ -125,7 +125,11 @@ pub trait Source: fn size(&self) -> usize; /// Get the [SourceMap]. - fn map(&self, options: &MapOptions) -> Option; + fn map( + &self, + object_pool: &ObjectPool, + options: &MapOptions, + ) -> Option; /// Update hash based on the source. fn update_hash(&self, state: &mut dyn Hasher) { @@ -153,8 +157,12 @@ impl Source for BoxSource { self.as_ref().size() } - fn map(&self, options: &MapOptions) -> Option { - self.as_ref().map(options) + fn map( + &self, + object_pool: &ObjectPool, + options: &MapOptions, + ) -> Option { + self.as_ref().map(object_pool, options) } fn to_writer(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> { @@ -167,15 +175,15 @@ dyn_clone::clone_trait_object!(Source); impl StreamChunks for BoxSource { fn stream_chunks<'a>( &'a self, - options: &MapOptions, object_pool: &'a ObjectPool, + options: &MapOptions, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, on_name: crate::helpers::OnName<'_, 'a>, ) -> crate::helpers::GeneratedInfo { self.as_ref().stream_chunks( - options, object_pool, + options, on_chunk, on_source, on_name, diff --git a/src/source_map_source.rs b/src/source_map_source.rs index f0511213..5a2b6166 100644 --- a/src/source_map_source.rs +++ b/src/source_map_source.rs @@ -106,11 +106,15 @@ impl Source for SourceMapSource { self.value.len() } - fn map(&self, options: &MapOptions) -> Option { + fn map( + &self, + object_pool: &ObjectPool, + options: &MapOptions, + ) -> Option { if self.inner_source_map.is_none() { return Some(self.source_map.clone()); } - get_map(self, options) + get_map(object_pool, self, options) } fn to_writer(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> { @@ -186,8 +190,8 @@ 
impl std::fmt::Debug for SourceMapSource { impl StreamChunks for SourceMapSource { fn stream_chunks<'a>( &'a self, - options: &MapOptions, object_pool: &'a ObjectPool, + options: &MapOptions, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, on_name: crate::helpers::OnName<'_, 'a>, @@ -255,7 +259,8 @@ mod tests { name: "text", source_map: source_r_map.clone(), original_source: Some(inner_source.source().into_string_lossy().into()), - inner_source_map: inner_source.map(&MapOptions::default()), + inner_source_map: inner_source + .map(&ObjectPool::default(), &MapOptions::default()), remove_original_source: false, }); let sms2 = SourceMapSource::new(SourceMapSourceOptions { @@ -263,7 +268,8 @@ mod tests { name: "text", source_map: source_r_map, original_source: Some(inner_source.source().into_string_lossy().into()), - inner_source_map: inner_source.map(&MapOptions::default()), + inner_source_map: inner_source + .map(&ObjectPool::default(), &MapOptions::default()), remove_original_source: true, }); let expected_content = @@ -271,7 +277,9 @@ mod tests { assert_eq!(sms1.source().into_string_lossy(), expected_content); assert_eq!(sms2.source().into_string_lossy(), expected_content); assert_eq!( - sms1.map(&MapOptions::default()).unwrap(), + sms1 + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(), SourceMap::from_json( r#"{ "mappings": "YAAAA,K,CAAMC;AACN,O,MAAU;ACCC,O,CAAM", @@ -287,7 +295,9 @@ mod tests { .unwrap(), ); assert_eq!( - sms2.map(&MapOptions::default()).unwrap(), + sms2 + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(), SourceMap::from_json( r#"{ "mappings": "YAAAA,K,CAAMC;AACN,O,MAAU", @@ -343,7 +353,9 @@ mod tests { "hi world\nhi world\nhi world\n" ); assert_eq!( - source.map(&MapOptions::default()).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(), SourceMap::from_json( r#"{ "mappings": "AAAA;;ACAA,CAAC,CAAI", @@ -356,7 +368,9 @@ mod tests { .unwrap() 
); assert_eq!( - source.map(&MapOptions::new(false)).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(), SourceMap::from_json( r#"{ "mappings": "AAAA;;ACAA", @@ -448,7 +462,9 @@ mod tests { a, b, ]); - let map = source.map(&MapOptions::default()).unwrap(); + let map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); assert_eq!( map.mappings(), "AAAA;AAAA;ACAA,ICAA,EDAA,ECAA,EFAA;AEAA,EFAA;ACAA", @@ -469,8 +485,10 @@ mod tests { .source() .into_string_lossy() .into_owned()); - test_cached!(source, |s: &dyn Source| s.map(&MapOptions::default())); - test_cached!(source, |s: &dyn Source| s.map(&MapOptions::new(false))); + test_cached!(source, |s: &dyn Source| s + .map(&ObjectPool::default(), &MapOptions::default())); + test_cached!(source, |s: &dyn Source| s + .map(&ObjectPool::default(), &MapOptions::new(false))); } #[test] @@ -501,7 +519,9 @@ mod tests { remove_original_source: false, }); assert_eq!( - source.map(&MapOptions::default()).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(), SourceMap::from_json( r#"{ "mappings": "AAAA", @@ -548,7 +568,9 @@ mod tests { ); assert_eq!(source.size(), 13); assert_eq!( - source.map(&MapOptions::default()).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(), SourceMap::from_json( r#"{ "mappings": "AAAAA,SCAA,ECAMC,C", @@ -599,7 +621,9 @@ mod tests { ).unwrap()), remove_original_source: true, }); - let map = source.map(&MapOptions::default()).unwrap(); + let map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); assert_eq!( map, SourceMap::from_json( @@ -620,7 +644,9 @@ mod tests { let source = SourceMapSource::new(WithoutOriginalOptions { value: "console.log('a')\n", name: "a.js", - source_map: original.map(&MapOptions::new(false)).unwrap(), + source_map: original + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(), }); let source = ConcatSource::new([ 
RawStringSource::from("\n").boxed(), @@ -628,7 +654,9 @@ mod tests { RawStringSource::from("\n").boxed(), source.boxed(), ]); - let map = source.map(&MapOptions::new(false)).unwrap(); + let map = source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(); assert_eq!(map.mappings(), ";;;AAAA"); } @@ -653,8 +681,9 @@ mod tests { }"#, ) .unwrap(); - let inner_source_map = - inner_source.map(&MapOptions::default()).map(|mut map| { + let inner_source_map = inner_source + .map(&ObjectPool::default(), &MapOptions::default()) + .map(|mut map| { map.set_source_root(Some("/path/to/folder/".to_string())); map }); @@ -667,7 +696,9 @@ mod tests { remove_original_source: false, }); assert_eq!( - sms.map(&MapOptions::default()).unwrap(), + sms + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(), SourceMap::from_json( r#"{ "mappings": "YAAAA,K,CAAMC;AACN,O,MAAU;ACCC,O,CAAM", @@ -714,7 +745,9 @@ mod tests { remove_original_source: false, }); assert_eq!( - source.map(&MapOptions::new(false)).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(), SourceMap::from_json( r#"{ "mappings": "AAAA", @@ -756,7 +789,9 @@ mod tests { remove_original_source: false, }); assert_eq!( - source.map(&MapOptions::default()).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(), SourceMap::from_json( r#"{ "version": 3, diff --git a/tests/compat_source.rs b/tests/compat_source.rs index d30b492d..83858b25 100644 --- a/tests/compat_source.rs +++ b/tests/compat_source.rs @@ -9,8 +9,6 @@ use rspack_sources::{ ConcatSource, MapOptions, ObjectPool, RawStringSource, Rope, Source, SourceExt, SourceMap, SourceValue, }; -use simd_json::borrowed::Object; -use simd_json::lazy::object; #[derive(Debug, Eq)] struct CompatSource(&'static str, Option); @@ -32,7 +30,11 @@ impl Source for CompatSource { 42 } - fn map(&self, _options: &MapOptions) -> Option { + fn map( + &self, + _object_pool: &ObjectPool, + _options: 
&MapOptions, + ) -> Option { self.1.clone() } @@ -44,8 +46,8 @@ impl Source for CompatSource { impl StreamChunks for CompatSource { fn stream_chunks<'a>( &'a self, - options: &MapOptions, object_pool: &'a ObjectPool, + options: &MapOptions, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, @@ -89,7 +91,10 @@ fn should_work_with_custom_compat_source() { assert_eq!(source.source().into_string_lossy(), CONTENT); assert_eq!(source.size(), 42); assert_eq!(source.buffer(), CONTENT.as_bytes()); - assert_eq!(source.map(&MapOptions::default()), None); + assert_eq!( + source.map(&ObjectPool::default(), &MapOptions::default()), + None + ); } #[test] @@ -111,7 +116,9 @@ fn should_generate_correct_source_map() { ]); let source = result.source(); - let map = result.map(&MapOptions::default()).unwrap(); + let map = result + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); let expected_source = "Line0\nLine1\nLine2\nLine3\n"; let expected_source_map = SourceMap::from_json( From 941c4e055325ed9f2a335f99e8857ea3734631b0 Mon Sep 17 00:00:00 2001 From: Cong-Cong Date: Mon, 3 Nov 2025 12:01:08 +0800 Subject: [PATCH 19/22] fix: cargo test --- benches/bench.rs | 6 ------ benches/bench_source_map.rs | 7 ------- src/concat_source.rs | 4 ++-- src/original_source.rs | 6 +++--- src/raw_source.rs | 8 ++++---- 5 files changed, 9 insertions(+), 22 deletions(-) diff --git a/benches/bench.rs b/benches/bench.rs index 04e9e863..bf33c477 100644 --- a/benches/bench.rs +++ b/benches/bench.rs @@ -23,7 +23,6 @@ use bench_complex_replace_source::{ }; use bench_source_map::{ benchmark_parse_source_map_from_json, benchmark_source_map_clone, - benchmark_stringify_source_map_to_json, }; use benchmark_repetitive_react_components::{ @@ -177,11 +176,6 @@ fn bench_rspack_sources(criterion: &mut Criterion) { group.bench_function("source_map_clone", benchmark_source_map_clone); - group.bench_function( - "stringify_source_map_to_json", - 
benchmark_stringify_source_map_to_json, - ); - group.bench_function( "repetitive_react_components_map", benchmark_repetitive_react_components_map, diff --git a/benches/bench_source_map.rs b/benches/bench_source_map.rs index 879c6a84..1992fdf3 100644 --- a/benches/bench_source_map.rs +++ b/benches/bench_source_map.rs @@ -25,10 +25,3 @@ pub fn benchmark_source_map_clone(b: &mut Bencher) { let _ = black_box(source.clone()); }) } - -pub fn benchmark_stringify_source_map_to_json(b: &mut Bencher) { - let source = SourceMap::from_json(ANTD_MIN_JS_MAP).unwrap(); - b.iter(|| { - let _ = black_box(source.to_json().unwrap()); - }) -} diff --git a/src/concat_source.rs b/src/concat_source.rs index 64a09916..931f39c7 100644 --- a/src/concat_source.rs +++ b/src/concat_source.rs @@ -23,7 +23,7 @@ use crate::{ /// ``` /// use rspack_sources::{ /// BoxSource, ConcatSource, MapOptions, OriginalSource, RawStringSource, Source, -/// SourceExt, SourceMap, +/// SourceExt, SourceMap, ObjectPool /// }; /// /// let mut source = ConcatSource::new([ @@ -42,7 +42,7 @@ use crate::{ /// "Hello World\nconsole.log('test');\nconsole.log('test2');\nHello2\n" /// ); /// assert_eq!( -/// source.map(&MapOptions::new(false)).unwrap(), +/// source.map(&ObjectPool::default(), &MapOptions::new(false)).unwrap(), /// SourceMap::from_json( /// r#"{ /// "version": 3, diff --git a/src/original_source.rs b/src/original_source.rs index 8b868925..e216f4da 100644 --- a/src/original_source.rs +++ b/src/original_source.rs @@ -22,17 +22,17 @@ use crate::{ /// - [webpack-sources docs](https://github.com/webpack/webpack-sources/#originalsource). 
/// /// ``` -/// use rspack_sources::{OriginalSource, MapOptions, Source}; +/// use rspack_sources::{OriginalSource, MapOptions, Source, ObjectPool}; /// /// let input = "if (hello()) { world(); hi(); there(); } done();\nif (hello()) { world(); hi(); there(); } done();"; /// let source = OriginalSource::new(input, "file.js"); /// assert_eq!(source.source().into_string_lossy(), input); /// assert_eq!( -/// source.map(&MapOptions::default()).unwrap().mappings(), +/// source.map(&ObjectPool::default(), &MapOptions::default()).unwrap().mappings(), /// "AAAA,eAAe,SAAS,MAAM,WAAW;AACzC,eAAe,SAAS,MAAM,WAAW", /// ); /// assert_eq!( -/// source.map(&MapOptions::new(false)).unwrap().mappings(), +/// source.map(&ObjectPool::default(), &MapOptions::new(false)).unwrap().mappings(), /// "AAAA;AACA", /// ); /// ``` diff --git a/src/raw_source.rs b/src/raw_source.rs index 28e89a8f..14e71a4a 100644 --- a/src/raw_source.rs +++ b/src/raw_source.rs @@ -18,12 +18,12 @@ use crate::{ /// - [webpack-sources docs](https://github.com/webpack/webpack-sources/#rawsource). /// /// ``` -/// use rspack_sources::{MapOptions, RawStringSource, Source}; +/// use rspack_sources::{MapOptions, RawStringSource, Source, ObjectPool}; /// /// let code = "some source code"; /// let s = RawStringSource::from(code.to_string()); /// assert_eq!(s.source().into_string_lossy(), code); -/// assert_eq!(s.map(&MapOptions::default()), None); +/// assert_eq!(s.map(&ObjectPool::default(), &MapOptions::default()), None); /// assert_eq!(s.size(), 16); /// ``` #[derive(Clone, PartialEq, Eq)] @@ -131,12 +131,12 @@ impl StreamChunks for RawStringSource { /// - [webpack-sources docs](https://github.com/webpack/webpack-sources/#rawsource). 
/// /// ``` -/// use rspack_sources::{MapOptions, RawBufferSource, Source}; +/// use rspack_sources::{MapOptions, RawBufferSource, Source, ObjectPool}; /// /// let code = "some source code".as_bytes(); /// let s = RawBufferSource::from(code); /// assert_eq!(s.buffer(), code); -/// assert_eq!(s.map(&MapOptions::default()), None); +/// assert_eq!(s.map(&ObjectPool::default(), &MapOptions::default()), None); /// assert_eq!(s.size(), 16); /// ``` pub struct RawBufferSource { From 28d1681759fe913b43ceb1bd55d9dfe3d3025235 Mon Sep 17 00:00:00 2001 From: Cong-Cong Date: Mon, 3 Nov 2025 13:01:22 +0800 Subject: [PATCH 20/22] rm arc clone --- src/source_content_lines.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/source_content_lines.rs b/src/source_content_lines.rs index 5c0bc046..02abe0e4 100644 --- a/src/source_content_lines.rs +++ b/src/source_content_lines.rs @@ -24,9 +24,9 @@ impl<'object_pool> SourceContentLines<'object_pool> { self.borrow_dependent().get(line) } - pub fn from(object_pool: &'object_pool ObjectPool, value: Arc) -> Self { + pub fn from(object_pool: &'object_pool ObjectPool, text: Arc) -> Self { let owner = Owner { - text: value.clone(), + text, object_pool, }; From a923139cba82f618ee70adfd609c12c905afcba0 Mon Sep 17 00:00:00 2001 From: Cong-Cong Date: Mon, 3 Nov 2025 13:32:14 +0800 Subject: [PATCH 21/22] perf: rm Rc in ObjectPool --- src/helpers.rs | 4 ++-- src/object_pool.rs | 32 ++++++++++----------------- src/replace_source.rs | 2 +- src/source_content_lines.rs | 44 ++++++++++++++++--------------------- src/with_indices.rs | 10 ++++----- 5 files changed, 39 insertions(+), 53 deletions(-) diff --git a/src/helpers.rs b/src/helpers.rs index ed01b13d..1e776542 100644 --- a/src/helpers.rs +++ b/src/helpers.rs @@ -854,7 +854,7 @@ where let inner_source_contents = inner_source_contents.borrow(); match inner_source_contents.get(&inner_source_index) { Some(Some(source_content)) => { - Some(SourceContentLines::from( + 
Some(SourceContentLines::new( object_pool, source_content.clone(), )) @@ -956,7 +956,7 @@ where let inner_source_contents = inner_source_contents.borrow(); match inner_source_contents.get(&inner_source_index) { Some(Some(source_content)) => { - Some(SourceContentLines::from( + Some(SourceContentLines::new( object_pool, source_content.clone(), )) diff --git a/src/object_pool.rs b/src/object_pool.rs index df860a8c..f3ca2d89 100644 --- a/src/object_pool.rs +++ b/src/object_pool.rs @@ -1,4 +1,4 @@ -use std::{cell::RefCell, collections::BTreeMap, rc::Rc}; +use std::{cell::RefCell, collections::BTreeMap}; // Vector pooling minimum capacity threshold // Recommended threshold: 64 @@ -12,15 +12,7 @@ const MIN_POOL_CAPACITY: usize = 64; /// A memory pool for reusing `T` allocations to reduce memory allocation overhead. #[derive(Default, Debug)] pub struct ObjectPool { - objects: Rc>>>>, -} - -impl Clone for ObjectPool { - fn clone(&self) -> Self { - Self { - objects: self.objects.clone(), - } - } + objects: RefCell>>>, } impl ObjectPool { @@ -29,16 +21,16 @@ impl ObjectPool { if requested_capacity < MIN_POOL_CAPACITY || self.objects.borrow().is_empty() { - return Pooled::new(self.clone(), Vec::with_capacity(requested_capacity)); + return Pooled::new(self, Vec::with_capacity(requested_capacity)); } let mut objects = self.objects.borrow_mut(); if let Some((_, bucket)) = objects.range_mut(requested_capacity..).next() { if let Some(mut object) = bucket.pop() { object.clear(); - return Pooled::new(self.clone(), object); + return Pooled::new(self, object); } } - Pooled::new(self.clone(), Vec::with_capacity(requested_capacity)) + Pooled::new(self, Vec::with_capacity(requested_capacity)) } /// Returns a `T` to the pool for future reuse. @@ -59,13 +51,13 @@ impl ObjectPool { /// pooled objects lifecycle. When the `Pooled` instance is dropped, the contained object /// is automatically returned to its associated pool for future reuse. 
#[derive(Debug)] -pub struct Pooled { +pub struct Pooled<'object_pool> { object: Option>, - pool: ObjectPool, + pool: &'object_pool ObjectPool, } -impl Pooled { - fn new(pool: ObjectPool, object: Vec) -> Self { +impl<'object_pool> Pooled<'object_pool> { + fn new(pool: &'object_pool ObjectPool, object: Vec) -> Self { Pooled { object: Some(object), pool, @@ -81,7 +73,7 @@ impl Pooled { } } -impl Drop for Pooled { +impl Drop for Pooled<'_> { fn drop(&mut self) { if let Some(object) = self.object.take() { self.pool.return_to_pool(object); @@ -89,7 +81,7 @@ impl Drop for Pooled { } } -impl std::ops::Deref for Pooled { +impl std::ops::Deref for Pooled<'_> { type Target = Vec; fn deref(&self) -> &Self::Target { @@ -97,7 +89,7 @@ impl std::ops::Deref for Pooled { } } -impl std::ops::DerefMut for Pooled { +impl std::ops::DerefMut for Pooled<'_> { fn deref_mut(&mut self) -> &mut Self::Target { self.as_mut() } diff --git a/src/replace_source.rs b/src/replace_source.rs index cdd3b100..06c141c7 100644 --- a/src/replace_source.rs +++ b/src/replace_source.rs @@ -403,7 +403,7 @@ impl StreamChunks for ReplaceSource { { match source_content { SourceContent::Raw(source) => { - let lines = SourceContentLines::from(object_pool, source.clone()); + let lines = SourceContentLines::new(object_pool, source.clone()); let matched = check_content_at_position(&lines, line, column, expected_chunk); *source_content = SourceContent::Lines(lines); diff --git a/src/source_content_lines.rs b/src/source_content_lines.rs index 02abe0e4..d923a501 100644 --- a/src/source_content_lines.rs +++ b/src/source_content_lines.rs @@ -4,36 +4,30 @@ use crate::{ helpers::split_into_lines, object_pool::ObjectPool, with_indices::WithIndices, }; -struct Owner<'object_pool> { +pub struct SourceContentLines<'object_pool> { text: Arc, - object_pool: &'object_pool ObjectPool, + // Self-referential data structure: lines borrow from the text. 
+ lines: Vec>, } -type BorrowedValue<'text> = Vec>; - -self_cell::self_cell!( - pub struct SourceContentLines<'object_pool> { - owner: Owner<'object_pool>, - #[covariant] - dependent: BorrowedValue, - } -); - impl<'object_pool> SourceContentLines<'object_pool> { - pub fn get(&self, line: usize) -> Option<&WithIndices<'_, &str>> { - self.borrow_dependent().get(line) + pub fn new(object_pool: &'object_pool ObjectPool, text: Arc) -> Self { + // SAFETY: We extend the lifetime of the &str to 'static because the Arc is owned by this struct, + // and all &'static str references are only used within the lifetime of this struct. + #[allow(unsafe_code)] + let text_ref = + unsafe { std::mem::transmute::<&str, &'static str>(text.as_ref()) }; + let lines = split_into_lines::<&str>(&text_ref) + .map(|line| WithIndices::new(object_pool, line)) + .collect::>(); + Self { text, lines } } - pub fn from(object_pool: &'object_pool ObjectPool, text: Arc) -> Self { - let owner = Owner { - text, - object_pool, - }; - - SourceContentLines::new(owner, |owner| { - split_into_lines(&owner.text.as_ref()) - .map(|line| WithIndices::new(owner.object_pool, line)) - .collect::>() - }) + pub fn get( + &self, + line: usize, + ) -> Option<&WithIndices<'object_pool, '_, &str>> { + let _ = &self.text; + self.lines.get(line) } } diff --git a/src/with_indices.rs b/src/with_indices.rs index 90d41c1f..3136c428 100644 --- a/src/with_indices.rs +++ b/src/with_indices.rs @@ -6,23 +6,23 @@ use crate::{ }; #[derive(Debug)] -pub struct WithIndices<'text, S> +pub struct WithIndices<'object_pool, 'text, S> where S: SourceText<'text>, { /// line is a string reference pub line: S, /// the byte position of each `char` in `line` string slice . 
- pub char_byte_indices: OnceCell, + pub char_byte_indices: OnceCell>, data: PhantomData<&'text S>, - object_pool: &'text ObjectPool, + object_pool: &'object_pool ObjectPool, } -impl<'text, S> WithIndices<'text, S> +impl<'object_pool, 'text, S> WithIndices<'object_pool, 'text, S> where S: SourceText<'text>, { - pub fn new(object_pool: &'text ObjectPool, line: S) -> Self { + pub fn new(object_pool: &'object_pool ObjectPool, line: S) -> Self { Self { char_byte_indices: OnceCell::new(), line, From be40f33d6a58ea6561628854111c7addc40ca8a5 Mon Sep 17 00:00:00 2001 From: Cong-Cong Date: Mon, 3 Nov 2025 14:19:08 +0800 Subject: [PATCH 22/22] rm not used crate --- Cargo.lock | 71 ------------------------- Cargo.toml | 3 -- benches/bench_complex_replace_source.rs | 1 - 3 files changed, 75 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 02d1d030..f36209d1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -220,20 +220,6 @@ version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" -[[package]] -name = "dashmap" -version = "6.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" -dependencies = [ - "cfg-if", - "crossbeam-utils", - "hashbrown", - "lock_api", - "once_cell", - "parking_lot_core", -] - [[package]] name = "dyn-clone" version = "1.0.17" @@ -363,15 +349,6 @@ version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "969488b55f8ac402214f3f5fd243ebb7206cf82de60d3172994707a4bcc2b829" -[[package]] -name = "lock_api" -version = "0.4.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" -dependencies = [ - "scopeguard", -] - [[package]] name = "log" version = "0.4.22" @@ -405,19 +382,6 @@ version = "11.1.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" -[[package]] -name = "parking_lot_core" -version = "0.9.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall", - "smallvec", - "windows-link", -] - [[package]] name = "ppv-lite86" version = "0.2.20" @@ -495,15 +459,6 @@ dependencies = [ "crossbeam-utils", ] -[[package]] -name = "redox_syscall" -version = "0.5.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" -dependencies = [ - "bitflags", -] - [[package]] name = "ref-cast" version = "1.0.23" @@ -559,13 +514,11 @@ version = "0.4.13" dependencies = [ "codspeed-criterion-compat", "criterion", - "dashmap", "dyn-clone", "memchr", "rayon", "regex", "rustc-hash", - "self_cell", "serde", "serde_json", "simd-json", @@ -607,18 +560,6 @@ dependencies = [ "winapi-util", ] -[[package]] -name = "scopeguard" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" - -[[package]] -name = "self_cell" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16c2f82143577edb4921b71ede051dac62ca3c16084e918bf7b40c96ae10eb33" - [[package]] name = "serde" version = "1.0.216" @@ -672,12 +613,6 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f27f6278552951f1f2b8cf9da965d10969b2efdea95a6ec47987ab46edfe263a" -[[package]] -name = "smallvec" -version = "1.15.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" - [[package]] name = "static_assertions" version = "1.1.0" @@ 
-840,12 +775,6 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" -[[package]] -name = "windows-link" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" - [[package]] name = "windows-sys" version = "0.48.0" diff --git a/Cargo.toml b/Cargo.toml index 62d3e31c..22c38776 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -36,12 +36,9 @@ dyn-clone = "1.0.17" rustc-hash = "2.1.0" memchr = "2.7.4" - codspeed-criterion-compat = { version = "2.7.2", default-features = false, optional = true } static_assertions = "1.1.0" simd-json = "0.14.3" -dashmap = "6.1.0" -self_cell = "1.2.1" [dev-dependencies] twox-hash = "2.1.0" diff --git a/benches/bench_complex_replace_source.rs b/benches/bench_complex_replace_source.rs index 65a13c77..90fc9e60 100644 --- a/benches/bench_complex_replace_source.rs +++ b/benches/bench_complex_replace_source.rs @@ -11,7 +11,6 @@ pub use codspeed_criterion_compat::*; use rspack_sources::{ BoxSource, MapOptions, ObjectPool, OriginalSource, ReplaceSource, SourceExt, }; -use simd_json::borrowed::Object; static LARGE_REPLACE_SOURCE: LazyLock = LazyLock::new(|| { let mut source = ReplaceSource::new(