diff --git a/Cargo.lock b/Cargo.lock index 5c87ad0c..f36209d1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -195,6 +195,31 @@ dependencies = [ "itertools", ] +[[package]] +name = "crossbeam-deque" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + [[package]] name = "dyn-clone" version = "1.0.17" @@ -414,6 +439,26 @@ dependencies = [ "getrandom", ] +[[package]] +name = "rayon" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + [[package]] name = "ref-cast" version = "1.0.23" @@ -471,9 +516,9 @@ dependencies = [ "criterion", "dyn-clone", "memchr", + "rayon", "regex", "rustc-hash", - "self_cell", "serde", "serde_json", "simd-json", @@ -515,12 +560,6 @@ dependencies = [ "winapi-util", ] -[[package]] -name = "self_cell" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16c2f82143577edb4921b71ede051dac62ca3c16084e918bf7b40c96ae10eb33" - [[package]] name = "serde" version = "1.0.216" diff --git a/Cargo.toml b/Cargo.toml index 67cd82e9..22c38776 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -36,16 +36,15 @@ dyn-clone = "1.0.17" rustc-hash = "2.1.0" memchr = "2.7.4" - codspeed-criterion-compat = { version = "2.7.2", default-features = false, optional = true } static_assertions = "1.1.0" simd-json = "0.14.3" -self_cell = "1.2.1" [dev-dependencies] twox-hash = "2.1.0" regex = "1.11.1" criterion = { version = "0.5.1", default-features = false } +rayon = "1.11.0" [features] codspeed = ["codspeed-criterion-compat"] diff --git a/benches/bench.rs b/benches/bench.rs index d03ab5cc..bf33c477 100644 --- a/benches/bench.rs +++ b/benches/bench.rs @@ -13,16 +13,16 @@ pub use criterion::*; pub use codspeed_criterion_compat::*; use rspack_sources::{ - BoxSource, CachedSource, ConcatSource, MapOptions, Source, SourceExt, - SourceMap, SourceMapSource, SourceMapSourceOptions, + BoxSource, CachedSource, ConcatSource, MapOptions, ObjectPool, Source, + SourceExt, SourceMap, SourceMapSource, SourceMapSourceOptions, }; use bench_complex_replace_source::{ - benchmark_complex_replace_source_map, benchmark_complex_replace_source_source, + benchmark_complex_replace_source_map, benchmark_complex_replace_source_size, + benchmark_complex_replace_source_source, }; use bench_source_map::{ benchmark_parse_source_map_from_json, benchmark_source_map_clone, - benchmark_stringify_source_map_to_json, }; use benchmark_repetitive_react_components::{ @@ -30,8 +30,6 @@ use benchmark_repetitive_react_components::{ 
benchmark_repetitive_react_components_source, }; -use crate::bench_complex_replace_source::benchmark_complex_replace_source_size; - const HELLOWORLD_JS: &str = include_str!(concat!( env!("CARGO_MANIFEST_DIR"), "/benches/fixtures/transpile-minify/files/helloworld.js" @@ -80,7 +78,7 @@ fn benchmark_concat_generate_string(b: &mut Bencher) { b.iter(|| { concat - .map(&MapOptions::default()) + .map(&ObjectPool::default(), &MapOptions::default()) .unwrap() .to_json() .unwrap(); @@ -109,7 +107,7 @@ fn benchmark_concat_generate_string_with_cache(b: &mut Bencher) { b.iter(|| { cached - .map(&MapOptions::default()) + .map(&ObjectPool::default(), &MapOptions::default()) .unwrap() .to_json() .unwrap(); @@ -178,11 +176,6 @@ fn bench_rspack_sources(criterion: &mut Criterion) { group.bench_function("source_map_clone", benchmark_source_map_clone); - group.bench_function( - "stringify_source_map_to_json", - benchmark_stringify_source_map_to_json, - ); - group.bench_function( "repetitive_react_components_map", benchmark_repetitive_react_components_map, diff --git a/benches/bench_complex_replace_source.rs b/benches/bench_complex_replace_source.rs index 2439e66e..90fc9e60 100644 --- a/benches/bench_complex_replace_source.rs +++ b/benches/bench_complex_replace_source.rs @@ -9,7 +9,7 @@ pub use criterion::*; pub use codspeed_criterion_compat::*; use rspack_sources::{ - BoxSource, MapOptions, OriginalSource, ReplaceSource, SourceExt, + BoxSource, MapOptions, ObjectPool, OriginalSource, ReplaceSource, SourceExt, }; static LARGE_REPLACE_SOURCE: LazyLock = LazyLock::new(|| { @@ -36724,7 +36724,7 @@ pub fn benchmark_complex_replace_source_map(b: &mut Bencher) { let source = LARGE_REPLACE_SOURCE.clone(); b.iter(|| { - black_box(source.map(&MapOptions::default())); + black_box(source.map(&ObjectPool::default(), &MapOptions::default())); }); } diff --git a/benches/bench_source_map.rs b/benches/bench_source_map.rs index 879c6a84..1992fdf3 100644 --- a/benches/bench_source_map.rs +++ b/benches/bench_source_map.rs @@ -25,10 +25,3 @@ pub fn benchmark_source_map_clone(b: &mut Bencher) { let _ = black_box(source.clone()); }) } - -pub fn benchmark_stringify_source_map_to_json(b: &mut Bencher) { - let source = SourceMap::from_json(ANTD_MIN_JS_MAP).unwrap(); - b.iter(|| { - let _ = black_box(source.to_json().unwrap()); - }) -} diff --git a/benches/benchmark_repetitive_react_components.rs b/benches/benchmark_repetitive_react_components.rs index 96875033..602282ee 100644 --- a/benches/benchmark_repetitive_react_components.rs +++ b/benches/benchmark_repetitive_react_components.rs @@ -9,9 +9,9 @@ pub use criterion::*; pub use codspeed_criterion_compat::*; use rspack_sources::{ - BoxSource, ConcatSource, MapOptions, OriginalSource, RawStringSource, - ReplaceSource, ReplacementEnforce, Source, SourceExt, SourceMap, - SourceMapSource, SourceMapSourceOptions, + BoxSource, ConcatSource, MapOptions, ObjectPool, OriginalSource, + RawStringSource, ReplaceSource, ReplacementEnforce, Source, SourceExt, + SourceMap, SourceMapSource, SourceMapSourceOptions, }; static REPETITIVE_1K_REACT_COMPONENTS_SOURCE: LazyLock = @@ -3505,7 +3505,7 @@ pub fn benchmark_repetitive_react_components_map(b: &mut Bencher) { let source = REPETITIVE_1K_REACT_COMPONENTS_SOURCE.clone(); b.iter(|| { - black_box(source.map(&MapOptions::default())); + black_box(source.map(&ObjectPool::default(), &MapOptions::default())); }); } diff --git a/src/cached_source.rs b/src/cached_source.rs index 07329248..41ab3df5 100644 --- a/src/cached_source.rs +++ b/src/cached_source.rs @@ 
-11,6 +11,7 @@ use crate::{ stream_and_get_source_and_map, stream_chunks_of_raw_source, stream_chunks_of_source_map, StreamChunks, }, + object_pool::ObjectPool, rope::Rope, source::SourceValue, BoxSource, MapOptions, Source, SourceExt, SourceMap, @@ -99,18 +100,22 @@ impl Source for CachedSource { *self.cache.size.get_or_init(|| self.inner.size()) } - fn map(&self, options: &MapOptions) -> Option { + fn map( + &self, + object_pool: &ObjectPool, + options: &MapOptions, + ) -> Option { if options.columns { self .cache .columns_map - .get_or_init(|| self.inner.map(options)) + .get_or_init(|| self.inner.map(object_pool, options)) .clone() } else { self .cache .line_only_map - .get_or_init(|| self.inner.map(options)) + .get_or_init(|| self.inner.map(object_pool, options)) .clone() } } @@ -123,6 +128,7 @@ impl Source for CachedSource { impl StreamChunks for CachedSource { fn stream_chunks<'a>( &'a self, + object_pool: &'a ObjectPool, options: &MapOptions, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, @@ -138,7 +144,13 @@ impl StreamChunks for CachedSource { let source = self.rope(); if let Some(map) = map { stream_chunks_of_source_map( - source, map, on_chunk, on_source, on_name, options, + options, + object_pool, + source, + map, + on_chunk, + on_source, + on_name, ) } else { stream_chunks_of_raw_source( @@ -148,8 +160,9 @@ impl StreamChunks for CachedSource { } None => { let (generated_info, map) = stream_and_get_source_and_map( - &self.inner, options, + object_pool, + &self.inner, on_chunk, on_source, on_name, @@ -233,7 +246,9 @@ mod tests { }) .boxed(), ]); - let map = source.map(&Default::default()).unwrap(); + let map = source + .map(&ObjectPool::default(), &Default::default()) + .unwrap(); assert_eq!(map.mappings(), ";;AACA"); } @@ -248,11 +263,11 @@ mod tests { source.source(); source.buffer(); source.size(); - source.map(&map_options); + source.map(&ObjectPool::default(), &map_options); assert_eq!( *clone.cache.columns_map.get().unwrap(), - source.map(&map_options) + source.map(&ObjectPool::default(), &map_options) ); } @@ -302,16 +317,14 @@ mod tests { #[test] fn should_produce_correct_output_for_cached_raw_source() { - let map_options = MapOptions { - columns: true, - final_source: true, - }; + let map_options = MapOptions::new(true); let source = RawStringSource::from("Test\nTest\nTest\n"); let mut on_chunk_count = 0; let mut on_source_count = 0; let mut on_name_count = 0; let generated_info = source.stream_chunks( + &ObjectPool::default(), &map_options, &mut |_chunk, _mapping| { on_chunk_count += 1; @@ -326,6 +339,7 @@ mod tests { let cached_source = CachedSource::new(source); cached_source.stream_chunks( + &ObjectPool::default(), &map_options, &mut |_chunk, _mapping| {}, &mut |_source_index, _source, _source_content| {}, @@ -336,6 +350,7 @@ mod tests { let mut cached_on_source_count = 0; let mut cached_on_name_count = 0; let cached_generated_info = cached_source.stream_chunks( + &ObjectPool::default(), &map_options, &mut |_chunk, _mapping| { cached_on_chunk_count += 1; diff --git a/src/concat_source.rs b/src/concat_source.rs index e7048d4d..931f39c7 100644 --- a/src/concat_source.rs +++ b/src/concat_source.rs @@ -10,6 +10,7 @@ use rustc_hash::FxHashMap as HashMap; use crate::{ helpers::{get_map, GeneratedInfo, OnChunk, OnName, OnSource, StreamChunks}, linear_map::LinearMap, + object_pool::ObjectPool, source::{Mapping, OriginalLocation}, BoxSource, MapOptions, RawStringSource, Rope, Source, SourceExt, SourceMap, SourceValue, @@ -22,7 +23,7 
@@ use crate::{ /// ``` /// use rspack_sources::{ /// BoxSource, ConcatSource, MapOptions, OriginalSource, RawStringSource, Source, -/// SourceExt, SourceMap, +/// SourceExt, SourceMap, ObjectPool /// }; /// /// let mut source = ConcatSource::new([ @@ -41,7 +42,7 @@ use crate::{ /// "Hello World\nconsole.log('test');\nconsole.log('test2');\nHello2\n" /// ); /// assert_eq!( -/// source.map(&MapOptions::new(false)).unwrap(), +/// source.map(&ObjectPool::default(), &MapOptions::new(false)).unwrap(), /// SourceMap::from_json( /// r#"{ /// "version": 3, @@ -199,8 +200,12 @@ impl Source for ConcatSource { .sum() } - fn map(&self, options: &MapOptions) -> Option { - get_map(self, options) + fn map( + &self, + object_pool: &ObjectPool, + options: &MapOptions, + ) -> Option { + get_map(object_pool, self, options) } fn to_writer(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> { @@ -230,6 +235,7 @@ impl Eq for ConcatSource {} impl StreamChunks for ConcatSource { fn stream_chunks<'a>( &'a self, + object_pool: &'a ObjectPool, options: &MapOptions, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, @@ -238,7 +244,13 @@ impl StreamChunks for ConcatSource { let children = self.optimized_children(); if children.len() == 1 { - return children[0].stream_chunks(options, on_chunk, on_source, on_name); + return children[0].stream_chunks( + object_pool, + options, + on_chunk, + on_source, + on_name, + ); } let mut current_line_offset = 0; let mut current_column_offset = 0; @@ -259,6 +271,7 @@ impl StreamChunks for ConcatSource { generated_line, generated_column, } = item.stream_chunks( + object_pool, options, &mut |chunk, mapping| { let line = mapping.generated_line + current_line_offset; @@ -470,7 +483,9 @@ mod tests { assert_eq!(source.size(), 62); assert_eq!(source.source().into_string_lossy(), expected_source); assert_eq!( - source.map(&MapOptions::new(false)).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(), SourceMap::from_json( r#"{ "version": 3, @@ -486,7 +501,9 @@ mod tests { .unwrap() ); assert_eq!( - source.map(&MapOptions::default()).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(), SourceMap::from_json( r#"{ "version": 3, @@ -520,7 +537,9 @@ mod tests { assert_eq!(source.size(), 62); assert_eq!(source.source().into_string_lossy(), expected_source); assert_eq!( - source.map(&MapOptions::new(false)).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(), SourceMap::from_json( r#"{ "version": 3, @@ -536,7 +555,9 @@ mod tests { .unwrap() ); assert_eq!( - source.map(&MapOptions::default()).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(), SourceMap::from_json( r#"{ "version": 3, @@ -570,7 +591,9 @@ mod tests { assert_eq!(source.size(), 62); assert_eq!(source.source().into_string_lossy(), expected_source); assert_eq!( - source.map(&MapOptions::new(false)).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(), SourceMap::from_json( r#"{ "version": 3, @@ -586,7 +609,9 @@ mod tests { .unwrap() ); assert_eq!( - source.map(&MapOptions::default()).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(), SourceMap::from_json( r#"{ "version": 3, @@ -638,7 +663,9 @@ mod tests { assert_eq!(source.source().into_string_lossy(), expected_source); assert_eq!(source.buffer(), expected_source.as_bytes()); - let map = source.map(&MapOptions::new(false)).unwrap(); + let map = source + 
.map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(); assert_eq!(map, expected_map1); // TODO: test hash @@ -653,8 +680,9 @@ mod tests { ]); let result_text = source.source(); - let result_map = source.map(&MapOptions::default()); - let result_list_map = source.map(&MapOptions::new(false)); + let result_map = source.map(&ObjectPool::default(), &MapOptions::default()); + let result_list_map = + source.map(&ObjectPool::default(), &MapOptions::new(false)); assert_eq!( result_text.into_string_lossy(), @@ -678,7 +706,9 @@ mod tests { ]); assert_eq!( - source.map(&MapOptions::default()).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(), SourceMap::from_json( r#"{ "mappings": "AAAA,K,CCAA,M;ADAA;;ACAA", @@ -704,7 +734,9 @@ mod tests { RawStringSource::from("c"), ]); assert_eq!(source.source().into_string_lossy(), "abc"); - assert!(source.map(&MapOptions::default()).is_none()); + assert!(source + .map(&ObjectPool::default(), &MapOptions::default()) + .is_none()); } #[test] diff --git a/src/helpers.rs b/src/helpers.rs index a587c7e0..1e776542 100644 --- a/src/helpers.rs +++ b/src/helpers.rs @@ -12,6 +12,7 @@ use crate::{ decoder::MappingsDecoder, encoder::create_encoder, linear_map::LinearMap, + object_pool::ObjectPool, source::{Mapping, OriginalLocation}, source_content_lines::SourceContentLines, with_indices::WithIndices, @@ -19,8 +20,9 @@ use crate::{ }; pub fn get_map<'a, S: StreamChunks>( + object_pool: &'a ObjectPool, stream: &'a S, - options: &'a MapOptions, + options: &MapOptions, ) -> Option { let mut mappings_encoder = create_encoder(options.columns); let mut sources: Vec = Vec::new(); @@ -28,6 +30,7 @@ pub fn get_map<'a, S: StreamChunks>( let mut names: Vec = Vec::new(); stream.stream_chunks( + object_pool, &MapOptions { columns: options.columns, final_source: true, @@ -69,6 +72,7 @@ pub trait StreamChunks { /// [StreamChunks] abstraction fn stream_chunks<'a>( &'a self, + object_pool: &'a ObjectPool, options: &MapOptions, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, @@ -89,9 +93,10 @@ pub type OnName<'a, 'b> = &'a mut dyn FnMut(u32, Cow<'b, str>); /// Default stream chunks behavior impl, see [webpack-sources streamChunks](https://github.com/webpack/webpack-sources/blob/9f98066311d53a153fdc7c633422a1d086528027/lib/helpers/streamChunks.js#L15-L35). pub fn stream_chunks_default<'a, S>( + options: &MapOptions, + object_pool: &'a ObjectPool, source: S, source_map: Option<&'a SourceMap>, - options: &MapOptions, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, @@ -101,7 +106,13 @@ where { if let Some(map) = source_map { stream_chunks_of_source_map( - source, map, on_chunk, on_source, on_name, options, + options, + object_pool, + source, + map, + on_chunk, + on_source, + on_name, ) } else { stream_chunks_of_raw_source(source, options, on_chunk, on_source, on_name) @@ -300,12 +311,13 @@ where } pub fn stream_chunks_of_source_map<'a, S>( + options: &MapOptions, + object_pool: &'a ObjectPool, source: S, source_map: &'a SourceMap, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, - options: &MapOptions, ) -> GeneratedInfo where S: SourceText<'a> + 'a, @@ -314,24 +326,33 @@ where MapOptions { columns: true, final_source: true, + .. } => stream_chunks_of_source_map_final( source, source_map, on_chunk, on_source, on_name, ), MapOptions { columns: true, final_source: false, + .. 
} => stream_chunks_of_source_map_full( - source, source_map, on_chunk, on_source, on_name, + object_pool, + source, + source_map, + on_chunk, + on_source, + on_name, ), MapOptions { columns: false, final_source: true, + .. } => stream_chunks_of_source_map_lines_final( source, source_map, on_chunk, on_source, on_name, ), MapOptions { columns: false, final_source: false, + .. } => stream_chunks_of_source_map_lines_full( source, source_map, on_chunk, on_source, on_name, ), @@ -410,6 +431,7 @@ where } fn stream_chunks_of_source_map_full<'a, S>( + object_pool: &'a ObjectPool, source: S, source_map: &'a SourceMap, on_chunk: OnChunk<'_, 'a>, @@ -420,7 +442,9 @@ where S: SourceText<'a> + 'a, { let lines = split_into_lines(&source); - let line_with_indices_list = lines.map(WithIndices::new).collect::>(); + let line_with_indices_list = lines + .map(|line| WithIndices::new(object_pool, line)) + .collect::>(); if line_with_indices_list.is_empty() { return GeneratedInfo { @@ -707,6 +731,8 @@ type InnerSourceIndexValueMapping<'a> = #[allow(clippy::too_many_arguments)] pub fn stream_chunks_of_combined_source_map<'a, S>( + options: &MapOptions, + object_pool: &'a ObjectPool, source: S, source_map: &'a SourceMap, inner_source_name: &'a str, @@ -716,7 +742,6 @@ pub fn stream_chunks_of_combined_source_map<'a, S>( on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, - options: &MapOptions, ) -> GeneratedInfo where S: SourceText<'a> + 'a, @@ -773,6 +798,8 @@ where }; stream_chunks_of_source_map( + options, + object_pool, source.clone(), source_map, &mut |chunk, mapping| { @@ -827,7 +854,10 @@ where let inner_source_contents = inner_source_contents.borrow(); match inner_source_contents.get(&inner_source_index) { Some(Some(source_content)) => { - Some(SourceContentLines::from(source_content.clone())) + Some(SourceContentLines::new( + object_pool, + source_content.clone(), + )) } _ => None, } @@ -926,7 +956,10 @@ where let inner_source_contents = inner_source_contents.borrow(); match inner_source_contents.get(&inner_source_index) { Some(Some(source_content)) => { - Some(SourceContentLines::from(source_content.clone())) + Some(SourceContentLines::new( + object_pool, + source_content.clone(), + )) } _ => None, } @@ -1090,6 +1123,11 @@ where } source_index_mapping.borrow_mut().insert(i, -2); stream_chunks_of_source_map( + &MapOptions { + columns: options.columns, + final_source: false, + }, + object_pool, source_content.unwrap().as_ref(), inner_source_map, &mut |chunk, mapping| { @@ -1159,10 +1197,6 @@ where inner_name_index_mapping.borrow_mut().insert(i, -2); inner_name_index_value_mapping.borrow_mut().insert(i, name); }, - &MapOptions { - columns: options.columns, - final_source: false, - }, ); } else { let mut source_mapping = source_mapping.borrow_mut(); @@ -1182,13 +1216,13 @@ where name_index_mapping.borrow_mut().insert(i, -2); name_index_value_mapping.borrow_mut().insert(i, name); }, - options, ) } pub fn stream_and_get_source_and_map<'a, S: StreamChunks>( - input_source: &'a S, options: &MapOptions, + object_pool: &'a ObjectPool, + input_source: &'a S, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, @@ -1199,6 +1233,7 @@ pub fn stream_and_get_source_and_map<'a, S: StreamChunks>( let mut names: Vec = Vec::new(); let generated_info = input_source.stream_chunks( + object_pool, options, &mut |chunk, mapping| { mappings_encoder.encode(&mapping); diff --git a/src/lib.rs b/src/lib.rs index fe46d523..02c53d53 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ 
-7,6 +7,7 @@ mod encoder;
 mod error;
 mod helpers;
 mod linear_map;
+mod object_pool;
 mod original_source;
 mod raw_source;
 mod replace_source;
@@ -40,3 +41,5 @@ pub mod stream_chunks {
 }
 
 pub use helpers::{decode_mappings, encode_mappings};
+
+pub use object_pool::ObjectPool;
diff --git a/src/object_pool.rs b/src/object_pool.rs
new file mode 100644
index 00000000..f3ca2d89
--- /dev/null
+++ b/src/object_pool.rs
@@ -0,0 +1,96 @@
+use std::{cell::RefCell, collections::BTreeMap};
+
+// Minimum capacity threshold for vector pooling.
+// Recommended threshold: 64
+// Reasons:
+// 1. Memory: 64 * 8 bytes = 512 bytes, a reasonable memory block size
+// 2. Allocation cost: allocations smaller than 512 bytes are usually fast, so pooling brings little benefit
+// 3. Cache friendliness: a 512-byte block sits comfortably in the CPU cache
+// 4. Empirical value: 64 has proven to be a good balance point in real projects
+const MIN_POOL_CAPACITY: usize = 64;
+
+/// A memory pool for reusing `Vec<usize>` allocations to reduce memory allocation overhead.
+#[derive(Default, Debug)]
+pub struct ObjectPool {
+  objects: RefCell<BTreeMap<usize, Vec<Vec<usize>>>>,
+}
+
+impl ObjectPool {
+  /// Retrieves a reusable `Vec<usize>` from the pool with at least the requested capacity.
+  pub fn pull(&self, requested_capacity: usize) -> Pooled {
+    if requested_capacity < MIN_POOL_CAPACITY
+      || self.objects.borrow().is_empty()
+    {
+      return Pooled::new(self, Vec::with_capacity(requested_capacity));
+    }
+    let mut objects = self.objects.borrow_mut();
+    if let Some((_, bucket)) = objects.range_mut(requested_capacity..).next() {
+      if let Some(mut object) = bucket.pop() {
+        object.clear();
+        return Pooled::new(self, object);
+      }
+    }
+    Pooled::new(self, Vec::with_capacity(requested_capacity))
+  }
+
+  /// Returns a `Vec<usize>` to the pool for future reuse.
+  fn return_to_pool(&self, object: Vec<usize>) {
+    if object.capacity() < MIN_POOL_CAPACITY {
+      return;
+    }
+    let mut objects = self.objects.borrow_mut();
+    let cap = object.capacity();
+    let bucket = objects.entry(cap).or_default();
+    bucket.push(object);
+  }
+}
+
+/// A smart pointer that holds a pooled object and automatically returns it to the pool when dropped.
+///
+/// `Pooled` implements the RAII (Resource Acquisition Is Initialization) pattern to manage
+/// the lifecycle of pooled objects. When a `Pooled` instance is dropped, the contained object
+/// is automatically returned to its associated pool for future reuse.
+#[derive(Debug)]
+pub struct Pooled<'object_pool> {
+  object: Option<Vec<usize>>,
+  pool: &'object_pool ObjectPool,
+}
+
+impl<'object_pool> Pooled<'object_pool> {
+  fn new(pool: &'object_pool ObjectPool, object: Vec<usize>) -> Self {
+    Pooled {
+      object: Some(object),
+      pool,
+    }
+  }
+
+  pub fn as_mut(&mut self) -> &mut Vec<usize> {
+    self.object.as_mut().unwrap()
+  }
+
+  pub fn as_ref(&self) -> &Vec<usize> {
+    self.object.as_ref().unwrap()
+  }
+}
+
+impl Drop for Pooled<'_> {
+  fn drop(&mut self) {
+    if let Some(object) = self.object.take() {
+      self.pool.return_to_pool(object);
+    }
+  }
+}
+
+impl std::ops::Deref for Pooled<'_> {
+  type Target = Vec<usize>;
+
+  fn deref(&self) -> &Self::Target {
+    self.as_ref()
+  }
+}
+
+impl std::ops::DerefMut for Pooled<'_> {
+  fn deref_mut(&mut self) -> &mut Self::Target {
+    self.as_mut()
+  }
+}
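A minimal usage sketch of the pool added above (an illustration, not part of the patch; the `demo` function name is hypothetical): `pull` hands out a cleared `Vec<usize>` with at least the requested capacity, the `Pooled` guard derefs to that vector, and dropping the guard returns the allocation to the pool so a later `pull` can reuse it.

use rspack_sources::ObjectPool;

fn demo() {
  let pool = ObjectPool::default();
  {
    // `pull` returns a `Pooled` guard that derefs to `Vec<usize>`.
    let mut indices = pool.pull(128);
    indices.extend(0..100usize);
  } // guard dropped here: the 128-capacity buffer goes back into the pool
  // A later request at or below that capacity reuses the pooled allocation.
  let reused = pool.pull(100);
  assert!(reused.capacity() >= 100);
}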
diff --git a/src/original_source.rs b/src/original_source.rs
index cbed9585..e216f4da 100644
--- a/src/original_source.rs
+++ b/src/original_source.rs
@@ -10,6 +10,7 @@ use crate::{
     split_into_potential_tokens, GeneratedInfo, OnChunk, OnName, OnSource,
     SourceText, StreamChunks,
   },
+  object_pool::ObjectPool,
   source::{Mapping, OriginalLocation},
   MapOptions, Rope, Source, SourceMap, SourceValue,
 };
@@ -21,17 +22,17 @@ use crate::{
 /// - [webpack-sources docs](https://github.com/webpack/webpack-sources/#originalsource).
 ///
 /// ```
-/// use rspack_sources::{OriginalSource, MapOptions, Source};
+/// use rspack_sources::{OriginalSource, MapOptions, Source, ObjectPool};
 ///
 /// let input = "if (hello()) { world(); hi(); there(); } done();\nif (hello()) { world(); hi(); there(); } done();";
 /// let source = OriginalSource::new(input, "file.js");
 /// assert_eq!(source.source().into_string_lossy(), input);
 /// assert_eq!(
-///   source.map(&MapOptions::default()).unwrap().mappings(),
+///   source.map(&ObjectPool::default(), &MapOptions::default()).unwrap().mappings(),
 ///   "AAAA,eAAe,SAAS,MAAM,WAAW;AACzC,eAAe,SAAS,MAAM,WAAW",
 /// );
 /// assert_eq!(
-///   source.map(&MapOptions::new(false)).unwrap().mappings(),
+///   source.map(&ObjectPool::default(), &MapOptions::new(false)).unwrap().mappings(),
 ///   "AAAA;AACA",
 /// );
 /// ```
@@ -68,8 +69,12 @@ impl Source for OriginalSource {
     self.value.len()
   }
 
-  fn map(&self, options: &MapOptions) -> Option<SourceMap> {
-    get_map(self, options)
+  fn map(
+    &self,
+    object_pool: &ObjectPool,
+    options: &MapOptions,
+  ) -> Option<SourceMap> {
+    get_map(object_pool, self, options)
   }
 
   fn to_writer(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> {
@@ -109,6 +114,7 @@ impl std::fmt::Debug for OriginalSource {
 impl StreamChunks for OriginalSource {
   fn stream_chunks<'a>(
     &'a self,
+    _: &'a ObjectPool,
     options: &MapOptions,
     on_chunk: OnChunk<'_, 'a>,
     on_source: OnSource<'_, 'a>,
@@ -245,8 +251,12 @@ mod tests {
   fn should_handle_multiline_string() {
     let source = OriginalSource::new("Line1\n\nLine3\n", "file.js");
     let result_text = source.source();
-    let result_map = source.map(&MapOptions::default()).unwrap();
-    let result_list_map = source.map(&MapOptions::new(false)).unwrap();
+    let result_map = source
+      .map(&ObjectPool::default(), &MapOptions::default())
+      .unwrap();
+    let result_list_map = source
+      .map(&ObjectPool::default(), &MapOptions::new(false))
+      .unwrap();
 
     assert_eq!(result_text.into_string_lossy(), "Line1\n\nLine3\n");
     assert_eq!(result_map.sources(), &["file.js".to_string()]);
@@ -264,8 +274,9 @@ mod tests {
   fn should_handle_empty_string() {
     let source = OriginalSource::new("", "file.js");
     let result_text = source.source();
-    let result_map = source.map(&MapOptions::default());
-
let result_list_map = source.map(&MapOptions::new(false)); + let result_map = source.map(&ObjectPool::default(), &MapOptions::default()); + let result_list_map = + source.map(&ObjectPool::default(), &MapOptions::new(false)); assert_eq!(result_text.into_string_lossy(), ""); assert!(result_map.is_none()); @@ -275,7 +286,9 @@ mod tests { #[test] fn should_omit_mappings_for_columns_with_node() { let source = OriginalSource::new("Line1\n\nLine3\n", "file.js"); - let result_map = source.map(&MapOptions::new(false)).unwrap(); + let result_map = source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(); assert_eq!(result_map.mappings(), "AAAA;AACA;AACA"); } @@ -300,11 +313,17 @@ mod tests { let source = OriginalSource::new(input, "file.js"); assert_eq!(source.source().into_string_lossy(), input); assert_eq!( - source.map(&MapOptions::default()).unwrap().mappings(), + source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap() + .mappings(), "AAAA,eAAe,SAAS,MAAM,WAAW;AACzC,eAAe,SAAS,MAAM,WAAW", ); assert_eq!( - source.map(&MapOptions::new(false)).unwrap().mappings(), + source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap() + .mappings(), "AAAA;AACA", ); } @@ -320,7 +339,9 @@ mod tests { let source2 = OriginalSource::new(code2, "world.txt"); let concat = ConcatSource::new([source1.boxed(), source2.boxed()]); - let map = concat.map(&MapOptions::new(false)).unwrap(); + let map = concat + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(); assert_eq!(map.mappings(), "AAAA;AACA;ACDA",); } } diff --git a/src/raw_source.rs b/src/raw_source.rs index f55856ba..14e71a4a 100644 --- a/src/raw_source.rs +++ b/src/raw_source.rs @@ -9,6 +9,7 @@ use crate::{ get_generated_source_info, stream_chunks_of_raw_source, OnChunk, OnName, OnSource, StreamChunks, }, + object_pool::ObjectPool, MapOptions, Rope, Source, SourceMap, SourceValue, }; @@ -17,12 +18,12 @@ use crate::{ /// - [webpack-sources docs](https://github.com/webpack/webpack-sources/#rawsource). /// /// ``` -/// use rspack_sources::{MapOptions, RawStringSource, Source}; +/// use rspack_sources::{MapOptions, RawStringSource, Source, ObjectPool}; /// /// let code = "some source code"; /// let s = RawStringSource::from(code.to_string()); /// assert_eq!(s.source().into_string_lossy(), code); -/// assert_eq!(s.map(&MapOptions::default()), None); +/// assert_eq!(s.map(&ObjectPool::default(), &MapOptions::default()), None); /// assert_eq!(s.size(), 16); /// ``` #[derive(Clone, PartialEq, Eq)] @@ -75,7 +76,7 @@ impl Source for RawStringSource { self.0.len() } - fn map(&self, _: &MapOptions) -> Option { + fn map(&self, _: &ObjectPool, _: &MapOptions) -> Option { None } @@ -109,6 +110,7 @@ impl Hash for RawStringSource { impl StreamChunks for RawStringSource { fn stream_chunks<'a>( &'a self, + _: &'a ObjectPool, options: &MapOptions, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, @@ -129,12 +131,12 @@ impl StreamChunks for RawStringSource { /// - [webpack-sources docs](https://github.com/webpack/webpack-sources/#rawsource). 
/// /// ``` -/// use rspack_sources::{MapOptions, RawBufferSource, Source}; +/// use rspack_sources::{MapOptions, RawBufferSource, Source, ObjectPool}; /// /// let code = "some source code".as_bytes(); /// let s = RawBufferSource::from(code); /// assert_eq!(s.buffer(), code); -/// assert_eq!(s.map(&MapOptions::default()), None); +/// assert_eq!(s.map(&ObjectPool::default(), &MapOptions::default()), None); /// assert_eq!(s.size(), 16); /// ``` pub struct RawBufferSource { @@ -208,7 +210,7 @@ impl Source for RawBufferSource { self.value.len() } - fn map(&self, _: &MapOptions) -> Option { + fn map(&self, _: &ObjectPool, _: &MapOptions) -> Option { None } @@ -242,6 +244,7 @@ impl Hash for RawBufferSource { impl StreamChunks for RawBufferSource { fn stream_chunks<'a>( &'a self, + _: &'a ObjectPool, options: &MapOptions, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, @@ -274,7 +277,9 @@ mod tests { let source1 = ReplaceSource::new(source1); let source2 = OriginalSource::new("world".to_string(), "world.txt"); let concat = ConcatSource::new([source1.boxed(), source2.boxed()]); - let map = concat.map(&MapOptions::new(false)).unwrap(); + let map = concat + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(); assert_eq!(map.mappings(), ";;AAAA",); } } diff --git a/src/replace_source.rs b/src/replace_source.rs index aaf66e43..06c141c7 100644 --- a/src/replace_source.rs +++ b/src/replace_source.rs @@ -12,6 +12,7 @@ use crate::{ get_map, split_into_lines, GeneratedInfo, SourceText, StreamChunks, }, linear_map::LinearMap, + object_pool::ObjectPool, rope::Rope, source_content_lines::SourceContentLines, BoxSource, MapOptions, Mapping, OriginalLocation, Source, SourceExt, @@ -264,12 +265,16 @@ impl Source for ReplaceSource { size } - fn map(&self, options: &crate::MapOptions) -> Option { + fn map( + &self, + _: &ObjectPool, + options: &crate::MapOptions, + ) -> Option { let replacements = &self.replacements; if replacements.is_empty() { - return self.inner.map(options); + return self.inner.map(&ObjectPool::default(), options); } - get_map(self, options) + get_map(&ObjectPool::default(), self, options) } fn to_writer(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> { @@ -319,9 +324,9 @@ impl std::fmt::Debug for ReplaceSource { } } -enum SourceContent { +enum SourceContent<'object_pool> { Raw(Arc), - Lines(SourceContentLines), + Lines(SourceContentLines<'object_pool>), } fn check_content_at_position( @@ -343,6 +348,7 @@ fn check_content_at_position( impl StreamChunks for ReplaceSource { fn stream_chunks<'a>( &'a self, + object_pool: &'a ObjectPool, options: &crate::MapOptions, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, @@ -397,7 +403,7 @@ impl StreamChunks for ReplaceSource { { match source_content { SourceContent::Raw(source) => { - let lines = SourceContentLines::from(source.clone()); + let lines = SourceContentLines::new(object_pool, source.clone()); let matched = check_content_at_position(&lines, line, column, expected_chunk); *source_content = SourceContent::Lines(lines); @@ -413,6 +419,7 @@ impl StreamChunks for ReplaceSource { }; let result = self.inner.stream_chunks( + object_pool, &MapOptions { columns: options.columns, final_source: false, @@ -866,7 +873,9 @@ mod tests { source.replace(start_line6 + 4, start_line6 + 5, " ", None); let result = source.source(); - let result_map = source.map(&MapOptions::default()).unwrap(); + let result_map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); 
assert_eq!( code, @@ -898,7 +907,9 @@ Last Line"# 5:0 -> [file.txt] 6:0, :4 -> [file.txt] 6:4, :5 -> [file.txt] 7:0"# ); - let result_list_map = source.map(&MapOptions::new(false)).unwrap(); + let result_list_map = source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(); assert_eq!( with_readable_mappings(&result_list_map), r#" @@ -921,8 +932,12 @@ Last Line"# source.insert(0, "Message: ", None); source.replace(2, (line1.len() + 5) as u32, "y A", None); let result_text = source.source(); - let result_map = source.map(&MapOptions::default()).unwrap(); - let result_list_map = source.map(&MapOptions::new(false)).unwrap(); + let result_map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); + let result_list_map = source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(); assert_eq!( original_code, @@ -952,8 +967,12 @@ World!"# source.insert(0, "Line 0\n", None); let result_text = source.source(); - let result_map = source.map(&MapOptions::default()).unwrap(); - let result_list_map = source.map(&MapOptions::new(false)).unwrap(); + let result_map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); + let result_list_map = source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(); assert_eq!(result_text.into_string_lossy(), "Line -1\nLine 0\nLine 1"); assert_eq!( @@ -981,8 +1000,12 @@ World!"# source.insert(0, "Line 0\n", None); source.replace(0, 6, "Hello", None); let result_text = source.source(); - let result_map = source.map(&MapOptions::default()).unwrap(); - let result_list_map = source.map(&MapOptions::new(false)).unwrap(); + let result_map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); + let result_list_map = source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(); assert_eq!( result_text.into_string_lossy(), @@ -1006,8 +1029,12 @@ Line 2"# let mut source = ReplaceSource::new(OriginalSource::new(line1, "file.txt")); source.insert((line1.len() + 1) as u32, "Line 2\n", None); let result_text = source.source(); - let result_map = source.map(&MapOptions::default()).unwrap(); - let result_list_map = source.map(&MapOptions::new(false)).unwrap(); + let result_map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); + let result_list_map = source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(); assert_eq!(result_text.into_string_lossy(), "Line 1\nLine 2\n"); assert_eq!( @@ -1027,7 +1054,9 @@ Line 2"# ReplaceSource::new(OriginalSource::new(bootstrap_code, "file.js")); source.replace(7, 12, "h", Some("hello")); source.replace(20, 25, "w", Some("world")); - let result_map = source.map(&MapOptions::default()).expect("failed"); + let result_map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .expect("failed"); let target_code = source.source(); assert_eq!(target_code.into_string_lossy(), " var h\n var w\n"); @@ -1092,7 +1121,9 @@ export default function StaticPage(_ref) { ); let target_code = source.source(); - let source_map = source.map(&MapOptions::default()).unwrap(); + let source_map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); assert_eq!( target_code.into_string_lossy(), @@ -1143,7 +1174,9 @@ return
<div>{data.foo}</div>
source.replace(12, 24, "", None); let target_code = source.source(); - let source_map = source.map(&MapOptions::default()).unwrap(); + let source_map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); assert_eq!(target_code.into_string_lossy(), "if (false) {}"); assert_eq!( @@ -1169,7 +1202,9 @@ return
<div>{data.foo}</div>
source.replace(0, 999, "replaced!\n", Some("whole")); let result_text = source.source(); - let result_map = source.map(&MapOptions::default()).unwrap(); + let result_map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); assert_eq!( result_text.into_string_lossy(), @@ -1190,7 +1225,10 @@ return
<div>{data.foo}</div>
source.replace(3, 5, "", None); assert_eq!(source.size(), 3); assert_eq!(source.source().into_string_lossy(), "box"); - assert_eq!(source.map(&MapOptions::default()), None); + assert_eq!( + source.map(&ObjectPool::default(), &MapOptions::default()), + None + ); let mut hasher = twox_hash::XxHash64::default(); source.hash(&mut hasher); assert_eq!(format!("{:x}", hasher.finish()), "96abdb94c6fd5aba"); @@ -1225,7 +1263,7 @@ return
<div>{data.foo}</div>
); assert_eq!( source - .map(&MapOptions::default()) + .map(&ObjectPool::default(), &MapOptions::default()) .unwrap() .to_json() .unwrap(), diff --git a/src/source.rs b/src/source.rs index a56f922e..f015d24e 100644 --- a/src/source.rs +++ b/src/source.rs @@ -12,6 +12,7 @@ use serde::{Deserialize, Serialize}; use crate::{ helpers::{decode_mappings, StreamChunks}, + object_pool::ObjectPool, rope::Rope, Result, }; @@ -124,7 +125,11 @@ pub trait Source: fn size(&self) -> usize; /// Get the [SourceMap]. - fn map(&self, options: &MapOptions) -> Option; + fn map( + &self, + object_pool: &ObjectPool, + options: &MapOptions, + ) -> Option; /// Update hash based on the source. fn update_hash(&self, state: &mut dyn Hasher) { @@ -152,8 +157,12 @@ impl Source for BoxSource { self.as_ref().size() } - fn map(&self, options: &MapOptions) -> Option { - self.as_ref().map(options) + fn map( + &self, + object_pool: &ObjectPool, + options: &MapOptions, + ) -> Option { + self.as_ref().map(object_pool, options) } fn to_writer(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> { @@ -166,14 +175,19 @@ dyn_clone::clone_trait_object!(Source); impl StreamChunks for BoxSource { fn stream_chunks<'a>( &'a self, + object_pool: &'a ObjectPool, options: &MapOptions, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, on_name: crate::helpers::OnName<'_, 'a>, ) -> crate::helpers::GeneratedInfo { - self - .as_ref() - .stream_chunks(options, on_chunk, on_source, on_name) + self.as_ref().stream_chunks( + object_pool, + options, + on_chunk, + on_source, + on_name, + ) } } @@ -250,7 +264,7 @@ impl SourceExt for T { } /// Options for [Source::map]. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone)] pub struct MapOptions { /// Whether have columns info in generated [SourceMap] mappings. pub columns: bool, diff --git a/src/source_content_lines.rs b/src/source_content_lines.rs index 656a37bf..d923a501 100644 --- a/src/source_content_lines.rs +++ b/src/source_content_lines.rs @@ -1,31 +1,33 @@ use std::sync::Arc; -use crate::{helpers::split_into_lines, with_indices::WithIndices}; +use crate::{ + helpers::split_into_lines, object_pool::ObjectPool, with_indices::WithIndices, +}; -type Owner = Arc; - -type BorrowedValue<'a> = Vec>; - -self_cell::self_cell!( - pub struct SourceContentLines { - owner: Owner, - #[covariant] - dependent: BorrowedValue, - } -); +pub struct SourceContentLines<'object_pool> { + text: Arc, + // Self-referential data structure: lines borrow from the text. + lines: Vec>, +} -impl SourceContentLines { - pub fn get(&self, line: usize) -> Option<&WithIndices<'_, &str>> { - self.borrow_dependent().get(line) +impl<'object_pool> SourceContentLines<'object_pool> { + pub fn new(object_pool: &'object_pool ObjectPool, text: Arc) -> Self { + // SAFETY: We extend the lifetime of the &str to 'static because the Arc is owned by this struct, + // and all &'static str references are only used within the lifetime of this struct. 
+ #[allow(unsafe_code)] + let text_ref = + unsafe { std::mem::transmute::<&str, &'static str>(text.as_ref()) }; + let lines = split_into_lines::<&str>(&text_ref) + .map(|line| WithIndices::new(object_pool, line)) + .collect::>(); + Self { text, lines } } -} -impl From> for SourceContentLines { - fn from(value: Arc) -> Self { - SourceContentLines::new(value, |owner| { - split_into_lines(&owner.as_ref()) - .map(WithIndices::new) - .collect::>() - }) + pub fn get( + &self, + line: usize, + ) -> Option<&WithIndices<'object_pool, '_, &str>> { + let _ = &self.text; + self.lines.get(line) } } diff --git a/src/source_map_source.rs b/src/source_map_source.rs index b2310e7a..5a2b6166 100644 --- a/src/source_map_source.rs +++ b/src/source_map_source.rs @@ -9,6 +9,7 @@ use crate::{ get_map, stream_chunks_of_combined_source_map, stream_chunks_of_source_map, StreamChunks, }, + object_pool::ObjectPool, MapOptions, Rope, Source, SourceMap, SourceValue, }; @@ -105,11 +106,15 @@ impl Source for SourceMapSource { self.value.len() } - fn map(&self, options: &MapOptions) -> Option { + fn map( + &self, + object_pool: &ObjectPool, + options: &MapOptions, + ) -> Option { if self.inner_source_map.is_none() { return Some(self.source_map.clone()); } - get_map(self, options) + get_map(object_pool, self, options) } fn to_writer(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> { @@ -185,6 +190,7 @@ impl std::fmt::Debug for SourceMapSource { impl StreamChunks for SourceMapSource { fn stream_chunks<'a>( &'a self, + object_pool: &'a ObjectPool, options: &MapOptions, on_chunk: crate::helpers::OnChunk<'_, 'a>, on_source: crate::helpers::OnSource<'_, 'a>, @@ -192,6 +198,8 @@ impl StreamChunks for SourceMapSource { ) -> crate::helpers::GeneratedInfo { if let Some(inner_source_map) = &self.inner_source_map { stream_chunks_of_combined_source_map( + options, + object_pool, &*self.value, &self.source_map, &self.name, @@ -201,16 +209,16 @@ impl StreamChunks for SourceMapSource { on_chunk, on_source, on_name, - options, ) } else { stream_chunks_of_source_map( + options, + object_pool, self.value.as_str(), &self.source_map, on_chunk, on_source, on_name, - options, ) } } @@ -251,7 +259,8 @@ mod tests { name: "text", source_map: source_r_map.clone(), original_source: Some(inner_source.source().into_string_lossy().into()), - inner_source_map: inner_source.map(&MapOptions::default()), + inner_source_map: inner_source + .map(&ObjectPool::default(), &MapOptions::default()), remove_original_source: false, }); let sms2 = SourceMapSource::new(SourceMapSourceOptions { @@ -259,7 +268,8 @@ mod tests { name: "text", source_map: source_r_map, original_source: Some(inner_source.source().into_string_lossy().into()), - inner_source_map: inner_source.map(&MapOptions::default()), + inner_source_map: inner_source + .map(&ObjectPool::default(), &MapOptions::default()), remove_original_source: true, }); let expected_content = @@ -267,7 +277,9 @@ mod tests { assert_eq!(sms1.source().into_string_lossy(), expected_content); assert_eq!(sms2.source().into_string_lossy(), expected_content); assert_eq!( - sms1.map(&MapOptions::default()).unwrap(), + sms1 + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(), SourceMap::from_json( r#"{ "mappings": "YAAAA,K,CAAMC;AACN,O,MAAU;ACCC,O,CAAM", @@ -283,7 +295,9 @@ mod tests { .unwrap(), ); assert_eq!( - sms2.map(&MapOptions::default()).unwrap(), + sms2 + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(), SourceMap::from_json( r#"{ "mappings": "YAAAA,K,CAAMC;AACN,O,MAAU", @@ 
-339,7 +353,9 @@ mod tests { "hi world\nhi world\nhi world\n" ); assert_eq!( - source.map(&MapOptions::default()).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(), SourceMap::from_json( r#"{ "mappings": "AAAA;;ACAA,CAAC,CAAI", @@ -352,7 +368,9 @@ mod tests { .unwrap() ); assert_eq!( - source.map(&MapOptions::new(false)).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(), SourceMap::from_json( r#"{ "mappings": "AAAA;;ACAA", @@ -444,7 +462,9 @@ mod tests { a, b, ]); - let map = source.map(&MapOptions::default()).unwrap(); + let map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); assert_eq!( map.mappings(), "AAAA;AAAA;ACAA,ICAA,EDAA,ECAA,EFAA;AEAA,EFAA;ACAA", @@ -465,11 +485,10 @@ mod tests { .source() .into_string_lossy() .into_owned()); - test_cached!(source, |s: &dyn Source| s.map(&MapOptions::default())); - test_cached!(source, |s: &dyn Source| s.map(&MapOptions { - columns: false, - final_source: true - })); + test_cached!(source, |s: &dyn Source| s + .map(&ObjectPool::default(), &MapOptions::default())); + test_cached!(source, |s: &dyn Source| s + .map(&ObjectPool::default(), &MapOptions::new(false))); } #[test] @@ -500,7 +519,9 @@ mod tests { remove_original_source: false, }); assert_eq!( - source.map(&MapOptions::default()).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(), SourceMap::from_json( r#"{ "mappings": "AAAA", @@ -547,7 +568,9 @@ mod tests { ); assert_eq!(source.size(), 13); assert_eq!( - source.map(&MapOptions::default()).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(), SourceMap::from_json( r#"{ "mappings": "AAAAA,SCAA,ECAMC,C", @@ -598,7 +621,9 @@ mod tests { ).unwrap()), remove_original_source: true, }); - let map = source.map(&MapOptions::default()).unwrap(); + let map = source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); assert_eq!( map, SourceMap::from_json( @@ -619,7 +644,9 @@ mod tests { let source = SourceMapSource::new(WithoutOriginalOptions { value: "console.log('a')\n", name: "a.js", - source_map: original.map(&MapOptions::new(false)).unwrap(), + source_map: original + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(), }); let source = ConcatSource::new([ RawStringSource::from("\n").boxed(), @@ -627,7 +654,9 @@ mod tests { RawStringSource::from("\n").boxed(), source.boxed(), ]); - let map = source.map(&MapOptions::new(false)).unwrap(); + let map = source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(); assert_eq!(map.mappings(), ";;;AAAA"); } @@ -652,8 +681,9 @@ mod tests { }"#, ) .unwrap(); - let inner_source_map = - inner_source.map(&MapOptions::default()).map(|mut map| { + let inner_source_map = inner_source + .map(&ObjectPool::default(), &MapOptions::default()) + .map(|mut map| { map.set_source_root(Some("/path/to/folder/".to_string())); map }); @@ -666,7 +696,9 @@ mod tests { remove_original_source: false, }); assert_eq!( - sms.map(&MapOptions::default()).unwrap(), + sms + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(), SourceMap::from_json( r#"{ "mappings": "YAAAA,K,CAAMC;AACN,O,MAAU;ACCC,O,CAAM", @@ -713,7 +745,9 @@ mod tests { remove_original_source: false, }); assert_eq!( - source.map(&MapOptions::new(false)).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::new(false)) + .unwrap(), SourceMap::from_json( r#"{ "mappings": "AAAA", @@ -755,7 +789,9 @@ mod tests { remove_original_source: false, 
}); assert_eq!( - source.map(&MapOptions::default()).unwrap(), + source + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(), SourceMap::from_json( r#"{ "version": 3, diff --git a/src/with_indices.rs b/src/with_indices.rs index e4912521..3136c428 100644 --- a/src/with_indices.rs +++ b/src/with_indices.rs @@ -1,27 +1,32 @@ use std::{cell::OnceCell, marker::PhantomData}; -use crate::helpers::SourceText; +use crate::{ + helpers::SourceText, + object_pool::{ObjectPool, Pooled}, +}; -#[derive(Debug, Clone)] -pub struct WithIndices<'a, S> +#[derive(Debug)] +pub struct WithIndices<'object_pool, 'text, S> where - S: SourceText<'a>, + S: SourceText<'text>, { /// line is a string reference pub line: S, /// the byte position of each `char` in `line` string slice . - pub indices_indexes: OnceCell>, - data: PhantomData<&'a S>, + pub char_byte_indices: OnceCell>, + data: PhantomData<&'text S>, + object_pool: &'object_pool ObjectPool, } -impl<'a, S> WithIndices<'a, S> +impl<'object_pool, 'text, S> WithIndices<'object_pool, 'text, S> where - S: SourceText<'a>, + S: SourceText<'text>, { - pub fn new(line: S) -> Self { + pub fn new(object_pool: &'object_pool ObjectPool, line: S) -> Self { Self { - indices_indexes: OnceCell::new(), + char_byte_indices: OnceCell::new(), line, + object_pool, data: PhantomData, } } @@ -32,13 +37,15 @@ where return S::default(); } - let indices_indexes = self.indices_indexes.get_or_init(|| { - self.line.char_indices().map(|(i, _)| i).collect::>() + let char_byte_indices = self.char_byte_indices.get_or_init(|| { + let mut vec = self.object_pool.pull(self.line.len()); + vec.extend(self.line.char_indices().map(|(i, _)| i)); + vec }); let str_len = self.line.len(); - let start = *indices_indexes.get(start_index).unwrap_or(&str_len); - let end = *indices_indexes.get(end_index).unwrap_or(&str_len); + let start = *char_byte_indices.get(start_index).unwrap_or(&str_len); + let end = *char_byte_indices.get(end_index).unwrap_or(&str_len); #[allow(unsafe_code)] unsafe { @@ -53,13 +60,14 @@ where /// tests are just copy from `substring` crate #[cfg(test)] mod tests { - use crate::Rope; + use crate::{object_pool::ObjectPool, Rope}; use super::WithIndices; #[test] fn test_substring() { assert_eq!( - WithIndices::new(Rope::from("foobar")).substring(0, 3), + WithIndices::new(&ObjectPool::default(), Rope::from("foobar")) + .substring(0, 3), "foo" ); } @@ -67,26 +75,40 @@ mod tests { #[test] fn test_out_of_bounds() { assert_eq!( - WithIndices::new(Rope::from("foobar")).substring(0, 10), + WithIndices::new(&ObjectPool::default(), Rope::from("foobar")) + .substring(0, 10), "foobar" ); - assert_eq!(WithIndices::new(Rope::from("foobar")).substring(6, 10), ""); + assert_eq!( + WithIndices::new(&ObjectPool::default(), Rope::from("foobar")) + .substring(6, 10), + "" + ); } #[test] fn test_start_less_than_end() { - assert_eq!(WithIndices::new(Rope::from("foobar")).substring(3, 2), ""); + assert_eq!( + WithIndices::new(&ObjectPool::default(), Rope::from("foobar")) + .substring(3, 2), + "" + ); } #[test] fn test_start_and_end_equal() { - assert_eq!(WithIndices::new(Rope::from("foobar")).substring(3, 3), ""); + assert_eq!( + WithIndices::new(&ObjectPool::default(), Rope::from("foobar")) + .substring(3, 3), + "" + ); } #[test] fn test_multiple_byte_characters() { assert_eq!( - WithIndices::new(Rope::from("fõøbα®")).substring(2, 5), + WithIndices::new(&ObjectPool::default(), Rope::from("fõøbα®")) + .substring(2, 5), "øbα" ); } diff --git a/tests/compat_source.rs b/tests/compat_source.rs index 
5cc97576..83858b25 100644 --- a/tests/compat_source.rs +++ b/tests/compat_source.rs @@ -6,8 +6,8 @@ use rspack_sources::stream_chunks::{ stream_chunks_default, GeneratedInfo, OnChunk, OnName, OnSource, StreamChunks, }; use rspack_sources::{ - ConcatSource, MapOptions, RawStringSource, Rope, Source, SourceExt, - SourceMap, SourceValue, + ConcatSource, MapOptions, ObjectPool, RawStringSource, Rope, Source, + SourceExt, SourceMap, SourceValue, }; #[derive(Debug, Eq)] @@ -30,7 +30,11 @@ impl Source for CompatSource { 42 } - fn map(&self, _options: &MapOptions) -> Option { + fn map( + &self, + _object_pool: &ObjectPool, + _options: &MapOptions, + ) -> Option { self.1.clone() } @@ -42,15 +46,17 @@ impl Source for CompatSource { impl StreamChunks for CompatSource { fn stream_chunks<'a>( &'a self, + object_pool: &'a ObjectPool, options: &MapOptions, on_chunk: OnChunk<'_, 'a>, on_source: OnSource<'_, 'a>, on_name: OnName<'_, 'a>, ) -> GeneratedInfo { stream_chunks_default( + options, + object_pool, self.0, self.1.as_ref(), - options, on_chunk, on_source, on_name, @@ -85,7 +91,10 @@ fn should_work_with_custom_compat_source() { assert_eq!(source.source().into_string_lossy(), CONTENT); assert_eq!(source.size(), 42); assert_eq!(source.buffer(), CONTENT.as_bytes()); - assert_eq!(source.map(&MapOptions::default()), None); + assert_eq!( + source.map(&ObjectPool::default(), &MapOptions::default()), + None + ); } #[test] @@ -107,7 +116,9 @@ fn should_generate_correct_source_map() { ]); let source = result.source(); - let map = result.map(&MapOptions::default()).unwrap(); + let map = result + .map(&ObjectPool::default(), &MapOptions::default()) + .unwrap(); let expected_source = "Line0\nLine1\nLine2\nLine3\n"; let expected_source_map = SourceMap::from_json(