Commit

Merge remote-tracking branch 'origin/master' into fix/inter_op_missing
stormslowly committed Mar 18, 2024
2 parents 9e3e8e2 + 16942b3 commit 9eb03f2
Showing 55 changed files with 461 additions and 128 deletions.
10 changes: 10 additions & 0 deletions CONTRIBUTING.md
@@ -93,6 +93,16 @@ $ time ./target/release/mako examples/with-antd
$ hyperfine --runs 10 "./target/release/mako examples/with-antd"
```

Benchmark with three10x.

```bash
$ just setup-bench
# default: --baseline master --case tmp/three10x
$ just bench
$ just bench --baseline v0.4.4
$ just bench --baseline v0.4.4 --case examples/with-antd
```

Performance analysis with puffin.

```bash
18 changes: 10 additions & 8 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion crates/core/Cargo.toml
@@ -83,7 +83,7 @@ serde_json = { workspace = true }
serde_yaml = "0.9.22"
svgr-rs = "0.1.3"
thiserror = "1.0.43"
tokio = { version = "1", features = ["rt", "sync"] }
tokio = { version = "1", features = ["rt-multi-thread", "sync"] }
tokio-tungstenite = "0.19.0"
toml = "0.7.6"
tracing = "0.1.37"
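
The only change here swaps Tokio's single-threaded `rt` feature for `rt-multi-thread`, which is what makes a multi-threaded runtime available. A minimal sketch of what the feature unlocks, assuming a plain `tokio` dependency like the one above (the worker-thread count is illustrative, not taken from this commit):

```rust
use tokio::runtime::Builder;

fn main() {
    // Builder::new_multi_thread() only exists with the "rt-multi-thread"
    // feature; with "rt" alone, only new_current_thread() compiles.
    let rt = Builder::new_multi_thread()
        .worker_threads(4) // illustrative value
        .build()
        .expect("failed to build tokio runtime");

    rt.block_on(async {
        println!("running on a multi-threaded tokio runtime");
    });
}
```
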
18 changes: 10 additions & 8 deletions crates/mako/Cargo.toml
@@ -11,14 +11,16 @@ path = "src/main.rs"
test = false

[dependencies]
cached = { workspace = true }
glob-match = "0.2.1"
mako_core = { path = "../core" }
miette = { version = "5.10.0", features = ["fancy"] }
oxc_resolver = "1.5.4"
serde = { workspace = true }
serde_json = { workspace = true }
swc_core = { workspace = true, features = ["swc_ecma_quote_macros"] }
cached = { workspace = true }
glob-match = "0.2.1"
mako_core = { path = "../core" }
miette = { version = "5.10.0", features = ["fancy"] }
oxc_resolver = "1.5.4"
percent-encoding = { version = "2.3.1" }
serde = { workspace = true }
serde_json = { workspace = true }
swc_core = { workspace = true, features = ["swc_ecma_quote_macros"] }
url = { version = "2.5.0" }

[target.'cfg(not(target_os = "linux"))'.dependencies]
mimalloc-rust = { workspace = true }
50 changes: 24 additions & 26 deletions crates/mako/src/ast_2/file.rs
@@ -12,6 +12,8 @@ use mako_core::regex::Regex;
use mako_core::thiserror::Error;
use mako_core::twox_hash::XxHash64;
use mako_core::{md5, mime_guess};
use percent_encoding::percent_decode_str;
use url::Url;

use crate::compiler::Context;
use crate::util::base64_decode;
@@ -51,6 +53,7 @@ pub struct File {
pub pathname: PathBuf,
pub search: String,
pub params: Vec<(String, String)>,
pub fragment: Option<String>,
}

impl Default for File {
@@ -67,6 +70,7 @@ impl Default for File {
pathname: PathBuf::new(),
search: "".to_string(),
params: vec![],
fragment: None,
}
}
}
@@ -84,7 +88,7 @@ lazy_static! {
impl File {
pub fn new(path: String, context: Arc<Context>) -> Self {
let path = PathBuf::from(path);
let (pathname, search, params) = parse_path(&path.to_string_lossy()).unwrap();
let (pathname, search, params, fragment) = parse_path(&path.to_string_lossy()).unwrap();
let pathname = PathBuf::from(pathname);
let is_virtual = path.starts_with(&*VIRTUAL) ||
// TODO: remove this specific logic
@@ -103,6 +107,7 @@ impl File {
pathname,
search,
params,
fragment,
is_under_node_modules,
extname,
..Default::default()
@@ -148,14 +153,14 @@ impl File {
}
}

pub fn get_raw_hash(&self, init: u64) -> u64 {
pub fn get_raw_hash(&self) -> u64 {
let mut hasher: XxHash64 = Default::default();
if let Some(content) = &self.content {
match content {
Content::Js(content)
| Content::Css(content)
| Content::Assets(Asset { content, .. }) => {
hasher.write_u64(init);
// hasher.write_u64(init);
hasher.write(content.as_bytes());
hasher.finish()
}
@@ -246,27 +251,20 @@ impl File {
type PathName = String;
type Search = String;
type Params = Vec<(String, String)>;
fn parse_path(path: &str) -> Result<(PathName, Search, Params)> {
let mut iter = path.split('?');
let path = iter.next().unwrap();
let query = iter.next().unwrap_or("");
let mut query_vec = vec![];
for pair in query.split('&') {
if pair.contains('=') {
let mut it = pair.split('=').take(2);
let kv = match (it.next(), it.next()) {
(Some(k), Some(v)) => (k.to_string(), v.to_string()),
_ => continue,
};
query_vec.push(kv);
} else if !pair.is_empty() {
query_vec.push((pair.to_string(), "".to_string()));
}
}
let search = if query.is_empty() {
"".to_string()
} else {
format!("?{}", query)
};
Ok((path.to_string(), search, query_vec))
type Fragment = Option<String>;
fn parse_path(path: &str) -> Result<(PathName, Search, Params, Fragment)> {
let base = "http://a.com/";
let base_url = Url::parse(base)?;
let full_url = base_url.join(path)?;
let path = full_url.path().to_string();
let fragment = full_url.fragment().map(|s| s.to_string());
let search = full_url.query().unwrap_or("").to_string();
let query_vec = full_url
.query_pairs()
.map(|(k, v)| (k.to_string(), v.to_string()))
.collect();
// dir or filename may contain spaces or other special characters
// so we need to decode it, e.g. "a%20b" -> "a b"
let path = percent_decode_str(&path).decode_utf8()?;
Ok((path.to_string(), search, query_vec, fragment))
}
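
The rewritten `parse_path` delegates `?query` and `#fragment` handling plus percent-decoding to the `url` and `percent-encoding` crates (the two dependencies added to crates/mako/Cargo.toml above) and now also returns the fragment. A standalone sketch of the same parsing with an illustrative input not taken from the commit; note that `search` is now stored without the leading `?` that the old string-splitting version prepended:

```rust
use percent_encoding::percent_decode_str;
use url::Url;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Illustrative input: an encoded space, a query string, and a fragment.
    let raw = "src/a%20b.module.css?modules&hash=123#top";

    // Join against a dummy base so plain file paths parse as URLs,
    // mirroring the "http://a.com/" base used in the diff.
    let full = Url::parse("http://a.com/")?.join(raw)?;

    let pathname = percent_decode_str(full.path()).decode_utf8()?; // "/src/a b.module.css"
    let search = full.query().unwrap_or("").to_string();           // "modules&hash=123"
    let params: Vec<(String, String)> = full
        .query_pairs()
        .map(|(k, v)| (k.to_string(), v.to_string()))
        .collect();                                                 // [("modules", ""), ("hash", "123")]
    let fragment = full.fragment().map(str::to_string);             // Some("top")

    println!("{pathname} {search} {params:?} {fragment:?}");
    Ok(())
}
```
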
14 changes: 12 additions & 2 deletions crates/mako/src/ast_2/utils.rs
@@ -10,15 +10,25 @@ pub fn base64_encode<T: AsRef<[u8]>>(raw: T) -> String {
general_purpose::STANDARD.encode(raw)
}

// TODO: more accurate
pub fn is_remote(url: &str) -> bool {
pub fn is_remote_or_data(url: &str) -> bool {
let lower_url = url.to_lowercase();
// ref:
// https://developer.mozilla.org/en-US/docs/Web/CSS/url
// https://www.ietf.org/rfc/rfc3986
lower_url.starts_with("http://")
|| lower_url.starts_with("https://")
|| lower_url.starts_with("data:")
|| lower_url.starts_with("//")
}

pub fn is_remote_or_data_or_hash(url: &str) -> bool {
let lower_url = url.to_lowercase();
is_remote_or_data(url)
// css url() should not resolve a hash-only url
// e.g. fill: url(#gradient)
|| lower_url.starts_with('#')
}

pub fn remove_first_tilde(url: String) -> String {
if let Some(stripped) = url.strip_prefix('~') {
// ~/ is the case when use ~ as alias or folder
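
`is_remote` becomes `is_remote_or_data`, and the new `is_remote_or_data_or_hash` additionally accepts hash-only references so that CSS values such as `fill: url(#gradient)` are not resolved as files. A self-contained sketch of the intended behaviour; the two predicates are copied from the diff above (with the reference comments trimmed) and the sample URLs are made up:

```rust
pub fn is_remote_or_data(url: &str) -> bool {
    let lower_url = url.to_lowercase();
    lower_url.starts_with("http://")
        || lower_url.starts_with("https://")
        || lower_url.starts_with("data:")
        || lower_url.starts_with("//")
}

pub fn is_remote_or_data_or_hash(url: &str) -> bool {
    let lower_url = url.to_lowercase();
    // css url() should not resolve a hash-only url, e.g. fill: url(#gradient)
    is_remote_or_data(url) || lower_url.starts_with('#')
}

fn main() {
    assert!(is_remote_or_data("https://example.com/a.png"));
    assert!(is_remote_or_data("DATA:image/png;base64,AAAA")); // prefix check is case-insensitive
    assert!(is_remote_or_data("//cdn.example.com/a.js"));     // protocol-relative
    assert!(!is_remote_or_data("./local/a.css"));
    assert!(!is_remote_or_data("#gradient"));
    assert!(is_remote_or_data_or_hash("#gradient"));          // only the *_or_hash variant accepts this
    println!("all checks passed");
}
```
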
6 changes: 3 additions & 3 deletions crates/mako/src/build.rs
@@ -102,7 +102,7 @@ impl Compiler {
let dep_module_id = ModuleId::new(path.clone());
if !module_graph.has_module(&dep_module_id) {
let module = match dep.resolver_resource {
ResolverResource::Resolved(_) => {
ResolverResource::Virtual(_) | ResolverResource::Resolved(_) => {
count += 1;

let file = File::new(path.clone(), self.context.clone());
@@ -211,7 +211,7 @@ __mako_require__.loadScript('{}', (e) => e.type === 'load' ? resolve() : reject(
let info = {
let file = File::with_content(
path.to_owned(),
Content::Js("".to_string()),
Content::Js("export {};".to_string()),
context.clone(),
);
let ast = Parse::parse(&file, context.clone()).unwrap();
@@ -284,7 +284,7 @@ __mako_require__.loadScript('{}', (e) => e.type === 'load' ? resolve() : reject(
// raw_hash is only used in watch mode
// so we don't need to calculate when watch is off
let raw_hash = if context.args.watch {
file.get_raw_hash(context.config_hash)
file.get_raw_hash()
.wrapping_add(hash_hashmap(&deps.missing_deps))
} else {
0
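
With `config_hash` gone (see the compiler.rs hunks below), `get_raw_hash` hashes only the file content, and the call site above still folds in the missing-deps hash with `wrapping_add`. A standalone sketch of that hashing pattern, assuming a direct `twox_hash` dependency (mako re-exports it through `mako_core`); the inputs are illustrative:

```rust
use std::hash::Hasher;

use twox_hash::XxHash64;

/// Content-only hash, mirroring the simplified get_raw_hash above.
fn content_hash(content: &str) -> u64 {
    let mut hasher: XxHash64 = Default::default();
    hasher.write(content.as_bytes());
    hasher.finish()
}

fn main() {
    // Illustrative inputs, not taken from the commit.
    let raw = content_hash("export const a = 1;");
    let missing_deps_hash = 0x9e37_79b9_7f4a_7c15_u64; // stand-in for hash_hashmap(&deps.missing_deps)

    // Same combination as the call site above: overflow-safe addition.
    let combined = raw.wrapping_add(missing_deps_hash);
    println!("raw: {raw:x}, combined: {combined:x}");
}
```
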
7 changes: 0 additions & 7 deletions crates/mako/src/chunk_pot/util.rs
@@ -93,13 +93,6 @@ pub(crate) fn empty_module_fn_expr() -> FnExpr {
}
}

#[cached(
result = true,
key = "u64",
type = "SizedCache<u64, String>",
convert = r#"{context.config_hash}"#,
create = "{ SizedCache::with_size(5) }"
)]
pub(crate) fn runtime_code(context: &Arc<Context>) -> Result<String> {
let umd = context.config.umd.clone();
let chunk_graph = context.chunk_graph.read().unwrap();
18 changes: 11 additions & 7 deletions crates/mako/src/compiler.rs
@@ -1,6 +1,7 @@
use std::collections::HashMap;
use std::fs;
use std::path::PathBuf;
use std::sync::mpsc::channel;
use std::sync::{Arc, Mutex, RwLock};
use std::time::{Instant, UNIX_EPOCH};

@@ -13,23 +14,22 @@ use mako_core::swc_ecma_ast::Ident

use crate::chunk_graph::ChunkGraph;
use crate::comments::Comments;
use crate::config::{hash_config, Config, OutputMode};
use crate::config::{Config, OutputMode};
use crate::module_graph::ModuleGraph;
use crate::optimize_chunk::OptimizeChunksInfo;
use crate::plugin::{Plugin, PluginDriver, PluginGenerateEndParams, PluginGenerateStats};
use crate::plugins;
use crate::plugins::minifish::Inject;
use crate::resolve::{get_resolvers, Resolvers};
use crate::stats::StatsInfo;
use crate::util::ParseRegex;
use crate::{plugins, thread_pool};

pub struct Context {
pub module_graph: RwLock<ModuleGraph>,
pub chunk_graph: RwLock<ChunkGraph>,
pub assets_info: Mutex<HashMap<String, String>>,
pub modules_with_missing_deps: RwLock<Vec<String>>,
pub config: Config,
pub config_hash: u64,
pub args: Args,
pub root: PathBuf,
pub meta: Meta,
@@ -110,10 +110,8 @@ impl Default for Context {
fn default() -> Self {
let config: Config = Default::default();
let resolvers = get_resolvers(&config);
let config_hash = hash_config(&config);
Self {
config,
config_hash,
args: Args { watch: false },
root: PathBuf::from(""),
module_graph: RwLock::new(ModuleGraph::new()),
@@ -313,7 +311,6 @@ impl Compiler {
} else {
Default::default()
},
config_hash: hash_config(&config),
config,
args,
root,
@@ -371,7 +368,14 @@
}
let result = {
mako_core::mako_profile_scope!("Generate Stage");
self.generate()
let (rs, rr) = channel::<Result<()>>();
// all rayon parallel iterators need to run inside this existing pool's scope, or else rayon
// will create a new thread pool for them
thread_pool::install(|| {
let res = self.generate();
rs.send(res).unwrap();
});
rr.recv().unwrap()
};
let t_compiler_duration = t_compiler.elapsed();
if result.is_ok() {
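
The generate stage now runs inside `thread_pool::install` so that rayon parallel iterators used during generation reuse the existing pool instead of spawning a fresh global one, with an mpsc channel carrying the `Result` back to the caller. A minimal sketch of the same pattern using `rayon` directly; the pool construction and workload are illustrative, and `thread_pool` is assumed to wrap a shared `rayon::ThreadPool`:

```rust
use std::sync::mpsc::channel;

use rayon::prelude::*;

fn main() {
    // Stand-in for the pool that mako's thread_pool module would own.
    let pool = rayon::ThreadPoolBuilder::new()
        .num_threads(4) // illustrative
        .build()
        .unwrap();

    let (tx, rx) = channel::<Result<u64, String>>();

    // Parallel iterators inside this closure run on `pool`
    // rather than on rayon's implicit global pool.
    pool.install(move || {
        let sum: u64 = (0..1_000u64).into_par_iter().map(|n| n * n).sum();
        tx.send(Ok(sum)).unwrap();
    });

    // install() blocks until the closure finishes, so the result is ready.
    let result = rx.recv().unwrap();
    println!("{result:?}");
}
```
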