Fix links, dump all output files in compilation metrics page (#48)
With the changes to make tlparse artifacts unique, we also made it a bit
harder to link to compilation artifacts from pages other than
index.html. This change factors out the compilation metrics parsing so that
the entire compile directory can be passed to it, so that each compilation
metrics page will have all of the build products associated with that compile
id directly linked.
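
As a rough sketch (simplified names and invented paths, not the real tlparse types), each compile id accumulates a list of (url, display name, output index) tuples for the artifacts it produced, and that whole list is what the compilation metrics page now receives:

    // Minimal sketch, not the real tlparse types: every artifact emitted for a
    // compile id is recorded as a (url, display name, output index) tuple, and
    // the full list is handed to the compilation metrics page.
    fn main() {
        let compile_directory: Vec<(String, String, i32)> = vec![
            // Example entries; the paths and indices here are invented.
            ("0_0_0/dynamo_output_graph_3.txt".to_string(),
             "dynamo_output_graph_3.txt".to_string(), 3),
            ("0_0_0/inductor_post_grad_graph_7.txt".to_string(),
             "inductor_post_grad_graph_7.txt".to_string(), 7),
        ];
        // The metrics page for this compile id can now link each artifact directly.
        for (url, name, idx) in &compile_directory {
            println!("output #{idx}: {name} -> {url}");
        }
    }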

It also allows the failures and restarts page to link correctly to the
compilation metrics page, using a similar technique.
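
The link itself is just the compile id's directory joined with the derived metrics filename. A sketch of the idea (the helper name and inputs are invented; the real code does this inline in parse_path):

    // Sketch only: hypothetical helper showing how a page links to a compile's
    // metrics page. Compilation metrics is emitted last for a compile, so the
    // filename is derived from the previous output index.
    fn metrics_link(compile_id_dir: &str, output_count: i32, compile_id: &str) -> String {
        let metrics_filename = format!("compilation_metrics_{}.html", output_count - 1);
        format!("<a href='{}/{}'>{}</a>", compile_id_dir, metrics_filename, compile_id)
    }

    fn main() {
        // Prints something like: <a href='0_0_0/compilation_metrics_8.html'>[0/0]</a>
        println!("{}", metrics_link("0_0_0", 9, "[0/0]"));
    }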

It's a bit hacky how I derive the filenames for
compilation_metrics.html, but I couldn't come up with a much better way.
We might want to refactor later so that each file has a unique id
associated with it in the FXIndexMap or something.
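
One hypothetical shape for that refactor (not part of this change): give each output file a stable id when it is produced and have other pages link by id, instead of re-deriving filenames from the output count. A std HashMap stands in for the FXIndexMap here:

    // Hypothetical refactor sketch, not what this commit does: register every
    // output file under a stable id so pages can look it up instead of
    // reconstructing its filename.
    use std::collections::HashMap;
    use std::path::PathBuf;

    fn main() {
        let mut outputs: HashMap<u32, PathBuf> = HashMap::new();
        outputs.insert(8, PathBuf::from("0_0_0/compilation_metrics_8.html"));
        if let Some(path) = outputs.get(&8) {
            println!("link target: {}", path.display());
        }
    }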

<img width="1333" alt="image"
src="https://github.com/ezyang/tlparse/assets/4811293/35c4a830-49ab-490e-bb65-525fc1efe13b">
jamesjwu authored Jul 2, 2024
1 parent 013cc16 commit b1a6e19
Showing 4 changed files with 138 additions and 74 deletions.
183 changes: 114 additions & 69 deletions src/lib.rs
@@ -14,6 +14,7 @@ use tinytemplate::TinyTemplate;

use crate::parsers::default_parsers;
use crate::parsers::ParserOutput;
use crate::parsers::StructuredLogParser;
use crate::templates::*;
use crate::types::*;
mod parsers;
@@ -61,6 +62,67 @@ fn maybe_remove_convert_frame_suffixes(frames: &mut Vec<FrameSummary>) {
}
}

fn run_parser<'t>(
lineno: usize,
parser: &Box<dyn StructuredLogParser + 't>,
e: &Envelope,
payload: &str,
output_count: &mut i32,
output: &mut Vec<(PathBuf, String)>,
compile_directory: &mut Vec<(String, String, i32)>,
multi: &MultiProgress,
stats: &mut Stats,
) {
if let Some(md) = parser.get_metadata(&e) {
let results = parser.parse(lineno, md, e.rank, &e.compile_id, &payload);
match results {
Ok(results) => {
for parser_result in results {
match parser_result {
ParserOutput::File(raw_filename, out) => {
let filename = if let Some(stem) = raw_filename.file_stem() {
let mut r = OsString::new();
r.push(stem);
r.push(OsStr::new("_"));
r.push(output_count.to_string());
if let Some(e) = raw_filename.extension() {
r.push(OsStr::new("."));
r.push(e);
};
raw_filename.with_file_name(r)
} else {
raw_filename
};
output.push((filename.clone(), out));
let filename_str = format!("{}", filename.to_string_lossy());
compile_directory.push((
filename_str.clone(),
filename_str,
*output_count,
));
*output_count += 1;
}
ParserOutput::Link(name, url) => {
compile_directory.push((url, name, *output_count));
*output_count += 1;
}
}
}
}
Err(err) => match parser.name() {
"dynamo_guards" => {
multi.suspend(|| eprintln!("Failed to parse guards json: {}", err));
stats.fail_dynamo_guards_json += 1;
}
name => {
multi.suspend(|| eprintln!("Parser {name} failed: {err}"));
stats.fail_parser += 1;
}
},
}
}
}

pub fn parse_path(path: &PathBuf, config: ParseConfig) -> anyhow::Result<ParseOutput> {
let strict = config.strict;
if !path.is_file() {
@@ -149,11 +211,6 @@ pub fn parse_path(path: &PathBuf, config: ParseConfig) -> anyhow::Result<ParseOutput> {
.peekable();

let mut all_parsers = default_parsers(&tt);
all_parsers.push(Box::new(crate::parsers::CompilationMetricsParser {
tt: &tt,
stack_index: &stack_index,
symbolic_shape_specialization_index: &symbolic_shape_specialization_index,
})); // TODO: use own tt instances
all_parsers.extend(config.custom_parsers);

while let Some((lineno, line)) = iter.next() {
@@ -244,69 +301,39 @@ pub fn parse_path(path: &PathBuf, config: ParseConfig) -> anyhow::Result<ParseOutput> {
let compile_directory = directory.entry(e.compile_id.clone()).or_default();

for parser in &all_parsers {
if let Some(md) = parser.get_metadata(&e) {
let results = parser.parse(lineno, md, e.rank, &e.compile_id, &payload);
match results {
Ok(results) => {
for parser_result in results {
match parser_result {
ParserOutput::File(raw_filename, out) => {
let filename = if let Some(stem) = raw_filename.file_stem() {
let mut r = OsString::new();
r.push(stem);
r.push(OsStr::new("_"));
r.push(output_count.to_string());
if let Some(e) = raw_filename.extension() {
r.push(OsStr::new("."));
r.push(e);
};
raw_filename.with_file_name(r)
} else {
raw_filename
};
output.push((filename.clone(), out));
let filename_str = format!("{}", filename.to_string_lossy());
compile_directory.push((
filename_str.clone(),
filename_str,
output_count,
));
output_count += 1;
}
ParserOutput::Link(name, url) => {
compile_directory.push((url, name, output_count));
output_count += 1;
}
}
}
}
Err(err) => match parser.name() {
"dynamo_guards" => {
multi.suspend(|| eprintln!("Failed to parse guards json: {}", err));
stats.fail_dynamo_guards_json += 1;
}
name => {
multi.suspend(|| eprintln!("Parser {name} failed: {err}"));
stats.fail_parser += 1;
}
},
}
}
}

if let Some(stack) = e.stack {
unknown_stack_trie.insert(stack, None);
run_parser(
lineno,
parser,
&e,
&payload,
&mut output_count,
&mut output,
compile_directory,
&multi,
&mut stats,
)
}

if let Some(specialization) = e.symbolic_shape_specialization {
symbolic_shape_specialization_index
.borrow_mut()
.entry(e.compile_id.clone())
.or_default()
.push(specialization);
}

if let Some(m) = e.compilation_metrics {
if let Some(ref m) = e.compilation_metrics {
let copied_directory = compile_directory.clone();
let parser: Box<dyn StructuredLogParser> =
Box::new(crate::parsers::CompilationMetricsParser {
tt: &tt,
stack_index: &stack_index,
symbolic_shape_specialization_index: &symbolic_shape_specialization_index,
output_files: &copied_directory,
});
run_parser(
lineno,
&parser,
&e,
&payload,
&mut output_count,
&mut output,
compile_directory,
&multi,
&mut stats,
);
let compile_id_dir: PathBuf = e
.compile_id
.as_ref()
@@ -320,11 +347,17 @@ pub fn parse_path(path: &PathBuf, config: ParseConfig) -> anyhow::Result<ParseOutput> {
)
.into();

// compilation metrics is always the last output, since it just ran
let metrics_filename = format!(
"compilation_metrics_{}.html",
(output_count - 1).to_string(),
);
let id = e.compile_id.clone().map_or("(unknown) ".to_string(), |c| {
format!(
"<a href='{}/compilation_metrics.html'>{cid}</a> ",
"<a href='{}/{}'>{cid}</a> ",
compile_id_dir.display(),
cid = c
metrics_filename,
cid = c,
)
});
if let Some(rr) = m.restart_reasons.as_ref() {
@@ -359,7 +392,19 @@ pub fn parse_path(path: &PathBuf, config: ParseConfig) -> anyhow::Result<ParseOutput> {
if let Some(c) = cid.as_mut() {
c.attempt = 0;
}
metrics_index.entry(cid).or_default().push(m);
metrics_index.entry(cid).or_default().push(m.clone());
}

if let Some(stack) = e.stack {
unknown_stack_trie.insert(stack.clone(), None);
}

if let Some(specialization) = e.symbolic_shape_specialization {
symbolic_shape_specialization_index
.borrow_mut()
.entry(e.compile_id.clone())
.or_default()
.push(specialization);
}

if let Some(m) = e.dynamo_start {
16 changes: 16 additions & 0 deletions src/parsers.rs
@@ -319,6 +319,7 @@ pub struct CompilationMetricsParser<'t> {
pub tt: &'t TinyTemplate<'t>,
pub stack_index: &'t RefCell<StackIndex>,
pub symbolic_shape_specialization_index: &'t RefCell<SymbolicShapeSpecializationIndex>,
pub output_files: &'t Vec<(String, String, i32)>,
}
impl StructuredLogParser for CompilationMetricsParser<'_> {
fn name(&self) -> &'static str {
@@ -365,12 +366,27 @@ impl StructuredLogParser for CompilationMetricsParser<'_> {
stack_html: format_stack(&spec.stack.unwrap_or(Vec::new())),
})
.collect();
let remove_prefix = |x: &String| -> String {
// url is X_Y_Z/<rest>. Get the rest of the string for the link
// on compilation metrics page
let parts: Vec<_> = x.split("/").collect();
let new_str: String = parts[1..].join("");
new_str
};
let output_files: Vec<(String, String, i32)> = self
.output_files
.iter()
.map(|(url, name, number)| {
return (remove_prefix(url), remove_prefix(name), number.clone());
})
.collect();
let context = CompilationMetricsContext {
css: crate::CSS,
m: &m,
compile_id: id,
stack_html: stack_html,
symbolic_shape_specializations: specializations,
output_files: &output_files,
};
let output = self.tt.render(&filename, &context)?;
simple_file_output(&filename, lineno, compile_id, &output)
10 changes: 6 additions & 4 deletions src/templates.rs
@@ -207,10 +207,12 @@ pub static TEMPLATE_COMPILATION_METRICS: &str = r#"
</head>
<body>
<h1>Compilation Info for {compile_id}</h1>
<h2>Dynamo Output:</h2>
<object data="dynamo_output_graph.txt" style="width:80%; height:auto">
<a href="dynamo_output_graph.txt"> dynamo_output_graph.txt </a>
</object>
<h2>Output files:</h2>
<ul>
{{ for path_idx in output_files }}
<li><a href="{path_idx.0}">{path_idx.1}</a> ({path_idx.2})</li>
{{ endfor }}
</ul>
<h2>Stack</h2>
{stack_html | format_unescaped}
<h2>Compile Time(seconds)</h2>
3 changes: 2 additions & 1 deletion src/types.rs
@@ -240,7 +240,7 @@ pub struct ArtifactMetadata {
pub encoding: String,
}

#[derive(Debug, Deserialize, Serialize)]
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct CompilationMetricsMetadata {
// Other information like frame_key, co_name, etc. are already in envelope
pub cache_size: Option<u64>,
@@ -297,6 +297,7 @@ pub struct CompilationMetricsContext<'e> {
pub compile_id: String,
pub stack_html: String,
pub symbolic_shape_specializations: Vec<SymbolicShapeSpecializationContext>,
pub output_files: &'e Vec<(String, String, i32)>,
}

#[derive(Debug, Serialize)]
