diff --git a/huff_codegen/README.md b/huff_codegen/README.md
index 7c5d2b8c..e51c8093 100644
--- a/huff_codegen/README.md
+++ b/huff_codegen/README.md
@@ -94,6 +94,7 @@ let contract = Contract {
     functions: vec![],
     events: vec![],
     tables: vec![],
+    labels: vec![],
 };
 
 // Generate the main bytecode
@@ -149,6 +150,7 @@ let contract = Contract {
     functions: vec![],
     events: vec![],
     tables: vec![],
+    labels: vec![],
 };
 
 // Generate the constructor bytecode
diff --git a/huff_codegen/src/lib.rs b/huff_codegen/src/lib.rs
index e281235f..6556ebd7 100644
--- a/huff_codegen/src/lib.rs
+++ b/huff_codegen/src/lib.rs
@@ -300,7 +300,7 @@ impl Codegen {
         let circular_codesize_invocations = circular_codesize_invocations.unwrap_or(&mut ccsi);
 
         // Loop through all intermediate bytecode representations generated from the AST
-        for (_ir_bytes_index, ir_byte) in ir_bytes.iter().enumerate() {
+        for ir_byte in ir_bytes.iter() {
             let starting_offset = offset;
             match &ir_byte.ty {
                 IRByteType::Bytes(b) => {
diff --git a/huff_codegen/tests/abigen.rs b/huff_codegen/tests/abigen.rs
index 5118b5f3..c6a8f9be 100644
--- a/huff_codegen/tests/abigen.rs
+++ b/huff_codegen/tests/abigen.rs
@@ -28,6 +28,7 @@ fn constructs_valid_abi() {
         functions: vec![],
         events: vec![],
         tables: vec![],
+        labels: vec![],
     };
 
     // Generate the abi from the contract
@@ -68,6 +69,7 @@ fn missing_constructor_fails() {
         functions: vec![],
         events: vec![],
         tables: vec![],
+        labels: vec![],
     };
 
     // Generate the abi from the contract
diff --git a/huff_core/benches/huff_benchmark.rs b/huff_core/benches/huff_benchmark.rs
index e001cf71..394cc2f0 100644
--- a/huff_core/benches/huff_benchmark.rs
+++ b/huff_core/benches/huff_benchmark.rs
@@ -17,7 +17,7 @@ fn lex_erc20_from_source_benchmark(c: &mut Criterion) {
         .collect();
 
     // Recurse file deps + generate flattened source
-    let file_source = file_sources.get(0).unwrap();
+    let file_source = file_sources.first().unwrap();
     let recursed_file_source =
         Compiler::recurse_deps(Arc::clone(file_source), &files::Remapper::new("./"), file_provider)
             .unwrap();
@@ -48,7 +48,7 @@ fn parse_erc20_benchmark(c: &mut Criterion) {
         .collect();
 
     // Recurse file deps + generate flattened source
-    let file_source = file_sources.get(0).unwrap();
+    let file_source = file_sources.first().unwrap();
     let recursed_file_source =
         Compiler::recurse_deps(Arc::clone(file_source), &files::Remapper::new("./"), file_provider)
             .unwrap();
@@ -84,7 +84,7 @@ fn codegen_erc20_benchmark(c: &mut Criterion) {
         .collect();
 
     // Recurse file deps + generate flattened source
-    let file_source = file_sources.get(0).unwrap();
+    let file_source = file_sources.first().unwrap();
     let recursed_file_source =
         Compiler::recurse_deps(Arc::clone(file_source), &files::Remapper::new("./"), file_provider)
             .unwrap();
@@ -133,7 +133,7 @@ fn erc20_compilation_benchmark(c: &mut Criterion) {
         .collect();
 
     // Recurse file deps + generate flattened source
-    let file_source = file_sources.get(0).unwrap();
+    let file_source = file_sources.first().unwrap();
     let recursed_file_source = Compiler::recurse_deps(
         Arc::clone(file_source),
         &files::Remapper::new("./"),
@@ -180,7 +180,7 @@ fn erc721_compilation_benchmark(c: &mut Criterion) {
         .collect();
 
     // Recurse file deps + generate flattened source
-    let file_source = file_sources.get(0).unwrap();
+    let file_source = file_sources.first().unwrap();
     let recursed_file_source = Compiler::recurse_deps(
         Arc::clone(file_source),
         &files::Remapper::new("./"),
diff --git a/huff_core/tests/erc20.rs b/huff_core/tests/erc20.rs
index 3c1144fe..84ed1c3b 100644
--- a/huff_core/tests/erc20.rs
+++ b/huff_core/tests/erc20.rs
@@ -18,7 +18,7 @@ fn test_erc20_compile() {
         .collect();
 
     // Recurse file deps + generate flattened source
-    let file_source = file_sources.get(0).unwrap();
+    let file_source = file_sources.first().unwrap();
     let recursed_file_source =
         Compiler::recurse_deps(Arc::clone(file_source), &files::Remapper::new("./"), file_provider)
             .unwrap();
diff --git a/huff_core/tests/erc721.rs b/huff_core/tests/erc721.rs
index 3460cec3..29ee3961 100644
--- a/huff_core/tests/erc721.rs
+++ b/huff_core/tests/erc721.rs
@@ -18,7 +18,7 @@ fn test_erc721_compile() {
         .collect();
 
     // Recurse file deps + generate flattened source
-    let file_source = file_sources.get(0).unwrap();
+    let file_source = file_sources.first().unwrap();
     let recursed_file_source =
         Compiler::recurse_deps(Arc::clone(file_source), &files::Remapper::new("./"), file_provider)
             .unwrap();
diff --git a/huff_lexer/tests/tables.rs b/huff_lexer/tests/tables.rs
index feabae28..1ff07d74 100644
--- a/huff_lexer/tests/tables.rs
+++ b/huff_lexer/tests/tables.rs
@@ -12,7 +12,7 @@ fn parses_jump_table() {
         .filter(|x| !matches!(x.kind, TokenKind::Whitespace))
         .collect::<Vec<Token>>();
 
-    assert_eq!(tokens.get(0).unwrap().kind, TokenKind::Define);
+    assert_eq!(tokens.first().unwrap().kind, TokenKind::Define);
     assert_eq!(tokens.get(1).unwrap().kind, TokenKind::JumpTable);
     assert_eq!(tokens.get(2).unwrap().kind, TokenKind::Ident(String::from("JUMP_TABLE")));
     assert_eq!(tokens.get(3).unwrap().kind, TokenKind::OpenParen);
@@ -30,7 +30,7 @@ fn parses_packed_jump_table() {
         .filter(|x| !matches!(x.kind, TokenKind::Whitespace))
         .collect::<Vec<Token>>();
 
-    assert_eq!(tokens.get(0).unwrap().kind, TokenKind::Define);
+    assert_eq!(tokens.first().unwrap().kind, TokenKind::Define);
     assert_eq!(tokens.get(1).unwrap().kind, TokenKind::JumpTablePacked);
     assert_eq!(tokens.get(2).unwrap().kind, TokenKind::Ident(String::from("JUMP_TABLE_PACKED")));
     assert_eq!(tokens.get(3).unwrap().kind, TokenKind::OpenParen);
@@ -48,7 +48,7 @@ fn parses_code_table() {
         .filter(|x| !matches!(x.kind, TokenKind::Whitespace))
         .collect::<Vec<Token>>();
 
-    assert_eq!(tokens.get(0).unwrap().kind, TokenKind::Define);
+    assert_eq!(tokens.first().unwrap().kind, TokenKind::Define);
     assert_eq!(tokens.get(1).unwrap().kind, TokenKind::CodeTable);
     assert_eq!(tokens.get(2).unwrap().kind, TokenKind::Ident(String::from("CODE_TABLE")));
     assert_eq!(tokens.get(3).unwrap().kind, TokenKind::OpenParen);
diff --git a/huff_parser/README.md b/huff_parser/README.md
index 9d8e488a..d0d8fc38 100644
--- a/huff_parser/README.md
+++ b/huff_parser/README.md
@@ -58,6 +58,7 @@ let expected_contract = Contract {
     functions: vec![],
     events: vec![],
     tables: vec![],
+    labels: vec![]
 };
 assert_eq!(unwrapped_contract.macros, expected_contract.macros);
 ```
\ No newline at end of file
diff --git a/huff_parser/src/lib.rs b/huff_parser/src/lib.rs
index df2932cb..379dd737 100644
--- a/huff_parser/src/lib.rs
+++ b/huff_parser/src/lib.rs
@@ -34,7 +34,7 @@ pub struct Parser {
 impl Parser {
     /// Public associated function that instantiates a Parser.
     pub fn new(tokens: Vec<Token>, base: Option<String>) -> Self {
-        let initial_token = tokens.get(0).unwrap().clone();
+        let initial_token = tokens.first().unwrap().clone();
         let remapper = files::Remapper::new("./");
         Self { tokens, cursor: 0, current_token: initial_token, base, spans: vec![], remapper }
     }
@@ -43,7 +43,7 @@ impl Parser {
     ///
     /// PANICS if the tokens vec is empty!
     pub fn reset(&mut self) {
-        self.current_token = self.tokens.get(0).unwrap().clone();
+        self.current_token = self.tokens.first().unwrap().clone();
         self.cursor = 0;
     }
 
@@ -70,7 +70,7 @@ impl Parser {
             }
             // Check for a decorator above a test macro
            else if self.check(TokenKind::Pound) {
-                let m = self.parse_macro()?;
+                let m = self.parse_macro(&mut contract)?;
                 tracing::info!(target: "parser", "SUCCESSFULLY PARSED MACRO {}", m.name);
                 contract.macros.push(m);
             }
@@ -102,7 +102,7 @@ impl Parser {
                     contract.errors.push(e);
                 }
                 TokenKind::Macro | TokenKind::Fn | TokenKind::Test => {
-                    let m = self.parse_macro()?;
+                    let m = self.parse_macro(&mut contract)?;
                     tracing::info!(target: "parser", "SUCCESSFULLY PARSED MACRO {}", m.name);
                     self.check_duplicate_macro(&contract, &m)?;
                     contract.macros.push(m);
@@ -186,6 +186,26 @@ impl Parser {
         std::mem::discriminant(&self.current_token.kind) == std::mem::discriminant(&kind)
     }
 
+    /// Checks whether the input label is unique.
+    /// If so, it will be added to the contract. Otherwise, an error will be returned.
+    fn check_duplicate_label(
+        &self,
+        contract: &mut Contract,
+        label: String,
+    ) -> Result<(), ParserError> {
+        if contract.labels.binary_search_by(|_label| _label.cmp(&label)).is_ok() {
+            tracing::error!(target: "parser", "DUPLICATED LABEL NAME: {}", label);
+            Err(ParserError {
+                kind: ParserErrorKind::DuplicateLabel(label.clone()),
+                hint: Some(format!("Duplicated label name: \"{label}\"")),
+                spans: AstSpan(self.spans.clone()),
+            })
+        } else {
+            contract.labels.push(label);
+            Ok(())
+        }
+    }
+
     /// Checks if there is a duplicate macro name
     pub fn check_duplicate_macro(
         &self,
@@ -506,7 +526,7 @@ impl Parser {
     /// Parses a macro.
     ///
     /// It should parse the following : macro MACRO_NAME(args...) = takes (x) returns (n) {...}
-    pub fn parse_macro(&mut self) -> Result<MacroDefinition, ParserError> {
+    pub fn parse_macro(&mut self, contract: &mut Contract) -> Result<MacroDefinition, ParserError> {
         let mut decorator: Option<Decorator> = None;
         if self.check(TokenKind::Pound) {
             decorator = Some(self.parse_decorator()?);
         }
@@ -538,7 +558,7 @@ impl Parser {
         let macro_returns =
             self.match_kind(TokenKind::Returns).map_or(Ok(0), |_| self.parse_single_arg())?;
 
-        let macro_statements: Vec<Statement> = self.parse_body()?;
+        let macro_statements: Vec<Statement> = self.parse_body(contract)?;
 
         Ok(MacroDefinition::new(
             macro_name,
@@ -556,7 +576,7 @@ impl Parser {
     /// Parse the body of a macro.
     ///
     /// Only HEX, OPCODES, labels, builtins, and MACRO calls should be authorized.
-    pub fn parse_body(&mut self) -> Result<Vec<Statement>, ParserError> {
+    pub fn parse_body(&mut self, contract: &mut Contract) -> Result<Vec<Statement>, ParserError> {
         let mut statements: Vec<Statement> = Vec::new();
         self.match_kind(TokenKind::OpenBrace)?;
         tracing::info!(target: "parser", "PARSING MACRO BODY");
@@ -653,6 +673,7 @@ impl Parser {
                 TokenKind::Label(l) => {
                     let mut curr_spans = vec![self.current_token.span.clone()];
                     self.consume();
+                    self.check_duplicate_label(contract, l.to_string())?;
                     let inner_statements: Vec<Statement> = self.parse_label()?;
                     inner_statements
                         .iter()
diff --git a/huff_parser/tests/labels.rs b/huff_parser/tests/labels.rs
index 0ddbe05e..aecfa38c 100644
--- a/huff_parser/tests/labels.rs
+++ b/huff_parser/tests/labels.rs
@@ -1,6 +1,6 @@
 use huff_lexer::*;
 use huff_parser::*;
-use huff_utils::{evm::Opcode, prelude::*};
+use huff_utils::{error::ParserErrorKind, evm::Opcode, prelude::*};
 
 #[test]
 fn multiline_labels() {
@@ -433,3 +433,29 @@ pub fn builtins_under_labels() {
         assert_eq!(s.span, md_expected.statements[i].span);
     }
 }
+
+#[test]
+fn duplicated_labels() {
+    let source = r#"
+    #define macro MAIN() = takes(0) returns(0) {
+        cool_label jump
+        cool_label jump
+
+        cool_label: 0x00
+        dup_label: 0x00
+        dup_label: 0x00
+    }
+    "#;
+    let flattened_source = FullFileSource { source, file: None, spans: vec![] };
+    let lexer = Lexer::new(flattened_source.source);
+    let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
+    let mut parser = Parser::new(tokens, None);
+
+    // Grab the first macro
+    let parse_result = parser.parse();
+    assert!(parse_result.is_err());
+    assert_eq!(
+        parse_result.unwrap_err().kind,
+        ParserErrorKind::DuplicateLabel("dup_label".to_string())
+    );
+}
diff --git a/huff_utils/src/abi.rs b/huff_utils/src/abi.rs
index 2f6cb67f..94c3e070 100644
--- a/huff_utils/src/abi.rs
+++ b/huff_utils/src/abi.rs
@@ -30,6 +30,7 @@
 //!     }],
 //!     events: vec![],
 //!     tables: vec![],
+//!     labels: vec![],
 //! };
 //!
 //! // Create an ABI using that generate contract
@@ -78,7 +79,7 @@ impl From<Contract> for Abi {
             .filter(|m| m.name.to_lowercase() == "constructor")
             .cloned()
             .collect::<Vec<_>>()
-            .get(0)
+            .first()
             .map(|func| Constructor {
                 inputs: func
                     .inputs
@@ -97,7 +98,7 @@ impl From<Contract> for Abi {
             .filter(|m| m.name == "CONSTRUCTOR")
             .cloned()
             .collect::<Vec<_>>()
-            .get(0)
+            .first()
             .map(|func| Constructor {
                 inputs: func
                     .parameters
diff --git a/huff_utils/src/ast.rs b/huff_utils/src/ast.rs
index a2726a3b..447d3160 100644
--- a/huff_utils/src/ast.rs
+++ b/huff_utils/src/ast.rs
@@ -116,6 +116,8 @@ pub struct Contract {
     pub events: Vec<EventDefinition>,
     /// Tables
     pub tables: Vec<TableDefinition>,
+    /// Labels
+    pub labels: Vec<String>,
 }
 
 impl Contract {
@@ -180,7 +182,7 @@ impl Contract {
             .iter()
             .filter(|pointer| pointer.0.eq(&c.name))
             .collect::<Vec<_>>()
-            .get(0)
+            .first()
         {
             Some(p) => {
                 *c = ConstantDefinition {
@@ -261,7 +263,7 @@ impl Contract {
             .iter()
             .filter(|md| md.name.eq(&mi.macro_name))
             .collect::<Vec<_>>()
-            .get(0)
+            .first()
         {
             Some(&md) => {
                 if md.name.eq("CONSTRUCTOR") {
@@ -291,7 +293,7 @@ impl Contract {
             .iter()
             .filter(|md| md.name.eq(name))
             .collect::<Vec<_>>()
-            .get(0)
+            .first()
         {
             Some(&md) => {
                 if md.name.eq("CONSTRUCTOR") {
@@ -348,7 +350,7 @@ impl Contract {
             .iter()
             .filter(|pointer| pointer.0.eq(const_name))
             .collect::<Vec<_>>()
-            .get(0)
+            .first()
             .is_none()
         {
             tracing::debug!(target: "ast", "No storage pointer already set for \"{}\"!", const_name);
@@ -360,7 +362,7 @@ impl Contract {
             .iter()
             .filter(|c| c.name.eq(const_name))
             .collect::<Vec<_>>()
-            .get(0)
+            .first()
         {
             Some(c) => {
                 let new_value = match c.value {
diff --git a/huff_utils/src/error.rs b/huff_utils/src/error.rs
index d0e90a1b..519bcb81 100644
--- a/huff_utils/src/error.rs
+++ b/huff_utils/src/error.rs
@@ -63,6 +63,8 @@ pub enum ParserErrorKind {
     InvalidDecoratorFlag(String),
     /// Invalid decorator flag argument
     InvalidDecoratorFlagArg(TokenKind),
+    /// Duplicate label
+    DuplicateLabel(String),
     /// Duplicate MACRO
     DuplicateMacro(String),
 }
@@ -490,6 +492,14 @@ impl fmt::Display for CompilerError {
                         pe.spans.error(pe.hint.as_ref())
                     )
                 }
+                ParserErrorKind::DuplicateLabel(label) => {
+                    write!(
+                        f,
+                        "\nError: Duplicate label: \"{}\" \n{}\n",
+                        label,
+                        pe.spans.error(pe.hint.as_ref())
+                    )
+                }
                 ParserErrorKind::DuplicateMacro(mn) => {
                     write!(
                         f,