diff --git a/huff_codegen/src/lib.rs b/huff_codegen/src/lib.rs index aca743ab..e281235f 100644 --- a/huff_codegen/src/lib.rs +++ b/huff_codegen/src/lib.rs @@ -475,7 +475,7 @@ impl Codegen { /// On failure, returns a CodegenError. pub fn fill_circular_codesize_invocations( bytes: Vec<(usize, Bytes)>, - circular_codesize_invocations: &mut CircularCodeSizeIndices, + circular_codesize_invocations: &CircularCodeSizeIndices, macro_name: &str, ) -> Result<Vec<(usize, Bytes)>, CodegenError> { // Get the length of the macro diff --git a/huff_lexer/tests/builtins.rs b/huff_lexer/tests/builtins.rs index 64cc6fc7..e8ccb452 100644 --- a/huff_lexer/tests/builtins.rs +++ b/huff_lexer/tests/builtins.rs @@ -24,7 +24,7 @@ fn parses_builtin_function_in_macro_body() { "{", "}", ); let flattened_source = FullFileSource { source, file: None, spans: vec![] }; - let mut lexer = Lexer::new(flattened_source.source.clone()); + let mut lexer = Lexer::new(flattened_source.source); let _ = lexer.next(); // whitespace let _ = lexer.next(); // #define @@ -89,7 +89,7 @@ fn fails_to_parse_builtin_outside_macro_body() { for builtin in builtin_funcs { let source = &format!("{builtin}(MAIN)"); let flattened_source = FullFileSource { source, file: None, spans: vec![] }; - let mut lexer = Lexer::new(flattened_source.source.clone()); + let mut lexer = Lexer::new(flattened_source.source); let tok = lexer.next(); let unwrapped = tok.unwrap().unwrap(); @@ -123,7 +123,7 @@ fn fails_to_parse_invalid_builtin() { "{", "}", ); let flattened_source = FullFileSource { source, file: None, spans: vec![] }; - let mut lexer = Lexer::new(flattened_source.source.clone()); + let mut lexer = Lexer::new(flattened_source.source); let _ = lexer.next(); // whitespace let _ = lexer.next(); // #define diff --git a/huff_lexer/tests/comments.rs b/huff_lexer/tests/comments.rs index 898a6921..962aab03 100644 --- a/huff_lexer/tests/comments.rs +++ b/huff_lexer/tests/comments.rs @@ -14,7 +14,7 @@ use huff_utils::prelude::*; fn instantiates() { let
source = "#define macro HELLO_WORLD()"; let flattened_source = FullFileSource { source, file: None, spans: vec![] }; - let lexer = Lexer::new(flattened_source.source.clone()); + let lexer = Lexer::new(flattened_source.source); assert!(!lexer.eof); } @@ -22,7 +22,7 @@ fn instantiates() { fn single_line_comments() { let source = "// comment contents \n#define macro HELLO_WORLD()"; let flattened_source = FullFileSource { source, file: None, spans: vec![] }; - let mut lexer = Lexer::new(flattened_source.source.clone()); + let mut lexer = Lexer::new(flattened_source.source); // The first token should be a single line comment let tok = lexer.next(); @@ -94,7 +94,7 @@ fn single_line_comments() { fn multi_line_comments() { let source = "/* comment contents*/#define macro HELLO_WORLD()"; let flattened_source = FullFileSource { source, file: None, spans: vec![] }; - let mut lexer = Lexer::new(flattened_source.source.clone()); + let mut lexer = Lexer::new(flattened_source.source); // The first token should be a single line comment let tok = lexer.next(); diff --git a/huff_lexer/tests/context.rs b/huff_lexer/tests/context.rs index 28874359..d2136906 100644 --- a/huff_lexer/tests/context.rs +++ b/huff_lexer/tests/context.rs @@ -6,7 +6,7 @@ use huff_utils::prelude::*; fn function_context() { let source = "#define function test(bytes32) {} returns (address)"; let flattened_source = FullFileSource { source, file: None, spans: vec![] }; - let lexer = Lexer::new(flattened_source.source.clone()); + let lexer = Lexer::new(flattened_source.source); let tokens = lexer .into_iter() .map(|x| x.unwrap()) @@ -26,7 +26,7 @@ fn function_context() { fn event_context() { let source = "#define event Transfer(bytes32,address)"; let flattened_source = FullFileSource { source, file: None, spans: vec![] }; - let lexer = Lexer::new(flattened_source.source.clone()); + let lexer = Lexer::new(flattened_source.source); let tokens = lexer .into_iter() .map(|x| x.unwrap()) @@ -44,7 +44,7 @@ fn 
event_context() { fn macro_context() { let source = "#define macro TEST() = takes (0) returns (0) {byte}"; let flattened_source = FullFileSource { source, file: None, spans: vec![] }; - let lexer = Lexer::new(flattened_source.source.clone()); + let lexer = Lexer::new(flattened_source.source); let tokens = lexer .into_iter() .map(|x| x.unwrap()) diff --git a/huff_lexer/tests/decorators.rs b/huff_lexer/tests/decorators.rs index 4cf3fbf2..2bc6b46a 100644 --- a/huff_lexer/tests/decorators.rs +++ b/huff_lexer/tests/decorators.rs @@ -17,7 +17,7 @@ fn parses_decorator() { ); let flattened_source = FullFileSource { source, file: None, spans: vec![] }; - let mut lexer = Lexer::new(flattened_source.source.clone()); + let mut lexer = Lexer::new(flattened_source.source); let _ = lexer.next(); // whitespace @@ -124,7 +124,7 @@ fn fails_to_parse_decorator_in_body() { ); let flattened_source = FullFileSource { source, file: None, spans: vec![] }; - let mut lexer = Lexer::new(flattened_source.source.clone()); + let mut lexer = Lexer::new(flattened_source.source); for token in lexer.by_ref() { if let Err(e) = token { diff --git a/huff_lexer/tests/eof.rs b/huff_lexer/tests/eof.rs index c363c723..e37bceac 100644 --- a/huff_lexer/tests/eof.rs +++ b/huff_lexer/tests/eof.rs @@ -5,7 +5,7 @@ use huff_utils::prelude::*; fn end_of_file() { let source = " "; let flattened_source = FullFileSource { source, file: None, spans: vec![] }; - let mut lexer = Lexer::new(flattened_source.source.clone()); + let mut lexer = Lexer::new(flattened_source.source); // Eats the whitespace let _ = lexer.next(); diff --git a/huff_lexer/tests/evm_types.rs b/huff_lexer/tests/evm_types.rs index 0e11054a..5c416a3e 100644 --- a/huff_lexer/tests/evm_types.rs +++ b/huff_lexer/tests/evm_types.rs @@ -16,7 +16,7 @@ fn primitive_type_parsing() { for (evm_type, evm_type_enum) in evm_types { let source = &format!("#define function test({evm_type}) view returns (uint256)"); let flattened_source = FullFileSource { source, 
file: None, spans: vec![] }; - let lexer = Lexer::new(flattened_source.source.clone()); + let lexer = Lexer::new(flattened_source.source); let tokens = lexer .into_iter() .map(|x| x.unwrap()) @@ -42,7 +42,7 @@ fn bounded_array_parsing() { for (evm_type, evm_type_enum) in evm_types { let source = &format!("#define function test({evm_type}) view returns (uint256)"); let flattened_source = FullFileSource { source, file: None, spans: vec![] }; - let lexer = Lexer::new(flattened_source.source.clone()); + let lexer = Lexer::new(flattened_source.source); let tokens = lexer .into_iter() .map(|x| x.unwrap()) @@ -68,7 +68,7 @@ fn unbounded_array_parsing() { for (evm_type, evm_type_enum) in evm_types { let source = &format!("#define function test({evm_type}) view returns (uint256)"); let flattened_source = FullFileSource { source, file: None, spans: vec![] }; - let lexer = Lexer::new(flattened_source.source.clone()); + let lexer = Lexer::new(flattened_source.source); let tokens = lexer .into_iter() .map(|x| x.unwrap()) @@ -93,7 +93,7 @@ fn multidim_array_parsing() { for (evm_type, evm_type_enum) in evm_types { let source = &format!("#define function test({evm_type}) view returns (uint256)"); let flattened_source = FullFileSource { source, file: None, spans: vec![] }; - let lexer = Lexer::new(flattened_source.source.clone()); + let lexer = Lexer::new(flattened_source.source); let tokens = lexer .into_iter() .map(|x| x.unwrap()) diff --git a/huff_lexer/tests/fsp.rs b/huff_lexer/tests/fsp.rs index bc254c64..d143efcb 100644 --- a/huff_lexer/tests/fsp.rs +++ b/huff_lexer/tests/fsp.rs @@ -5,7 +5,7 @@ use huff_utils::prelude::*; fn free_storage_pointer() { let source = "FREE_STORAGE_POINTER() "; let flattened_source = FullFileSource { source, file: None, spans: vec![] }; - let mut lexer = Lexer::new(flattened_source.source.clone()); + let mut lexer = Lexer::new(flattened_source.source); // The first token should be the fsp let tok = lexer.next().unwrap().unwrap(); diff --git 
a/huff_lexer/tests/function_type.rs b/huff_lexer/tests/function_type.rs index 10729edf..d8cf8fdf 100644 --- a/huff_lexer/tests/function_type.rs +++ b/huff_lexer/tests/function_type.rs @@ -13,7 +13,7 @@ fn parses_function_type() { for (fn_type, fn_type_kind) in fn_types { let source = &format!("#define function test() {fn_type} returns (uint256)"); let flattened_source = FullFileSource { source, file: None, spans: vec![] }; - let mut lexer = Lexer::new(flattened_source.source.clone()); + let mut lexer = Lexer::new(flattened_source.source); let _ = lexer.next(); // #define let _ = lexer.next(); // whitespace diff --git a/huff_lexer/tests/hex.rs b/huff_lexer/tests/hex.rs index 0d5b1ffc..b24ca847 100644 --- a/huff_lexer/tests/hex.rs +++ b/huff_lexer/tests/hex.rs @@ -5,7 +5,7 @@ use huff_utils::prelude::*; fn parses_single_hex() { let source = "0xa57B"; let flattened_source = FullFileSource { source, file: None, spans: vec![] }; - let mut lexer = Lexer::new(flattened_source.source.clone()); + let mut lexer = Lexer::new(flattened_source.source); // The first and only token should be lexed as Literal(0xa57B) let tok = lexer.next().unwrap().unwrap(); @@ -20,7 +20,7 @@ fn parses_single_hex() { fn parses_bool() { let source = "false true"; let flattened_source = FullFileSource { source, file: None, spans: vec![] }; - let mut lexer = Lexer::new(flattened_source.source.clone()); + let mut lexer = Lexer::new(flattened_source.source); // The first token should be lexed as a Literal representing 0x00 let tok = lexer.next().unwrap().unwrap(); @@ -41,7 +41,7 @@ fn parses_bool() { fn parses_odd_len_hex() { let source = "0x1"; let flattened_source = FullFileSource { source, file: None, spans: vec![] }; - let mut lexer = Lexer::new(flattened_source.source.clone()); + let mut lexer = Lexer::new(flattened_source.source); // The first and only token should be lexed as Literal(0x1) let tok = lexer.next().unwrap().unwrap(); diff --git a/huff_parser/tests/abi.rs b/huff_parser/tests/abi.rs 
index d054ce6e..e9db7fce 100644 --- a/huff_parser/tests/abi.rs +++ b/huff_parser/tests/abi.rs @@ -7,7 +7,7 @@ fn build_abi_from_ast() { let source = "#define function test(uint256[2][],string) view returns(uint256)"; let flattened_source = FullFileSource { source, file: None, spans: vec![] }; - let lexer = Lexer::new(flattened_source.source.clone()); + let lexer = Lexer::new(flattened_source.source); let tokens = lexer .into_iter() .map(|x| x.unwrap()) diff --git a/huff_utils/src/ast.rs b/huff_utils/src/ast.rs index 72cf8fe6..3b0c5255 100644 --- a/huff_utils/src/ast.rs +++ b/huff_utils/src/ast.rs @@ -33,8 +33,7 @@ impl AstSpan { pub fn error(&self, hint: Option<&String>) -> String { let file_to_source_map = self.0.iter().fold(BTreeMap::<String, Vec<&Span>>::new(), |mut m, s| { - let file_name = - s.file.as_ref().map(|f2| f2.path.clone()).unwrap_or_else(|| "".to_string()); + let file_name = s.file.as_ref().map(|f2| f2.path.clone()).unwrap_or_default(); let mut new_vec: Vec<&Span> = m.get(&file_name).cloned().unwrap_or_default(); new_vec.push(s); m.insert(file_name, new_vec);