
chore(clippy): appease (#292)
* chore: clippy fix

* fix: clippy manual

* fix: fmt
Maddiaa0 committed Sep 2, 2023
1 parent 2395ef6 commit b43103e
Showing 12 changed files with 24 additions and 25 deletions.
2 changes: 1 addition & 1 deletion huff_codegen/src/lib.rs
@@ -475,7 +475,7 @@ impl Codegen {
     /// On failure, returns a CodegenError.
     pub fn fill_circular_codesize_invocations(
         bytes: Vec<(usize, Bytes)>,
-        circular_codesize_invocations: &mut CircularCodeSizeIndices,
+        circular_codesize_invocations: &CircularCodeSizeIndices,
         macro_name: &str,
     ) -> Result<Vec<(usize, Bytes)>, CodegenError> {
         // Get the length of the macro
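
The change above only drops `mut` from the reference: the function reads the circular-codesize indices but never mutates them, so a shared borrow is enough. A minimal sketch of the pattern, using a hypothetical `HashSet`-based stand-in for `CircularCodeSizeIndices` rather than the real huff_utils type (the exact lint behind it, likely `needless_pass_by_ref_mut`, is an assumption):

```rust
use std::collections::HashSet;

// Hypothetical stand-in: a set of (macro name, bytecode offset) pairs.
type CircularCodeSizeIndices = HashSet<(String, usize)>;

// Before: a `&mut` borrow is requested even though the set is only read.
fn count_invocations_mut(indices: &mut CircularCodeSizeIndices, macro_name: &str) -> usize {
    indices.iter().filter(|(name, _)| name == macro_name).count()
}

// After: a shared reference is enough, and callers no longer need exclusive access.
fn count_invocations(indices: &CircularCodeSizeIndices, macro_name: &str) -> usize {
    indices.iter().filter(|(name, _)| name == macro_name).count()
}

fn main() {
    let mut indices: CircularCodeSizeIndices = HashSet::new();
    indices.insert(("MAIN".to_string(), 42));
    assert_eq!(count_invocations_mut(&mut indices, "MAIN"), 1);
    assert_eq!(count_invocations(&indices, "MAIN"), 1);
}
```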
6 changes: 3 additions & 3 deletions huff_lexer/tests/builtins.rs
@@ -24,7 +24,7 @@ fn parses_builtin_function_in_macro_body() {
         "{", "}",
     );
     let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-    let mut lexer = Lexer::new(flattened_source.source.clone());
+    let mut lexer = Lexer::new(flattened_source.source);

     let _ = lexer.next(); // whitespace
     let _ = lexer.next(); // #define
@@ -89,7 +89,7 @@ fn fails_to_parse_builtin_outside_macro_body() {
     for builtin in builtin_funcs {
         let source = &format!("{builtin}(MAIN)");
         let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-        let mut lexer = Lexer::new(flattened_source.source.clone());
+        let mut lexer = Lexer::new(flattened_source.source);

         let tok = lexer.next();
         let unwrapped = tok.unwrap().unwrap();
@@ -123,7 +123,7 @@ fn fails_to_parse_invalid_builtin() {
         "{", "}",
     );
     let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-    let mut lexer = Lexer::new(flattened_source.source.clone());
+    let mut lexer = Lexer::new(flattened_source.source);

     let _ = lexer.next(); // whitespace
     let _ = lexer.next(); // #define
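
The same one-line change repeats across the lexer and parser test files below: `flattened_source` is never used after this point, so its `source` can be moved into `Lexer::new` directly and the `.clone()` buys nothing. A minimal sketch of the pattern with `String`-based stand-ins for `FullFileSource` and `Lexer` (not the real huff_utils/huff_lexer types; if `source` is actually a `&str`, clippy flags cloning a `Copy` reference instead, but the fix is the same):

```rust
// Hypothetical stand-ins; the real types live in huff_utils and huff_lexer.
struct FullFileSource {
    source: String,
}

struct Lexer {
    source: String,
}

impl Lexer {
    fn new(source: String) -> Self {
        Lexer { source }
    }
}

fn main() {
    let flattened_source = FullFileSource { source: "#define macro MAIN()".to_string() };

    // Before: the clone copies a String that is dropped immediately afterwards,
    // because `flattened_source` is never read again.
    // let lexer = Lexer::new(flattened_source.source.clone());

    // After: move the source into the lexer directly.
    let lexer = Lexer::new(flattened_source.source);
    assert_eq!(lexer.source, "#define macro MAIN()");
}
```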
6 changes: 3 additions & 3 deletions huff_lexer/tests/comments.rs
@@ -14,15 +14,15 @@ use huff_utils::prelude::*;
 fn instantiates() {
     let source = "#define macro HELLO_WORLD()";
     let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-    let lexer = Lexer::new(flattened_source.source.clone());
+    let lexer = Lexer::new(flattened_source.source);
     assert!(!lexer.eof);
 }

 #[test]
 fn single_line_comments() {
     let source = "// comment contents \n#define macro HELLO_WORLD()";
     let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-    let mut lexer = Lexer::new(flattened_source.source.clone());
+    let mut lexer = Lexer::new(flattened_source.source);

     // The first token should be a single line comment
     let tok = lexer.next();
@@ -94,7 +94,7 @@ fn single_line_comments() {
 fn multi_line_comments() {
     let source = "/* comment contents*/#define macro HELLO_WORLD()";
     let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-    let mut lexer = Lexer::new(flattened_source.source.clone());
+    let mut lexer = Lexer::new(flattened_source.source);

     // The first token should be a single line comment
     let tok = lexer.next();
6 changes: 3 additions & 3 deletions huff_lexer/tests/context.rs
@@ -6,7 +6,7 @@ use huff_utils::prelude::*;
 fn function_context() {
     let source = "#define function test(bytes32) {} returns (address)";
     let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-    let lexer = Lexer::new(flattened_source.source.clone());
+    let lexer = Lexer::new(flattened_source.source);
     let tokens = lexer
         .into_iter()
         .map(|x| x.unwrap())
@@ -26,7 +26,7 @@ fn function_context() {
 fn event_context() {
     let source = "#define event Transfer(bytes32,address)";
     let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-    let lexer = Lexer::new(flattened_source.source.clone());
+    let lexer = Lexer::new(flattened_source.source);
     let tokens = lexer
         .into_iter()
         .map(|x| x.unwrap())
@@ -44,7 +44,7 @@ fn event_context() {
 fn macro_context() {
     let source = "#define macro TEST() = takes (0) returns (0) {byte}";
     let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-    let lexer = Lexer::new(flattened_source.source.clone());
+    let lexer = Lexer::new(flattened_source.source);
     let tokens = lexer
         .into_iter()
         .map(|x| x.unwrap())
4 changes: 2 additions & 2 deletions huff_lexer/tests/decorators.rs
@@ -17,7 +17,7 @@ fn parses_decorator() {
     );

     let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-    let mut lexer = Lexer::new(flattened_source.source.clone());
+    let mut lexer = Lexer::new(flattened_source.source);

     let _ = lexer.next(); // whitespace

@@ -124,7 +124,7 @@ fn fails_to_parse_decorator_in_body() {
     );

     let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-    let mut lexer = Lexer::new(flattened_source.source.clone());
+    let mut lexer = Lexer::new(flattened_source.source);

     for token in lexer.by_ref() {
         if let Err(e) = token {
2 changes: 1 addition & 1 deletion huff_lexer/tests/eof.rs
@@ -5,7 +5,7 @@ use huff_utils::prelude::*;
 fn end_of_file() {
     let source = " ";
     let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-    let mut lexer = Lexer::new(flattened_source.source.clone());
+    let mut lexer = Lexer::new(flattened_source.source);

     // Eats the whitespace
     let _ = lexer.next();
8 changes: 4 additions & 4 deletions huff_lexer/tests/evm_types.rs
@@ -16,7 +16,7 @@ fn primitive_type_parsing() {
     for (evm_type, evm_type_enum) in evm_types {
         let source = &format!("#define function test({evm_type}) view returns (uint256)");
         let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-        let lexer = Lexer::new(flattened_source.source.clone());
+        let lexer = Lexer::new(flattened_source.source);
         let tokens = lexer
             .into_iter()
             .map(|x| x.unwrap())
@@ -42,7 +42,7 @@ fn bounded_array_parsing() {
     for (evm_type, evm_type_enum) in evm_types {
         let source = &format!("#define function test({evm_type}) view returns (uint256)");
         let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-        let lexer = Lexer::new(flattened_source.source.clone());
+        let lexer = Lexer::new(flattened_source.source);
         let tokens = lexer
             .into_iter()
             .map(|x| x.unwrap())
@@ -68,7 +68,7 @@ fn unbounded_array_parsing() {
     for (evm_type, evm_type_enum) in evm_types {
         let source = &format!("#define function test({evm_type}) view returns (uint256)");
         let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-        let lexer = Lexer::new(flattened_source.source.clone());
+        let lexer = Lexer::new(flattened_source.source);
         let tokens = lexer
             .into_iter()
             .map(|x| x.unwrap())
@@ -93,7 +93,7 @@ fn multidim_array_parsing() {
     for (evm_type, evm_type_enum) in evm_types {
         let source = &format!("#define function test({evm_type}) view returns (uint256)");
         let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-        let lexer = Lexer::new(flattened_source.source.clone());
+        let lexer = Lexer::new(flattened_source.source);
         let tokens = lexer
             .into_iter()
             .map(|x| x.unwrap())
2 changes: 1 addition & 1 deletion huff_lexer/tests/fsp.rs
@@ -5,7 +5,7 @@ use huff_utils::prelude::*;
 fn free_storage_pointer() {
     let source = "FREE_STORAGE_POINTER() ";
     let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-    let mut lexer = Lexer::new(flattened_source.source.clone());
+    let mut lexer = Lexer::new(flattened_source.source);

     // The first token should be the fsp
     let tok = lexer.next().unwrap().unwrap();
2 changes: 1 addition & 1 deletion huff_lexer/tests/function_type.rs
@@ -13,7 +13,7 @@ fn parses_function_type() {
     for (fn_type, fn_type_kind) in fn_types {
         let source = &format!("#define function test() {fn_type} returns (uint256)");
         let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-        let mut lexer = Lexer::new(flattened_source.source.clone());
+        let mut lexer = Lexer::new(flattened_source.source);

         let _ = lexer.next(); // #define
         let _ = lexer.next(); // whitespace
6 changes: 3 additions & 3 deletions huff_lexer/tests/hex.rs
@@ -5,7 +5,7 @@ use huff_utils::prelude::*;
 fn parses_single_hex() {
     let source = "0xa57B";
     let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-    let mut lexer = Lexer::new(flattened_source.source.clone());
+    let mut lexer = Lexer::new(flattened_source.source);

     // The first and only token should be lexed as Literal(0xa57B)
     let tok = lexer.next().unwrap().unwrap();
@@ -20,7 +20,7 @@ fn parses_single_hex() {
 fn parses_bool() {
     let source = "false true";
     let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-    let mut lexer = Lexer::new(flattened_source.source.clone());
+    let mut lexer = Lexer::new(flattened_source.source);

     // The first token should be lexed as a Literal representing 0x00
     let tok = lexer.next().unwrap().unwrap();
@@ -41,7 +41,7 @@ fn parses_bool() {
 fn parses_odd_len_hex() {
     let source = "0x1";
     let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-    let mut lexer = Lexer::new(flattened_source.source.clone());
+    let mut lexer = Lexer::new(flattened_source.source);

     // The first and only token should be lexed as Literal(0x1)
     let tok = lexer.next().unwrap().unwrap();
2 changes: 1 addition & 1 deletion huff_parser/tests/abi.rs
@@ -7,7 +7,7 @@ fn build_abi_from_ast() {
     let source = "#define function test(uint256[2][],string) view returns(uint256)";

     let flattened_source = FullFileSource { source, file: None, spans: vec![] };
-    let lexer = Lexer::new(flattened_source.source.clone());
+    let lexer = Lexer::new(flattened_source.source);
     let tokens = lexer
         .into_iter()
         .map(|x| x.unwrap())
3 changes: 1 addition & 2 deletions huff_utils/src/ast.rs
@@ -33,8 +33,7 @@ impl AstSpan {
     pub fn error(&self, hint: Option<&String>) -> String {
         let file_to_source_map =
             self.0.iter().fold(BTreeMap::<String, Vec<&Span>>::new(), |mut m, s| {
-                let file_name =
-                    s.file.as_ref().map(|f2| f2.path.clone()).unwrap_or_else(|| "".to_string());
+                let file_name = s.file.as_ref().map(|f2| f2.path.clone()).unwrap_or_default();
                 let mut new_vec: Vec<&Span> = m.get(&file_name).cloned().unwrap_or_default();
                 new_vec.push(s);
                 m.insert(file_name, new_vec);
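
Since `String::default()` is the empty string, the hand-written closure in `unwrap_or_else` can be replaced by `unwrap_or_default()`. A minimal sketch with hypothetical `Span` and `FileSource` shapes standing in for the real huff_utils types (which clippy lint, if any, drove this exact line is not stated in the commit):

```rust
use std::collections::BTreeMap;

// Hypothetical shapes standing in for the real huff_utils Span/FileSource types.
struct FileSource {
    path: String,
}

struct Span {
    file: Option<FileSource>,
}

fn file_name_of(s: &Span) -> String {
    // Before: spells the empty-string default out by hand.
    // s.file.as_ref().map(|f2| f2.path.clone()).unwrap_or_else(|| "".to_string())

    // After: String::default() is already "", so unwrap_or_default() says the same thing.
    s.file.as_ref().map(|f2| f2.path.clone()).unwrap_or_default()
}

fn main() {
    let spans = vec![
        Span { file: Some(FileSource { path: "main.huff".to_string() }) },
        Span { file: None },
    ];
    // Group spans by file name, as the surrounding fold in AstSpan::error does.
    let mut by_file: BTreeMap<String, Vec<&Span>> = BTreeMap::new();
    for s in &spans {
        by_file.entry(file_name_of(s)).or_default().push(s);
    }
    assert_eq!(by_file.len(), 2);
    assert!(by_file.contains_key(""));
    assert!(by_file.contains_key("main.huff"));
}
```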
