diff --git a/huff_lexer/src/lib.rs b/huff_lexer/src/lib.rs index 7ce3f241..d1944683 100644 --- a/huff_lexer/src/lib.rs +++ b/huff_lexer/src/lib.rs @@ -187,6 +187,7 @@ impl<'a> Lexer<'a> { TokenKind::Indexed, TokenKind::View, TokenKind::Pure, + TokenKind::Padded, // First check for packed jump table TokenKind::JumpTablePacked, // Match with jump table if not @@ -195,6 +196,9 @@ impl<'a> Lexer<'a> { ]; for kind in keys.into_iter() { if self.context == Context::MacroBody { + if word == "padded" { + found_kind = Some(TokenKind::Padded) + } break } let key = kind.to_string(); diff --git a/huff_lexer/tests/padded.rs b/huff_lexer/tests/padded.rs new file mode 100644 index 00000000..1c11b730 --- /dev/null +++ b/huff_lexer/tests/padded.rs @@ -0,0 +1,15 @@ +use huff_lexer::Lexer; +use huff_utils::prelude::*; + +#[cfg(test)] +use std::println as info; + +#[test] +fn padded_with_simple_body() { + let source = + "#define macro HELLO_WORLD() = takes(3) returns(0) {\n #define padded(32) {\n 0x00 mstore\n 0x01 0x02 add \n} 0x69 0x69 return\n}"; + let flattened_source = FullFileSource { source, file: None, spans: vec![] }; + let lexer = Lexer::new(flattened_source.source); + let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>(); + info!("{:#?}", tokens); +} diff --git a/huff_parser/src/lib.rs b/huff_parser/src/lib.rs index 48e88c0e..8ffe0cbc 100644 --- a/huff_parser/src/lib.rs +++ b/huff_parser/src/lib.rs @@ -8,7 +8,7 @@ use huff_utils::{ ast::*, error::*, files, - prelude::{bytes32_to_string, hash_bytes, str_to_bytes32, Span}, + prelude::{bytes32_to_string, hash_bytes, str_to_bytes32, Opcode, Span}, token::{Token, TokenKind}, types::*, }; @@ -536,7 +536,7 @@ impl Parser { /// Parse the body of a macro. /// - /// Only HEX, OPCODES, labels, builtins, and MACRO calls should be authorized. + /// Only HEX, OPCODES, labels, builtins, MACRO calls and padded blocks should be authorized. 
pub fn parse_body(&mut self) -> Result<Vec<Statement>, ParserError> { let mut statements: Vec<Statement> = Vec::new(); self.match_kind(TokenKind::OpenBrace)?; @@ -677,7 +677,20 @@ impl Parser { span: AstSpan(curr_spans), }); } + TokenKind::Padded => { + let padded_statements = self.parse_padded()?; + tracing::info!(target: "parser", "PARSING MACRO BODY : [PADDED CODE BLOCK]"); + statements.extend(padded_statements); + } kind => { + if let TokenKind::Define = kind { + // allow for define within a macro body only if it's followed by "padded" + let expected_padded = self.peek().unwrap(); + if expected_padded.kind == TokenKind::Padded { + self.consume(); + continue + } + } tracing::error!(target: "parser", "TOKEN MISMATCH - MACRO BODY: {}", kind); return Err(ParserError { kind: ParserErrorKind::InvalidTokenInMacroBody(kind), @@ -692,6 +705,38 @@ impl Parser { Ok(statements) } + /// Parse a padded codeblock + pub fn parse_padded(&mut self) -> Result<Vec<Statement>, ParserError> { + // consume the Padded token + self.consume(); + + // parse the padded code block's size + let padded_block_size = self.parse_single_arg()?; + + // parse the padded code block's body + let mut padded_statements = self.parse_body()?; + + if padded_statements.len() > padded_block_size { + return Err(ParserError { + kind: ParserErrorKind::InvalidPaddedSize( + padded_block_size, + padded_statements.len(), + ), + hint: Some("Ensure the padded block's size is >= than its body's size".to_string()), + spans: AstSpan(vec![self.current_token.span.clone()]), + }) + } + + while padded_statements.len() < padded_block_size { + padded_statements.push(Statement { + ty: StatementType::Opcode(Opcode::Stop), + span: AstSpan(vec![]), // TODO what span do I put here? + }); + } + + Ok(padded_statements) + } + /// Parse the body of a label. 
/// /// ## Examples diff --git a/huff_parser/tests/padded.rs b/huff_parser/tests/padded.rs new file mode 100644 index 00000000..bd4d0d48 --- /dev/null +++ b/huff_parser/tests/padded.rs @@ -0,0 +1,82 @@ +use huff_lexer::Lexer; +use huff_parser::*; +use huff_utils::{evm::Opcode, prelude::*}; + +#[test] +fn macro_with_simple_body() { + let source = + "#define macro HELLO_WORLD() = takes(3) returns(0) {\n #define padded(7) {\n 0x00 mstore\n 0x01 0x02 add \n}\n}"; + let flattened_source = FullFileSource { source, file: None, spans: vec![] }; + let lexer = Lexer::new(flattened_source.source); + let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>(); + let mut parser = Parser::new(tokens, None); + + // Grab the first macro + let macro_definition = parser.parse().unwrap().macros[0].clone(); + + // TODO fix expected spans + let expected = MacroDefinition { + name: "HELLO_WORLD".to_string(), + decorator: None, + parameters: vec![], + statements: vec![ + Statement { + ty: StatementType::Literal(str_to_bytes32("00")), + span: AstSpan(vec![Span { start: 54, end: 55, file: None }]), + }, + Statement { + ty: StatementType::Opcode(Opcode::Mstore), + span: AstSpan(vec![Span { start: 57, end: 62, file: None }]), + }, + Statement { + ty: StatementType::Literal(str_to_bytes32("01")), + span: AstSpan(vec![Span { start: 67, end: 68, file: None }]), + }, + Statement { + ty: StatementType::Literal(str_to_bytes32("02")), + span: AstSpan(vec![Span { start: 72, end: 73, file: None }]), + }, + Statement { + ty: StatementType::Opcode(Opcode::Add), + span: AstSpan(vec![Span { start: 75, end: 77, file: None }]), + }, + Statement { + ty: StatementType::Opcode(Opcode::Stop), + span: AstSpan(vec![]), // TODO wat do? + }, + Statement { + ty: StatementType::Opcode(Opcode::Stop), + span: AstSpan(vec![]), // TODO wat do? 
+ }, + ], + takes: 3, + returns: 0, + span: AstSpan(vec![ + Span { start: 0, end: 6, file: None }, + Span { start: 8, end: 12, file: None }, + Span { start: 14, end: 24, file: None }, + Span { start: 25, end: 25, file: None }, + Span { start: 26, end: 26, file: None }, + Span { start: 28, end: 28, file: None }, + Span { start: 30, end: 34, file: None }, + Span { start: 35, end: 35, file: None }, + Span { start: 36, end: 36, file: None }, + Span { start: 37, end: 37, file: None }, + Span { start: 39, end: 45, file: None }, + Span { start: 46, end: 46, file: None }, + Span { start: 47, end: 47, file: None }, + Span { start: 48, end: 48, file: None }, + Span { start: 50, end: 50, file: None }, + Span { start: 54, end: 55, file: None }, + Span { start: 57, end: 62, file: None }, + Span { start: 67, end: 68, file: None }, + Span { start: 72, end: 73, file: None }, + Span { start: 75, end: 77, file: None }, + Span { start: 79, end: 79, file: None }, + ]), + outlined: false, + test: false, + }; + assert_eq!(macro_definition, expected); + assert_eq!(parser.current_token.kind, TokenKind::Eof); +} diff --git a/huff_utils/src/error.rs b/huff_utils/src/error.rs index 9ecb2631..b13330f0 100644 --- a/huff_utils/src/error.rs +++ b/huff_utils/src/error.rs @@ -63,6 +63,8 @@ pub enum ParserErrorKind { InvalidDecoratorFlag(String), /// Invalid decorator flag argument InvalidDecoratorFlagArg(TokenKind), + /// Invalid padded code block size + InvalidPaddedSize(usize, usize), } /// A Lexing Error @@ -488,6 +490,9 @@ impl fmt::Display for CompilerError { pe.spans.error(pe.hint.as_ref()) ) } + ParserErrorKind::InvalidPaddedSize(declared_size, actual_size) => { + write!(f, "\nError: Invalid padded code block size: declared size : {} , actual size {}", declared_size, actual_size) + } }, CompilerError::PathBufRead(os_str) => { write!( diff --git a/huff_utils/src/token.rs b/huff_utils/src/token.rs index 547fbe82..d975540c 100644 --- a/huff_utils/src/token.rs +++ b/huff_utils/src/token.rs @@ 
-63,6 +63,8 @@ pub enum TokenKind { Indexed, /// "FREE_STORAGE_POINTER()" keyword FreeStoragePointer, + /// "padded" keyword + Padded, /// An Identifier Ident(String), /// Equal Sign @@ -166,6 +168,7 @@ impl fmt::Display for TokenKind { TokenKind::Takes => "takes", TokenKind::Returns => "returns", TokenKind::FreeStoragePointer => "FREE_STORAGE_POINTER()", + TokenKind::Padded => "padded", TokenKind::Ident(s) => return write!(f, "{s}"), TokenKind::Assign => "=", TokenKind::OpenParen => "(",