This repository has been archived by the owner on Oct 20, 2024. It is now read-only.

feat(huff_lexer): lexer using the logos crate #118

Draft
wants to merge 17 commits into `huff_parser`
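
For reviewers who want context before the diffs below: this slice of the PR touches only call sites and packaging, not the lexer itself, so here is a minimal sketch of what a logos 0.12 token definition generally looks like. The token names and patterns are illustrative assumptions, not the actual `huff_lexer` tokens introduced by this branch.

```rust
use logos::Logos;

// Illustrative token set only -- the real huff_lexer tokens will differ.
#[derive(Logos, Debug, PartialEq)]
enum TokenKind {
    #[token("#define")]
    Define,

    #[token("macro")]
    Macro,

    #[regex(r"[a-zA-Z_][a-zA-Z0-9_]*")]
    Ident,

    #[regex(r"0x[0-9a-fA-F]+")]
    Literal,

    // logos 0.12 requires an #[error] variant; whitespace is skipped here.
    #[error]
    #[regex(r"[ \t\r\n]+", logos::skip)]
    Error,
}

fn main() {
    let mut lex = TokenKind::lexer("#define macro MAIN");
    while let Some(kind) = lex.next() {
        println!("{:?} {:?} {:?}", kind, lex.slice(), lex.span());
    }
}
```

Logos builds the state machine at compile time, and the `Lexer` returned by `TokenKind::lexer` is a plain iterator over the token enum, which is presumably what makes the `unwrap`-free call sites in `huff_core` below possible.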
48 changes: 30 additions & 18 deletions huff_codegen/src/lib.rs
@@ -14,7 +14,7 @@ use std::fs;
///
/// Code Generation Manager responsible for generating the code for the Huff Language.
#[derive(Debug, Default, PartialEq, Eq, Clone)]
pub struct Codegen {
pub struct Codegen<'a> {
/// The Input AST
pub ast: Option<Contract>,
/// A cached codegen output artifact
@@ -23,20 +23,28 @@ pub struct Codegen {
pub main_bytecode: Option<String>,
/// Intermediate constructor bytecode store
pub constructor_bytecode: Option<String>,

phantom: std::marker::PhantomData<&'a ()>,
}

impl Codegen {
impl<'a> Codegen<'a> {
/// Public associated function to instantiate a new Codegen instance.
pub fn new() -> Self {
Self { ast: None, artifact: None, main_bytecode: None, constructor_bytecode: None }
Self {
ast: None,
artifact: None,
main_bytecode: None,
constructor_bytecode: None,
phantom: std::marker::PhantomData,
}
}

/// Generates main bytecode from a Contract AST
///
/// # Arguments
///
/// * `ast` - Optional Contract Abstract Syntax Tree
pub fn roll(ast: Option<Contract>) -> Result<String, CodegenError> {
pub fn roll(ast: Option<Contract>) -> Result<String, CodegenError<'a>> {
let bytecode: String = String::default();

// Grab the AST
@@ -50,7 +58,7 @@ impl Codegen {
kind: CodegenErrorKind::MissingAst,
span: None,
token: None,
})
});
}
};

@@ -64,7 +72,7 @@ impl Codegen {
}

/// Gracefully get the Contract AST
pub fn graceful_ast_grab(&self, ast: Option<Contract>) -> Result<Contract, CodegenError> {
pub fn graceful_ast_grab(&self, ast: Option<Contract>) -> Result<Contract, CodegenError<'a>> {
match ast {
Some(a) => Ok(a),
None => match &self.ast {
@@ -86,7 +94,7 @@ impl Codegen {
/// # Arguments
///
/// * `ast` - Optional Contract Abstract Syntax Tree
pub fn construct(ast: Option<Contract>) -> Result<String, CodegenError> {
pub fn construct(ast: Option<Contract>) -> Result<String, CodegenError<'a>> {
// Grab the AST
let contract = match &ast {
Some(a) => a,
@@ -96,7 +104,7 @@ impl Codegen {
kind: CodegenErrorKind::MissingAst,
span: None,
token: None,
})
});
}
};

@@ -109,7 +117,7 @@ impl Codegen {
kind: CodegenErrorKind::MissingConstructor,
span: None,
token: None,
})
});
};

tracing::info!(target: "codegen", "CONSTRUCTOR MACRO FOUND: {:?}", c_macro);
@@ -131,7 +139,7 @@ impl Codegen {
macro_def: MacroDefinition,
ast: Option<Contract>,
scope: &mut Vec<MacroDefinition>,
) -> Result<Vec<Byte>, CodegenError> {
) -> Result<Vec<Byte>, CodegenError<'a>> {
let mut final_bytes: Vec<Byte> = vec![];
tracing::info!(target: "codegen", "RECURSING MACRO DEFINITION");

@@ -144,7 +152,7 @@ impl Codegen {
kind: CodegenErrorKind::MissingAst,
span: None,
token: None,
})
});
}
};

@@ -174,7 +182,7 @@ impl Codegen {
kind: CodegenErrorKind::MissingConstantDefinition,
span: None,
token: None,
})
});
};

tracing::info!(target: "codegen", "FOUND CONSTANT DEFINITION: {:?}", constant);
@@ -212,7 +220,7 @@ impl Codegen {
kind: CodegenErrorKind::MissingMacroDefinition,
span: None,
token: None,
})
});
};

tracing::info!(target: "codegen", "FOUND INNER MACRO: {:?}", ir_macro);
@@ -232,7 +240,7 @@ impl Codegen {
kind: CodegenErrorKind::FailedMacroRecursion,
span: None,
token: None,
})
});
};
final_bytes = final_bytes
.iter()
@@ -246,7 +254,7 @@ impl Codegen {
kind: CodegenErrorKind::InvalidMacroStatement,
span: None,
token: None,
})
});
}
}
}
@@ -273,7 +281,7 @@ impl Codegen {
args: Vec<ethers::abi::token::Token>,
main_bytecode: &str,
constructor_bytecode: &str,
) -> Result<Artifact, CodegenError> {
) -> Result<Artifact, CodegenError<'a>> {
let mut artifact: &mut Artifact = if let Some(art) = &mut self.artifact {
art
} else {
@@ -314,7 +322,7 @@ impl Codegen {
/// # Arguments
///
/// * `out` - Output location to write the serialized json artifact to.
pub fn export(&self, output: String) -> Result<(), CodegenError> {
pub fn export(&self, output: String) -> Result<(), CodegenError<'a>> {
if let Some(art) = &self.artifact {
let serialized_artifact = serde_json::to_string(art).unwrap();
fs::write(output, serialized_artifact).expect("Unable to write file");
@@ -337,7 +345,11 @@ impl Codegen {
///
/// * `ast` - The Contract Abstract Syntax Tree
/// * `output` - An optional output path
pub fn abigen(&mut self, ast: Contract, output: Option<String>) -> Result<Abi, CodegenError> {
pub fn abigen(
&mut self,
ast: Contract,
output: Option<String>,
) -> Result<Abi, CodegenError<'a>> {
let abi: Abi = ast.into();

// Set the abi on self
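A note on the `Codegen<'a>` change above: the new lifetime parameter is not used by any field, which is presumably why the diff adds a `phantom: std::marker::PhantomData<&'a ()>` member — the struct has to mention `'a` somewhere for the `CodegenError<'a>` return types on its methods to borrow it. A minimal, self-contained sketch of that pattern, with illustrative names rather than the real huff_codegen types:

```rust
use std::marker::PhantomData;

// Stand-in for CodegenError<'a>: an error type that borrows data for 'a.
struct GenError<'a> {
    detail: &'a str,
}

// A manager whose fields borrow nothing, but whose methods return GenError<'a>.
// PhantomData<&'a ()> lets the struct declare 'a without storing a reference.
struct Generator<'a> {
    cached: Option<String>,
    phantom: PhantomData<&'a ()>,
}

impl<'a> Generator<'a> {
    fn new() -> Self {
        Self { cached: None, phantom: PhantomData }
    }

    fn require_cache(&self) -> Result<&str, GenError<'a>> {
        match &self.cached {
            Some(s) => Ok(s.as_str()),
            None => Err(GenError { detail: "no cached artifact" }),
        }
    }
}

fn main() {
    let g = Generator::new();
    assert!(g.require_cache().is_err());
}
```

An alternative worth weighing is leaving `Codegen` lifetime-free and introducing `'a` only on the methods (or making `CodegenError` own its data), which would avoid the `PhantomData` field and keep `Codegen::new()` unchanged.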
6 changes: 3 additions & 3 deletions huff_core/src/lib.rs
@@ -98,7 +98,7 @@ impl<'a> Compiler {
Ok(source) => source,
Err(_) => {
tracing::error!(target: "core", "FILE READ FAILED: \"{}\"!", fs.path);
return Err(CompilerError::PathBufRead(fs.path.clone().into()))
return Err(CompilerError::PathBufRead(fs.path.clone().into()));
}
};
new_fs.source = Some(new_source.clone());
@@ -176,7 +176,7 @@ impl<'a> Compiler {
let lexer: Lexer = Lexer::new(&full_source);

// Grab the tokens from the lexer
let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
let tokens = lexer.into_iter().collect::<Vec<Token>>();
tracing::info!(target: "core", "LEXICAL ANALYSIS COMPLETE [{}]", file.path);

// Parser incantation
@@ -252,7 +252,7 @@ impl<'a> Compiler {
}
Err(e) => {
tracing::error!(target: "core", "ERROR UNPACKING FILE: {:?}", e);
return Err(CompilerError::FileUnpackError(e))
return Err(CompilerError::FileUnpackError(e));
}
}
}
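The huff_core change above (and the same edit repeated in the test files below) drops `.map(|x| x.unwrap())`, which suggests the logos-backed lexer's iterator now yields `Token` items directly rather than `Result<Token, _>`. A stand-alone sketch of that call-site difference, using dummy types instead of the real huff_utils definitions:

```rust
// Dummy stand-in for huff_utils::token::Token.
#[derive(Debug, Clone, PartialEq)]
struct Token(&'static str);

// Old shape: the lexer iterator yielded Result<Token, E>, so call sites unwrapped.
fn fallible_lexer() -> impl Iterator<Item = Result<Token, String>> {
    vec![Ok(Token("#define")), Ok(Token("macro"))].into_iter()
}

// New shape: the logos-backed lexer yields Token directly.
fn infallible_lexer() -> impl Iterator<Item = Token> {
    vec![Token("#define"), Token("macro")].into_iter()
}

fn main() {
    // Before: each item had to be unwrapped before collecting.
    let old: Vec<Token> = fallible_lexer().map(|t| t.unwrap()).collect();
    // After: items collect directly.
    let new: Vec<Token> = infallible_lexer().collect();
    assert_eq!(old, new);
}
```

If lexing failures are still possible (for example via a logos `Error` variant), it is worth confirming how this branch surfaces them, since the `Result` wrapper at the call sites is gone.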
4 changes: 2 additions & 2 deletions huff_core/tests/compiling.rs
@@ -38,7 +38,7 @@ const SOURCE: &str = r#"
fn compiles_constructor_bytecode() {
// Lex and Parse the source code
let lexer = Lexer::new(SOURCE);
let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
let tokens = lexer.into_iter().collect::<Vec<Token>>();
let mut parser = Parser::new(tokens);

// Grab the first macro
@@ -60,7 +60,7 @@ fn compiles_constructor_bytecode() {
fn compiles_runtime_bytecode() {
// Lex and Parse the source code
let lexer = Lexer::new(SOURCE);
let tokens = lexer.into_iter().map(|x| x.unwrap()).collect::<Vec<Token>>();
let tokens = lexer.into_iter().collect::<Vec<Token>>();
let mut parser = Parser::new(tokens);

// Grab the first macro
2 changes: 1 addition & 1 deletion huff_core/tests/recurse_deps.rs
@@ -6,7 +6,7 @@ use huff_utils::files::FileSource;
#[test]
fn test_recursing_fs_dependencies() {
let file_sources: Vec<FileSource> = Compiler::fetch_sources(&vec![PathBuf::from(
"../huff-examples/erc20/contracts/ERC20.huff".to_string(),
"../huff-examples/erc20/contracts/ERC20.huff",
)]);
assert_eq!(file_sources.len(), 1);
let erc20_file_source = file_sources[0].clone();
3 changes: 2 additions & 1 deletion huff_lexer/Cargo.toml
@@ -2,7 +2,7 @@
name = "huff_lexer"
version = "0.3.0"
edition = "2021"
authors = ["Andreas Bigger", "clabby", "exp.table"]
authors = ["Andreas Bigger", "clabby", "exp.table", "Naveen"]
readme = "README.md"
repository = "https://github.com/huff-language/huff-rs/"
license = "MIT OR Apache-2.0"
@@ -12,6 +12,7 @@ Lexical Analysis Crate for the Huff-Language
keywords = ["huff", "rust", "evm", "bytecode", "compiler"]

[dependencies]
logos = "0.12.0"
proptest = "1.0.0"
huff_utils = { path = "../huff_utils", version = "0.1.0" }
regex = "1"