
Commit

cargo fmt
vcfxb committed Jul 14, 2024
1 parent c1bcfbd commit 6398af2
Showing 7 changed files with 68 additions and 43 deletions.
4 changes: 2 additions & 2 deletions wright/src/lexer.rs
@@ -41,8 +41,8 @@ impl Lexer {
/// Available in test cases, creates a new [Lexer] over a given static [str]ing.
///
/// The instantiated [Source] in this [Lexer] has its name set to [FileName::None].
///
/// This function is limited to this crate because `#[cfg(test)]` items are not available
///
/// This function is limited to this crate because `#[cfg(test)]` items are not available
/// externally, however it should be relatively easy to reproduce.
///
/// [Source]: crate::source_tracking::source::Source
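The doc comment above notes that this crate-private test constructor is easy to reproduce externally. A minimal sketch of that reproduction, assuming the `lexer` feature is enabled and using the same public Lexer and Source APIs the tests later in this commit rely on:

    use std::sync::Arc;
    use wright::lexer::Lexer;
    use wright::source_tracking::{filename::FileName, source::Source};

    /// Hypothetical stand-in for the crate-private helper described above.
    fn lexer_over_static_str(s: &'static str) -> Lexer {
        // Wrap the static string in an unnamed in-memory Source, then hand it to the Lexer.
        Lexer::new(Arc::new(Source::new_from_static_str(FileName::None, s)))
    }
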
2 changes: 1 addition & 1 deletion wright/src/parser.rs
@@ -5,9 +5,9 @@
use super::lexer::Lexer;
use error::ParserError;

pub mod error;
mod identifier;
mod path;
pub mod error;

/// Trait implemented by all AST nodes that can be parsed.
pub trait Parse: Sized {
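The body of the Parse trait is cut off by the diff context here. Judging from the Identifier implementation later in this commit, its required method presumably has roughly this shape (a sketch against the imports already shown above, not necessarily the crate's exact definition):

    /// Trait implemented by all AST nodes that can be parsed.
    pub trait Parse: Sized {
        /// Attempt to parse one node of this type from the given lexer,
        /// producing a ParserError if the upcoming tokens do not match.
        fn parse(lexer: &mut Lexer) -> Result<Self, ParserError>;
    }
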
44 changes: 26 additions & 18 deletions wright/src/parser/error.rs
@@ -1,10 +1,13 @@
//! Representation and implementation relating to errors that may be encountered in parsing.
use crate::{
reporting::{Diagnostic, Highlight},
source_tracking::fragment::Fragment,
};
use std::borrow::Cow;
use crate::{reporting::{Diagnostic, Highlight}, source_tracking::fragment::Fragment};

/// All the different errors that can be produced in the process of parsing.
/// The names of these should be self-describing, but in cases when one of these needs to appear in a diagnostic,
/// The names of these should be self-describing, but in cases when one of these needs to appear in a diagnostic,
/// use [ParserErrorKind::description].
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
@@ -17,8 +20,14 @@ pub enum ParserErrorKind {

/// Table of all the description strings for variants of [ParserErrorKind].
pub const ERROR_VARIANT_DESCRIPTION_TABLE: &[(ParserErrorKind, &str)] = &[
(ParserErrorKind::UnternminatedStringLiteralEncountered, "encountered unterminated string literal while parsing"),
(ParserErrorKind::UnterminatedMultilineCommentEncountered, "encountered unterminated multiline comment while parsing"),
(
ParserErrorKind::UnternminatedStringLiteralEncountered,
"encountered unterminated string literal while parsing",
),
(
ParserErrorKind::UnterminatedMultilineCommentEncountered,
"encountered unterminated multiline comment while parsing",
),
(ParserErrorKind::ExpectedIdentifier, "expected identifier"),
(ParserErrorKind::ExpectedPath, "expected path or identifier"),
];
@@ -29,35 +38,33 @@ impl ParserErrorKind {
let mut i = 0;

while i < ERROR_VARIANT_DESCRIPTION_TABLE.len() {

if ERROR_VARIANT_DESCRIPTION_TABLE[i].0 as u64 == self as u64 {
return true;
}

i += 1;
}

false
}

/// Get the description string of this [ParserErrorKind], if one exists. Calls to this against literals
/// should be zero-cost since all the lookups are done at compile time. You can use a `const { }` block
/// to ensure this.
///
/// Calls against variables might be a bit more expensive, since this does an iterative lookup against the
/// Get the description string of this [ParserErrorKind], if one exists. Calls to this against literals
/// should be zero-cost since all the lookups are done at compile time. You can use a `const { }` block
/// to ensure this.
///
/// Calls against variables might be a bit more expensive, since this does an iterative lookup against the
/// [ERROR_VARIANT_DESCRIPTION_TABLE].
pub const fn find_description(self) -> Option<&'static str> {
let mut i = 0;

while i < ERROR_VARIANT_DESCRIPTION_TABLE.len() {

if ERROR_VARIANT_DESCRIPTION_TABLE[i].0 as u64 == self as u64 {
return Some(ERROR_VARIANT_DESCRIPTION_TABLE[i].1);
}

i += 1;
}

None
}

@@ -67,7 +74,7 @@ impl ParserErrorKind {
}
}

/// An error that occurred while parsing.
/// An error that occurred while parsing.
/// This error structure is pretty simple compared to what can be represented using a diagnostic. That's fine,
/// since most of the more complex errors arise when typechecking, rather than checking syntax.
#[derive(Debug)]
@@ -79,21 +86,22 @@ pub struct ParserError {
pub location: Fragment,

/// Optionally, a help string that can be printed with this error.
pub help: Option<Cow<'static, str>>
pub help: Option<Cow<'static, str>>,
}

impl ParserError {
/// Turn this parser error into a full blown compiler error.
/// Turn this parser error into a full blown compiler error.
pub fn as_diagnostic(self) -> Diagnostic {
let description = self.kind
let description = self
.kind
.find_description()
.map(ToOwned::to_owned)
.unwrap_or(format!("parser error ({:?})", self.kind));

let mut diagnostic = Diagnostic::error()
.with_code(self.kind.error_code_string())
.with_message(description)
.with_highlights([Highlight::primary(self.location,"")]);
.with_highlights([Highlight::primary(self.location, "")]);

if let Some(help) = self.help {
diagnostic = diagnostic.with_notes([help]);
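The find_description doc comment above says that lookups against literal variants can be made zero-cost by forcing them into a `const { }` block. A small illustration of that usage, assuming the `parsing` feature is enabled and ParserErrorKind is reachable at wright::parser::error::ParserErrorKind:

    use wright::parser::error::ParserErrorKind;

    fn main() {
        // The inline const block forces the table walk in find_description to run during
        // compilation, so only the resulting Option<&'static str> survives to runtime.
        let desc = const { ParserErrorKind::ExpectedIdentifier.find_description() };
        assert_eq!(desc, Some("expected identifier"));
    }
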
21 changes: 16 additions & 5 deletions wright/src/parser/identifier.rs
@@ -1,18 +1,27 @@
//! [Parse] implementation for [Identifier].
use super::{error::{ParserError, ParserErrorKind}, Parse};
use super::{
error::{ParserError, ParserErrorKind},
Parse,
};
use crate::{
ast::identifier::Identifier,
lexer::{token::{Token, TokenTy}, Lexer},
lexer::{
token::{Token, TokenTy},
Lexer,
},
};

impl Parse for Identifier {
fn parse(lexer: &mut Lexer) -> Result<Self, ParserError> {
let next_token = lexer.next_token();

// Get the fragment from the next token if it's the right type (or produce an error).
// Get the fragment from the next token if it's the right type (or produce an error).
let ident_fragment = match next_token {
Some(Token { variant: TokenTy::Identifier, fragment }) => Ok(fragment),
Some(Token {
variant: TokenTy::Identifier,
fragment,
}) => Ok(fragment),

Some(Token { fragment, .. }) => Err(ParserError {
kind: ParserErrorKind::ExpectedIdentifier,
@@ -27,7 +36,9 @@ impl Parse for Identifier {
}),
}?;

Ok(Identifier { fragment: ident_fragment })
Ok(Identifier {
fragment: ident_fragment,
})
}
}

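For contrast with the failing-parse test near the end of this commit, a success-path sketch of the impl above, assuming the same public constructors those tests use (with the `parsing` feature enabled):

    use std::sync::Arc;
    use wright::{
        ast::identifier::Identifier,
        lexer::Lexer,
        parser::Parse,
        source_tracking::{filename::FileName, source::Source},
    };

    fn main() {
        // Lex an in-memory source containing nothing but a single identifier token.
        let source = Source::new_from_static_str(FileName::None, "hello_world");
        let mut lexer = Lexer::new(Arc::new(source));

        // The match above accepts the TokenTy::Identifier token and keeps its fragment.
        let ident = Identifier::parse(&mut lexer).expect("a lone identifier should parse");
        // Assumes Identifier exposes its fragment publicly, as Token does in the lexer test.
        assert_eq!(ident.fragment.as_str(), "hello_world");
    }
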
8 changes: 4 additions & 4 deletions wright/src/parser/path.rs
@@ -1,9 +1,9 @@
//! [Parse] implementation for [Path].
use super::{Parse, error::ParserError};
use super::{error::ParserError, Parse};
use crate::ast::identifier::Identifier;
use crate::lexer::Lexer;
use crate::ast::path::Path;
use crate::lexer::Lexer;

impl Parse for Path {
fn parse(_lexer: &mut Lexer) -> Result<Self, ParserError> {
@@ -12,12 +12,12 @@ impl Parse for Path {
}

/// Parse the first (and possibly only) [Identifier] in the [Path].
///
///
fn _parse_head() -> Result<Identifier, ParserError> {
unimplemented!()
}

// /// Parse a path (`head::tail`) in source code.
// /// Parse a path (`head::tail`) in source code.
// pub fn parse_path<'src>(parser_state: &mut ParserState<'src>) -> NodeParserResult<Path<'src>> {
// // Get the initial index to make metadata at the end.
// let initial_index = parser_state.index();
11 changes: 5 additions & 6 deletions wright/tests/lexer.rs
@@ -1,11 +1,10 @@

#![cfg(feature = "lexer")]

use std::sync::Arc;
use wright::{lexer::{
token::TokenTy,
Lexer,
}, source_tracking::{filename::FileName, source::Source}};
use wright::{
lexer::{token::TokenTy, Lexer},
source_tracking::{filename::FileName, source::Source},
};

fn new_test_lexer(s: &'static str) -> Lexer {
Lexer::new(Arc::new(Source::new_from_static_str(FileName::None, s)))
@@ -35,6 +34,6 @@ fn string_with_escape() {
assert_eq!(token.variant, TokenTy::StringLiteral { terminated: true });
assert_eq!(token.fragment.as_str(), lexer.remaining.source.source().as_str());
assert_eq!(lexer.bytes_remaining(), 0);

assert!(lexer.next_token().is_none());
}
21 changes: 14 additions & 7 deletions wright/tests/parser.rs
@@ -1,10 +1,12 @@

#![cfg(all(feature = "reporting", feature = "parsing"))]


use termcolor::Buffer;
use wright::{ast::identifier::Identifier, lexer::Lexer, parser::Parse, source_tracking::{filename::FileName, source::Source, SourceMap, SourceRef}};

use wright::{
ast::identifier::Identifier,
lexer::Lexer,
parser::Parse,
source_tracking::{filename::FileName, source::Source, SourceMap, SourceRef},
};

#[test]
fn test_parse_fail_identifier_to_diagnostic() -> anyhow::Result<()> {
@@ -14,14 +16,19 @@ fn test_parse_fail_identifier_to_diagnostic() -> anyhow::Result<()> {
let parse_error = Identifier::parse(&mut lexer).unwrap_err();
let mut buffer = Buffer::no_color();

parse_error.as_diagnostic().write(&map, &mut buffer, &Default::default())?;
parse_error
.as_diagnostic()
.write(&map, &mut buffer, &Default::default())?;

assert_eq!(std::str::from_utf8(buffer.as_slice())?, "\
assert_eq!(
std::str::from_utf8(buffer.as_slice())?,
"\
error[WPE2]: expected identifier
┌─ <NO_NAME>:1:1
1 │ 12345
│ ^^^^^\n\n");
│ ^^^^^\n\n"
);

Ok(())
}
