diff --git a/crates/wit-component/src/printing.rs b/crates/wit-component/src/printing.rs
index 769fc04df6..49e4058b09 100644
--- a/crates/wit-component/src/printing.rs
+++ b/crates/wit-component/src/printing.rs
@@ -5,7 +5,6 @@ use std::mem;
 use wit_parser::*;
 
 // NB: keep in sync with `crates/wit-parser/src/ast/lex.rs`
-const PRINT_SEMICOLONS_DEFAULT: bool = true;
 const PRINT_F32_F64_DEFAULT: bool = false;
 
 /// A utility for printing WebAssembly interface definitions to a string.
@@ -19,7 +18,6 @@ pub struct WitPrinter {
     // Whether to print doc comments.
     emit_docs: bool,
 
-    print_semicolons: bool,
     print_f32_f64: bool,
 }
 
@@ -29,10 +27,6 @@ impl Default for WitPrinter {
             output: Default::default(),
             any_items: false,
             emit_docs: true,
-            print_semicolons: match std::env::var("WIT_REQUIRE_SEMICOLONS") {
-                Ok(s) => s == "1",
-                Err(_) => PRINT_SEMICOLONS_DEFAULT,
-            },
             print_f32_f64: match std::env::var("WIT_REQUIRE_F32_F64") {
                 Ok(s) => s == "1",
                 Err(_) => PRINT_F32_F64_DEFAULT,
@@ -119,9 +113,7 @@ impl WitPrinter {
     }
 
     fn print_semicolon(&mut self) {
-        if self.print_semicolons {
-            self.output.push_str(";");
-        }
+        self.output.push_str(";");
     }
 
     fn new_item(&mut self) {
diff --git a/crates/wit-parser/src/ast.rs b/crates/wit-parser/src/ast.rs
index 37e25046a2..9d8d7716a1 100644
--- a/crates/wit-parser/src/ast.rs
+++ b/crates/wit-parser/src/ast.rs
@@ -1624,7 +1624,6 @@ fn eat_id(tokens: &mut Tokenizer<'_>, expected: &str) -> Result<Span> {
 pub struct SourceMap {
     sources: Vec<Source>,
     offset: u32,
-    require_semicolons: Option<bool>,
     require_f32_f64: Option<bool>,
 }
 
@@ -1641,11 +1640,6 @@ impl SourceMap {
         SourceMap::default()
     }
 
-    #[doc(hidden)] // NB: only here for a transitionary period
-    pub fn set_require_semicolons(&mut self, enable: bool) {
-        self.require_semicolons = Some(enable);
-    }
-
     #[doc(hidden)] // NB: only here for a transitionary period
     pub fn set_require_f32_f64(&mut self, enable: bool) {
         self.require_f32_f64 = Some(enable);
@@ -1702,7 +1696,6 @@ impl SourceMap {
                 // passing through the source to get tokenized.
                 &src.contents[..src.contents.len() - 1],
                 src.offset,
-                self.require_semicolons,
                 self.require_f32_f64,
             )
             .with_context(|| format!("failed to tokenize path: {}", src.path.display()))?;
@@ -1872,7 +1865,7 @@ pub enum ParsedUsePath {
 }
 
 pub fn parse_use_path(s: &str) -> Result<ParsedUsePath> {
-    let mut tokens = Tokenizer::new(s, 0, Some(true), None)?;
+    let mut tokens = Tokenizer::new(s, 0, None)?;
     let path = UsePath::parse(&mut tokens)?;
     if tokens.next()?.is_some() {
         bail!("trailing tokens in path specifier");
diff --git a/crates/wit-parser/src/ast/lex.rs b/crates/wit-parser/src/ast/lex.rs
index 93ad600872..76886d8547 100644
--- a/crates/wit-parser/src/ast/lex.rs
+++ b/crates/wit-parser/src/ast/lex.rs
@@ -11,7 +11,6 @@ pub struct Tokenizer<'a> {
     input: &'a str,
     span_offset: u32,
     chars: CrlfFold<'a>,
-    require_semicolons: bool,
    require_f32_f64: bool,
 }
 
@@ -117,14 +116,12 @@ pub enum Error {
 }
 
 // NB: keep in sync with `crates/wit-component/src/printing.rs`.
-const REQUIRE_SEMICOLONS_BY_DEFAULT: bool = true;
 const REQUIRE_F32_F64_BY_DEFAULT: bool = false;
 
 impl<'a> Tokenizer<'a> {
     pub fn new(
         input: &'a str,
         span_offset: u32,
-        require_semicolons: Option<bool>,
         require_f32_f64: Option<bool>,
     ) -> Result<Tokenizer<'a>> {
         detect_invalid_input(input)?;
@@ -135,12 +132,6 @@ impl<'a> Tokenizer<'a> {
             chars: CrlfFold {
                 chars: input.char_indices(),
             },
-            require_semicolons: require_semicolons.unwrap_or_else(|| {
-                match std::env::var("WIT_REQUIRE_SEMICOLONS") {
-                    Ok(s) => s == "1",
-                    Err(_) => REQUIRE_SEMICOLONS_BY_DEFAULT,
-                }
-            }),
             require_f32_f64: require_f32_f64.unwrap_or_else(|| {
                 match std::env::var("WIT_REQUIRE_F32_F64") {
                     Ok(s) => s == "1",
@@ -154,11 +145,7 @@ impl<'a> Tokenizer<'a> {
     }
 
     pub fn expect_semicolon(&mut self) -> Result<()> {
-        if self.require_semicolons {
-            self.expect(Token::Semicolon)?;
-        } else {
-            self.eat(Token::Semicolon)?;
-        }
+        self.expect(Token::Semicolon)?;
         Ok(())
     }
 
@@ -668,7 +655,7 @@ fn test_validate_id() {
 #[test]
 fn test_tokenizer() {
     fn collect(s: &str) -> Result<Vec<Token>> {
-        let mut t = Tokenizer::new(s, 0, Some(true), None)?;
+        let mut t = Tokenizer::new(s, 0, None)?;
         let mut tokens = Vec::new();
         while let Some(token) = t.next()? {
            tokens.push(token.1);