Start implementing sql_quote!
emk committed Oct 21, 2023
1 parent 91548a4 commit f84ba7e
Showing 2 changed files with 140 additions and 2 deletions.
joinery_macros/src/lib.rs (91 additions, 2 deletions)
@@ -2,8 +2,8 @@ use std::borrow::Cow;

use darling::{util::Flag, FromField};
use proc_macro::TokenStream;
-use proc_macro2::{Span, TokenStream as TokenStream2};
-use quote::quote;
+use proc_macro2::{Delimiter, Span, TokenStream as TokenStream2, TokenTree};
+use quote::{quote, quote_spanned};
use syn::{spanned::Spanned, Field, Ident};

#[proc_macro_derive(Emit)]
@@ -221,3 +221,92 @@ struct EmitAttr {
/// Should we omit this field from our output?
skip: Flag,
}

#[proc_macro]
pub fn sql_quote(input: TokenStream) -> TokenStream {
let input = TokenStream2::from(input);

let mut sql_token_exprs = vec![];
for token in input {
emit_sql_token_exprs(&mut sql_token_exprs, token);
}
let output = quote! {
crate::tokenizer::TokenStream::from_tokens(&[#(#sql_token_exprs),*][..])
};
output.into()
}
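
As a concrete illustration (a sketch, not part of the diff): a call such as `sql_quote! { SELECT 1 }` would expand to roughly the following, with one token expression per flattened input token:

    crate::tokenizer::TokenStream::from_tokens(
        &[
            crate::tokenizer::Token::Ident(crate::tokenizer::Ident::new("SELECT")),
            crate::tokenizer::Token::Literal(crate::tokenizer::Literal::int(1)),
        ][..],
    )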

fn emit_sql_token_exprs(sql_token_exprs: &mut Vec<TokenStream2>, token: TokenTree) {
match token {
TokenTree::Group(group) => {
// Our SQL token stream is flat, so we flatten the group, emitting its
// open and close delimiters as plain `Punct` tokens around the contents.
let (open, close) = delimiter_pair(group.delimiter());
if let Some(open) = open {
sql_token_exprs.push(quote! {
crate::tokenizer::Token::Punct(crate::tokenizer::Punct::new(#open))
});
}
for token in group.stream() {
emit_sql_token_exprs(sql_token_exprs, token);
}
if let Some(close) = close {
sql_token_exprs.push(quote! {
crate::tokenizer::Token::Punct(crate::tokenizer::Punct::new(#close))
});
}
}
TokenTree::Ident(ident) => {
let ident_str = ident.to_string();
sql_token_exprs.push(quote! {
crate::tokenizer::Token::Ident(crate::tokenizer::Ident::new(#ident_str))
});
}
TokenTree::Punct(punct) => {
let punct_str = punct.to_string();
sql_token_exprs.push(quote! {
crate::tokenizer::Token::Punct(crate::tokenizer::Punct::new(#punct_str))
});
}
TokenTree::Literal(lit) => {
// There's probably a better way to do this.
let lit: syn::Lit = syn::parse_quote!(#lit);
match lit {
syn::Lit::Int(i) => {
sql_token_exprs.push(quote! {
crate::tokenizer::Token::Literal(crate::tokenizer::Literal::int(#i))
});
}
syn::Lit::Str(s) => {
sql_token_exprs.push(quote! {
crate::tokenizer::Token::Literal(crate::tokenizer::Literal::string(#s))
});
}
syn::Lit::Float(f) => {
sql_token_exprs.push(quote! {
crate::tokenizer::Token::Literal(crate::tokenizer::Literal::float(#f))
});
}
// syn::Lit::ByteStr(_) => todo!(),
// syn::Lit::Byte(_) => todo!(),
// syn::Lit::Char(_) => todo!(),
// syn::Lit::Bool(_) => todo!(),
// syn::Lit::Verbatim(_) => todo!(),
_ => {
sql_token_exprs.push(quote_spanned! {
lit.span() =>
compile_error!("unsupported literal type")
});
}
}
}
}
}

fn delimiter_pair(d: Delimiter) -> (Option<&'static str>, Option<&'static str>) {
match d {
Delimiter::Parenthesis => (Some("("), Some(")")),
Delimiter::Brace => (Some("{"), Some("}")),
Delimiter::Bracket => (Some("["), Some("]")),
Delimiter::None => (None, None),
}
}
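
The mapping in action (illustrative asserts; `Delimiter::None` marks the invisible groups that macro expansion can introduce, so it contributes no punctuation):

    assert_eq!(delimiter_pair(Delimiter::Parenthesis), (Some("("), Some(")")));
    assert_eq!(delimiter_pair(Delimiter::None), (None, None));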
src/tokenizer.rs (49 additions, 0 deletions)
@@ -327,6 +327,32 @@ pub struct Literal {
pub value: LiteralValue,
}

impl Literal {
/// Construct a literal containing an integer.
pub fn int(i: i64) -> Self {
Self {
token: RawToken::new(&i.to_string()),
value: LiteralValue::Int64(i),
}
}

/// Construct a literal containing a floating-point number.
pub fn float(d: f64) -> Self {
Self {
token: RawToken::new(&d.to_string()),
value: LiteralValue::Float64(d),
}
}

/// Construct a literal containing a string.
pub fn string(s: &str) -> Self {
Self {
token: RawToken::new(&BigQueryString(s).to_string()),
value: LiteralValue::String(s.to_owned()),
}
}
}
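
A sketch of what these constructors produce (the comment values are inferred from the code above; the exact escaped form produced via `BigQueryString` is an assumption):

    let n = Literal::int(1);       // raw token text "1", value LiteralValue::Int64(1)
    let x = Literal::float(1.5);   // raw token text "1.5", value LiteralValue::Float64(1.5)
    let s = Literal::string("hi"); // raw token text is the BigQuery-escaped form, e.g. 'hi'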

/// A literal value.
///
/// Does not include literals like `TRUE`, `FALSE` or `NULL`, which are parsed
@@ -377,6 +403,14 @@ pub struct TokenStream {
}

impl TokenStream {
/// Create from tokens.
#[allow(dead_code)]
pub fn from_tokens<Tokens: Into<Vec<Token>>>(tokens: Tokens) -> Self {
Self {
tokens: tokens.into(),
}
}
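
Because `from_tokens` takes any `Into<Vec<Token>>`, it accepts an owned `Vec` as well as a borrowed slice (given `Token: Clone`), the latter being the form `sql_quote!` generates:

    let ts = TokenStream::from_tokens(vec![Token::Literal(Literal::int(1))]);
    let ts2 = TokenStream::from_tokens(&[Token::Ident(Ident::new("SELECT"))][..]);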

/// Parse a literal.
pub fn literal(&self, pos: usize) -> RuleResult<Literal> {
match self.tokens.get(pos) {
@@ -853,6 +887,8 @@

#[cfg(test)]
mod test {
use joinery_macros::sql_quote;

use super::*;

#[test]
@@ -976,4 +1012,17 @@ mod test {
};
assert_eq!(parsed, expected);
}

#[test]
fn sql_quote_builds_a_token_stream() {
sql_quote! {
SELECT
generate_uuid() AS id,
"hello" AS message,
1 AS n,
1.0 AS x,
true AS t,
false AS f,
};
}
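
// Aside: `true` and `false` reach `emit_sql_token_exprs` as `TokenTree::Ident`,
// not `TokenTree::Literal` (Rust token streams lex the boolean keywords as
// identifiers), so they take the `Ident` arm and never hit the commented-out
// `syn::Lit::Bool` case above.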
}
