@@ -107,15 +107,14 @@ fn whitespace_tokens(input: &str) -> VecDeque<Token> {
107107/// Turn a string of potentially valid sql code into a list of tokens, including their range in the source text.
108108///
109109/// The implementation is primarily using libpg_query's `scan` method, and fills in the gaps with tokens that are not parsed by the library, e.g. whitespace.
110- pub fn lex(text: &str) -> Vec<Token> {
110+ pub fn lex(text: &str) -> pg_query::Result<Vec<Token>> {
111111     let mut whitespace_tokens = whitespace_tokens(text);
112112
113113     // tokens from pg_query.rs
114-     let mut pglt_query_tokens = match pg_query::scan(text) {
115-         Ok(scanned) => VecDeque::from(scanned.tokens),
116-         // this _should_ never fail
117-         _ => panic!("pg_query::scan failed"),
118-     };
114+     let mut pglt_query_tokens = pg_query::scan(text)?
115+         .tokens
116+         .into_iter()
117+         .collect::<VecDeque<_>>();
119118
120119 // merge the two token lists
121120     let mut tokens: Vec<Token> = Vec::new();
@@ -173,7 +172,7 @@ pub fn lex(text: &str) -> Vec<Token> {
173172 ) ;
174173 }
175174
176-     tokens
175+     Ok(tokens)
177176 }
178177
179178 #[cfg(test)]
@@ -183,36 +182,36 @@ mod tests {
183182     #[test]
184183     fn test_special_chars() {
185184         let input = "insert into c (name, full_name) values ('Å', 1);";
186-         let tokens = lex(input);
185+         let tokens = lex(input).unwrap();
187186         assert!(!tokens.is_empty());
188187     }
189188
190189     #[test]
191190     fn test_tab_tokens() {
192191         let input = "select\t 1";
193-         let tokens = lex(input);
192+         let tokens = lex(input).unwrap();
194193         assert_eq!(tokens[1].kind, SyntaxKind::Tab);
195194     }
196195
197196     #[test]
198197     fn test_newline_tokens() {
199198         let input = "select\n 1";
200-         let tokens = lex(input);
199+         let tokens = lex(input).unwrap();
201200         assert_eq!(tokens[1].kind, SyntaxKind::Newline);
202201     }
203202
204203     #[test]
205204     fn test_whitespace_tokens() {
206205         let input = "select 1";
207-         let tokens = lex(input);
206+         let tokens = lex(input).unwrap();
208207         assert_eq!(tokens[1].kind, SyntaxKind::Whitespace);
209208     }
210209
211210     #[test]
212211     fn test_lexer() {
213212         let input = "select 1; \n -- some comment \n select 2\t ";
214213
215-         let tokens = lex(input);
214+         let tokens = lex(input).unwrap();
216215         let mut tokens_iter = tokens.iter();
217216
218217         let token = tokens_iter.next().unwrap();
0 commit comments