@@ -95,15 +95,7 @@ impl Parser {
                     "INSERT" => Ok(self.parse_insert()?),
                     "ALTER" => Ok(self.parse_alter()?),
                     "COPY" => Ok(self.parse_copy()?),
-                    "TRUE" => {
-                        self.prev_token();
-                        self.parse_sql_value()
-                    }
-                    "FALSE" => {
-                        self.prev_token();
-                        self.parse_sql_value()
-                    }
-                    "NULL" => {
+                    "TRUE" | "FALSE" | "NULL" => {
                         self.prev_token();
                         self.parse_sql_value()
                     }
@@ -116,7 +108,7 @@ impl Parser {
                         self.parse_cast_expression()
                     } else {
                         match self.peek_token() {
-                            Some(Token::LParen) => self.parse_function_or_pg_cast(&id),
+                            Some(Token::LParen) => self.parse_function(&id),
                             Some(Token::Period) => {
                                 let mut id_parts: Vec<String> = vec![id];
                                 while self.peek_token() == Some(Token::Period) {
@@ -136,19 +128,10 @@ impl Parser {
                         }
                     }
                 }
-                Token::Number(_) => {
-                    self.prev_token();
-                    self.parse_sql_value()
-                }
-                Token::String(_) => {
-                    self.prev_token();
-                    self.parse_sql_value()
-                }
-                Token::SingleQuotedString(_) => {
-                    self.prev_token();
-                    self.parse_sql_value()
-                }
-                Token::DoubleQuotedString(_) => {
+                Token::Number(_)
+                | Token::String(_)
+                | Token::SingleQuotedString(_)
+                | Token::DoubleQuotedString(_) => {
                     self.prev_token();
                     self.parse_sql_value()
                 }
@@ -168,15 +151,6 @@ impl Parser {
         }
     }

-    pub fn parse_function_or_pg_cast(&mut self, id: &str) -> Result<ASTNode, ParserError> {
-        let func = self.parse_function(&id)?;
-        if let Some(Token::DoubleColon) = self.peek_token() {
-            self.parse_pg_cast(func)
-        } else {
-            Ok(func)
-        }
-    }
-
     pub fn parse_function(&mut self, id: &str) -> Result<ASTNode, ParserError> {
         self.consume_token(&Token::LParen)?;
         if let Ok(true) = self.consume_token(&Token::RParen) {
@@ -241,25 +215,13 @@ impl Parser {
         })
     }

-    /// Parse a postgresql casting style which is in the form or expr::datatype
+    /// Parse a postgresql casting style which is in the form of `expr::datatype`
     pub fn parse_pg_cast(&mut self, expr: ASTNode) -> Result<ASTNode, ParserError> {
         let _ = self.consume_token(&Token::DoubleColon)?;
-        let datatype = if let Ok(data_type) = self.parse_data_type() {
-            Ok(data_type)
-        } else if let Ok(table_name) = self.parse_tablename() {
-            Ok(SQLType::Custom(table_name))
-        } else {
-            parser_err!("Expecting datatype or identifier")
-        };
-        let pg_cast = ASTNode::SQLCast {
+        Ok(ASTNode::SQLCast {
             expr: Box::new(expr),
-            data_type: datatype?,
-        };
-        if let Some(Token::DoubleColon) = self.peek_token() {
-            self.parse_pg_cast(pg_cast)
-        } else {
-            Ok(pg_cast)
-        }
+            data_type: self.parse_data_type()?,
+        })
     }

     /// Parse an expression infix (typically an operator)
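Note: for reference, a rough sketch of the value the simplified parse_pg_cast builds for a single cast such as '1'::int. The ASTNode::SQLValue, Value::SingleQuotedString and SQLType::Int names are assumptions about the rest of the crate (they do not appear in this diff), and chained casts like expr::a::b are presumably left to the caller's expression loop now that the recursive DoubleColon check is gone:

    // hypothetical expected AST for `'1'::int`, under the assumptions above
    let expected = ASTNode::SQLCast {
        expr: Box::new(ASTNode::SQLValue(Value::SingleQuotedString("1".to_string()))),
        data_type: SQLType::Int,
    };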
@@ -362,11 +324,17 @@ impl Parser {
         }
     }

+    /// Return first non-whitespace token that has not yet been processed
     pub fn peek_token(&self) -> Option<Token> {
-        self.peek_token_skip_whitespace()
+        if let Some(n) = self.til_non_whitespace() {
+            self.token_at(n)
+        } else {
+            None
+        }
     }

-    pub fn skip_whitespace(&mut self) -> Option<Token> {
+    /// Get the next token skipping whitespace and increment the token index
+    pub fn next_token(&mut self) -> Option<Token> {
         loop {
             match self.next_token_no_skip() {
                 Some(Token::Whitespace(_)) => {
@@ -406,19 +374,6 @@ impl Parser {
         }
     }

-    pub fn peek_token_skip_whitespace(&self) -> Option<Token> {
-        if let Some(n) = self.til_non_whitespace() {
-            self.token_at(n)
-        } else {
-            None
-        }
-    }
-
-    /// Get the next token skipping whitespace and increment the token index
-    pub fn next_token(&mut self) -> Option<Token> {
-        self.skip_whitespace()
-    }
-
     pub fn next_token_no_skip(&mut self) -> Option<Token> {
         if self.index < self.tokens.len() {
             self.index = self.index + 1;
@@ -428,9 +383,9 @@ impl Parser {
         }
     }

-    /// if prev token is whitespace skip it
-    /// if prev token is not whitespace skipt it as well
-    pub fn prev_token_skip_whitespace(&mut self) -> Option<Token> {
+    /// Push back the last one non-whitespace token
+    pub fn prev_token(&mut self) -> Option<Token> {
+        // TODO: returned value is unused (available via peek_token)
         loop {
             match self.prev_token_no_skip() {
                 Some(Token::Whitespace(_)) => {
@@ -443,12 +398,8 @@ impl Parser {
         }
     }

-    pub fn prev_token(&mut self) -> Option<Token> {
-        self.prev_token_skip_whitespace()
-    }
-
     /// Get the previous token and decrement the token index
-    pub fn prev_token_no_skip(&mut self) -> Option<Token> {
+    fn prev_token_no_skip(&mut self) -> Option<Token> {
         if self.index > 0 {
             self.index = self.index - 1;
             Some(self.tokens[self.index].clone())
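Note: the four hunks above fold the *_skip_whitespace helpers into peek_token/next_token/prev_token themselves. As a mental model only (a standalone toy over a Vec of strings, with the whitespace-skipping loop elided; not the parser's real Token type):

    struct Cursor<'a> {
        tokens: Vec<&'a str>,
        index: usize,
    }

    impl<'a> Cursor<'a> {
        // peek_token: look at the upcoming token without moving the index
        fn peek_token(&self) -> Option<&'a str> {
            self.tokens.get(self.index).copied()
        }

        // next_token: return the token at `index` and advance past it
        fn next_token(&mut self) -> Option<&'a str> {
            let t = self.tokens.get(self.index).copied();
            if t.is_some() {
                self.index += 1;
            }
            t
        }

        // prev_token: step back so the last token is seen again by the next read,
        // which is what the "TRUE" | "FALSE" | "NULL" arm at the top of this diff
        // relies on before calling parse_sql_value()
        fn prev_token(&mut self) {
            if self.index > 0 {
                self.index -= 1;
            }
        }
    }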
@@ -731,30 +682,13 @@ impl Parser {
                     "NULL" => Ok(Value::Null),
                     _ => return parser_err!(format!("No value parser for keyword {}", k)),
                 },
-                //TODO: parse the timestamp here
+                //TODO: parse the timestamp here (see parse_timestamp_value())
                 Token::Number(ref n) if n.contains(".") => match n.parse::<f64>() {
                     Ok(n) => Ok(Value::Double(n)),
-                    Err(e) => {
-                        let index = self.index;
-                        self.prev_token();
-                        if let Ok(timestamp) = self.parse_timestamp_value() {
-                            println!("timstamp: {:?}", timestamp);
-                            Ok(timestamp)
-                        } else {
-                            self.index = index;
-                            parser_err!(format!("Could not parse '{}' as i64: {}", n, e))
-                        }
-                    }
+                    Err(e) => parser_err!(format!("Could not parse '{}' as i64: {}", n, e)),
                 },
                 Token::Number(ref n) => match n.parse::<i64>() {
-                    Ok(n) => {
-                        // if let Some(Token::Minus) = self.peek_token() {
-                        //     self.prev_token();
-                        //     self.parse_timestamp_value()
-                        // } else {
-                        Ok(Value::Long(n))
-                        // }
-                    }
+                    Ok(n) => Ok(Value::Long(n)),
                     Err(e) => parser_err!(format!("Could not parse '{}' as i64: {}", n, e)),
                 },
                 Token::Identifier(id) => Ok(Value::String(id.to_string())),
@@ -782,13 +716,13 @@ impl Parser {
         }
     }

-    /// Parse a literal integer/long
+    /// Parse a literal double
     pub fn parse_literal_double(&mut self) -> Result<f64, ParserError> {
         match self.next_token() {
             Some(Token::Number(s)) => s.parse::<f64>().map_err(|e| {
-                ParserError::ParserError(format!("Could not parse '{}' as i64: {}", s, e))
+                ParserError::ParserError(format!("Could not parse '{}' as f64: {}", s, e))
             }),
-            other => parser_err!(format!("Expected literal int, found {:?}", other)),
+            other => parser_err!(format!("Expected literal number, found {:?}", other)),
         }
     }

@@ -869,19 +803,17 @@ impl Parser {
         self.consume_token(&Token::Colon)?;
         let min = self.parse_literal_int()?;
         self.consume_token(&Token::Colon)?;
+        // On one hand, the SQL specs defines <seconds fraction> ::= <unsigned integer>,
+        // so it would be more correct to parse it as such
         let sec = self.parse_literal_double()?;
-        let _ = (sec.fract() * 1000.0).round();
-        if let Ok(true) = self.consume_token(&Token::Period) {
-            let ms = self.parse_literal_int()?;
-            Ok(NaiveTime::from_hms_milli(
-                hour as u32,
-                min as u32,
-                sec as u32,
-                ms as u32,
-            ))
-        } else {
-            Ok(NaiveTime::from_hms(hour as u32, min as u32, sec as u32))
-        }
+        // On the other, chrono only supports nanoseconds, which should(?) fit in seconds-as-f64...
+        let nanos = (sec.fract() * 1_000_000_000.0).round();
+        Ok(NaiveTime::from_hms_nano(
+            hour as u32,
+            min as u32,
+            sec as u32,
+            nanos as u32,
+        ))
     }

     /// Parse a SQL datatype (in the context of a CREATE TABLE statement for example)
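Note: a quick sanity check on the fractional-seconds conversion introduced above, written as a standalone sketch against chrono (the 12:34:56.789 literal is only an illustrative value):

    use chrono::NaiveTime;

    // "12:34:56.789" -> parse_literal_double() would yield sec = 56.789
    let sec: f64 = 56.789;
    let nanos = (sec.fract() * 1_000_000_000.0).round(); // ~789_000_000
    let t = NaiveTime::from_hms_nano(12, 34, sec as u32, nanos as u32);
    assert_eq!(t, NaiveTime::from_hms_milli(12, 34, 56, 789));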
@@ -973,13 +905,10 @@ impl Parser {
                 }
                 _ => parser_err!(format!("Invalid data type '{:?}'", k)),
             },
-            Some(Token::Identifier(id)) => {
-                if let Ok(true) = self.consume_token(&Token::Period) {
-                    let ids = self.parse_tablename()?;
-                    Ok(SQLType::Custom(format!("{}.{}", id, ids)))
-                } else {
-                    Ok(SQLType::Custom(id))
-                }
+            Some(Token::Identifier(_)) => {
+                self.prev_token();
+                let type_name = self.parse_tablename()?; // TODO: this actually reads a possibly schema-qualified name of a (custom) type
+                Ok(SQLType::Custom(type_name))
             }
             other => parser_err!(format!("Invalid data type: '{:?}'", other)),
         }
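Note: a hedged usage sketch for the new Identifier arm: by pushing the identifier back and deferring to parse_tablename(), a schema-qualified custom type should come back as a single dotted string, assuming parse_tablename() joins the identifier parts with '.':

    // e.g. a column declared as `public.mytype` would presumably yield
    let expected = SQLType::Custom("public.mytype".to_string());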