@@ -149,6 +149,14 @@ pub enum Token {
149
149
ExclamationMarkTilde ,
150
150
/// `!~*` , a case insensitive not match regular expression operator in PostgreSQL
151
151
ExclamationMarkTildeAsterisk ,
152
+ /// `~~`, a case sensitive match pattern operator in PostgreSQL
153
+ DoubleTilde ,
154
+ /// `~~*`, a case insensitive match pattern operator in PostgreSQL
155
+ DoubleTildeAsterisk ,
156
+ /// `!~~`, a case sensitive not match pattern operator in PostgreSQL
157
+ ExclamationMarkDoubleTilde ,
158
+ /// `!~~*`, a case insensitive not match pattern operator in PostgreSQL
159
+ ExclamationMarkDoubleTildeAsterisk ,
152
160
/// `<<`, a bitwise shift left operator in PostgreSQL
153
161
ShiftLeft ,
154
162
/// `>>`, a bitwise shift right operator in PostgreSQL
@@ -249,6 +257,10 @@ impl fmt::Display for Token {
249
257
Token :: TildeAsterisk => f. write_str ( "~*" ) ,
250
258
Token :: ExclamationMarkTilde => f. write_str ( "!~" ) ,
251
259
Token :: ExclamationMarkTildeAsterisk => f. write_str ( "!~*" ) ,
260
+ Token :: DoubleTilde => f. write_str ( "~~" ) ,
261
+ Token :: DoubleTildeAsterisk => f. write_str ( "~~*" ) ,
262
+ Token :: ExclamationMarkDoubleTilde => f. write_str ( "!~~" ) ,
263
+ Token :: ExclamationMarkDoubleTildeAsterisk => f. write_str ( "!~~*" ) ,
252
264
Token :: AtSign => f. write_str ( "@" ) ,
253
265
Token :: CaretAt => f. write_str ( "^@" ) ,
254
266
Token :: ShiftLeft => f. write_str ( "<<" ) ,
@@ -903,6 +915,16 @@ impl<'a> Tokenizer<'a> {
903
915
match chars. peek ( ) {
904
916
Some ( '*' ) => self
905
917
. consume_and_return ( chars, Token :: ExclamationMarkTildeAsterisk ) ,
918
+ Some ( '~' ) => {
919
+ chars. next ( ) ;
920
+ match chars. peek ( ) {
921
+ Some ( '*' ) => self . consume_and_return (
922
+ chars,
923
+ Token :: ExclamationMarkDoubleTildeAsterisk ,
924
+ ) ,
925
+ _ => Ok ( Some ( Token :: ExclamationMarkDoubleTilde ) ) ,
926
+ }
927
+ }
906
928
_ => Ok ( Some ( Token :: ExclamationMarkTilde ) ) ,
907
929
}
908
930
}
@@ -974,6 +996,15 @@ impl<'a> Tokenizer<'a> {
974
996
chars. next ( ) ; // consume
975
997
match chars. peek ( ) {
976
998
Some ( '*' ) => self . consume_and_return ( chars, Token :: TildeAsterisk ) ,
999
+ Some ( '~' ) => {
1000
+ chars. next ( ) ;
1001
+ match chars. peek ( ) {
1002
+ Some ( '*' ) => {
1003
+ self . consume_and_return ( chars, Token :: DoubleTildeAsterisk )
1004
+ }
1005
+ _ => Ok ( Some ( Token :: DoubleTilde ) ) ,
1006
+ }
1007
+ }
977
1008
_ => Ok ( Some ( Token :: Tilde ) ) ,
978
1009
}
979
1010
}
@@ -1994,6 +2025,44 @@ mod tests {
1994
2025
compare ( expected, tokens) ;
1995
2026
}
1996
2027
2028
#[test]
fn tokenize_pg_like_match() {
    // Exercise all four PostgreSQL pattern-match operators in one statement:
    // `~~` (LIKE), `~~*` (ILIKE), `!~~` (NOT LIKE), `!~~*` (NOT ILIKE).
    let sql = "SELECT col ~~ '_a%', col ~~* '_a%', col !~~ '_a%', col !~~* '_a%'";
    let dialect = GenericDialect {};
    let tokens = Tokenizer::new(&dialect, sql).tokenize().unwrap();

    // The expected stream is `SELECT` followed by four comma-separated
    // `col <op> '_a%'` comparisons; build the repeated groups in a loop
    // rather than spelling out the whole literal vector.
    let mut expected = vec![
        Token::make_keyword("SELECT"),
        Token::Whitespace(Whitespace::Space),
    ];
    let ops = [
        Token::DoubleTilde,
        Token::DoubleTildeAsterisk,
        Token::ExclamationMarkDoubleTilde,
        Token::ExclamationMarkDoubleTildeAsterisk,
    ];
    for (i, op) in ops.into_iter().enumerate() {
        if i > 0 {
            expected.push(Token::Comma);
            expected.push(Token::Whitespace(Whitespace::Space));
        }
        expected.push(Token::make_word("col", None));
        expected.push(Token::Whitespace(Whitespace::Space));
        expected.push(op);
        expected.push(Token::Whitespace(Whitespace::Space));
        expected.push(Token::SingleQuotedString("_a%".into()));
    }

    compare(expected, tokens);
}
2065
+
1997
2066
#[ test]
1998
2067
fn tokenize_quoted_identifier ( ) {
1999
2068
let sql = r#" "a "" b" "a """ "c """"" "# ;
0 commit comments