@@ -851,7 +851,7 @@ impl<'a> Tokenizer<'a> {
     /// Create a new SQL tokenizer for the specified SQL statement
     ///
     /// ```
-    /// # use sqlparser::tokenizer::{Token, Whitespace, Tokenizer};
+    /// # use sqlparser::tokenizer::{Token, Tokenizer};
     /// # use sqlparser::dialect::GenericDialect;
     /// # let dialect = GenericDialect{};
     /// let query = r#"SELECT 'foo'"#;
@@ -861,7 +861,6 @@ impl<'a> Tokenizer<'a> {
     ///
     /// assert_eq!(tokens, vec![
     ///     Token::make_word("SELECT", None),
-    ///     Token::Whitespace(Whitespace::Space),
     ///     Token::SingleQuotedString("foo".to_string()),
     /// ]);
     pub fn new(dialect: &'a dyn Dialect, query: &'a str) -> Self {
@@ -1673,7 +1672,7 @@ impl<'a> Tokenizer<'a> {
                 // regular identifier starting with an "E" or "e"
                 let s = self.tokenize_word("~", chars, prev_keyword)?;
                 Ok(Some(Token::make_word(s, None)))
-            }
+            }
             _ => self.start_binop(chars, "~", Token::Tilde),
         }
     }
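For context, here is a minimal, self-contained sketch of the usage the updated doc example describes. It is not part of the commit: the `tokenize()` call and the surrounding `main` scaffolding are not visible in the hunks above and are assumed from the sqlparser crate's public API; the expected token list simply mirrors the updated assertion, which no longer includes a `Whitespace` token.

```rust
// Sketch of the updated doc example as a standalone program (assumptions noted above).
use sqlparser::dialect::GenericDialect;
use sqlparser::tokenizer::{Token, Tokenizer};

fn main() {
    let dialect = GenericDialect {};
    let query = r#"SELECT 'foo'"#;

    // Tokenizer::new is the constructor shown in the diff; tokenize() is assumed
    // to be the method the hidden doctest lines call to produce `tokens`.
    let mut tokenizer = Tokenizer::new(&dialect, query);
    let tokens = tokenizer.tokenize().unwrap();

    // Matches the updated assertion: only the word and the quoted string,
    // with no Token::Whitespace entry between them.
    assert_eq!(
        tokens,
        vec![
            Token::make_word("SELECT", None),
            Token::SingleQuotedString("foo".to_string()),
        ]
    );
}
```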