Skip to content

Commit c86960a

Browse files
Alexander Beedie (alexander-beedie)
authored and committed
Add a next_token_ref to help avoid unnecessary copies
1 parent 0b589b2 commit c86960a

2 files changed

Lines changed: 88 additions & 80 deletions

File tree

src/dialect/snowflake.rs

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -750,7 +750,7 @@ fn parse_alter_external_table(parser: &mut Parser) -> Result<Statement, ParserEr
750750
// Optional subpath for refreshing specific partitions
751751
let subpath = match parser.peek_token().token {
752752
Token::SingleQuotedString(s) => {
753-
parser.next_token();
753+
parser.advance_token();
754754
Some(s)
755755
}
756756
_ => None,
@@ -1151,14 +1151,14 @@ pub fn parse_create_database(
11511151
pub fn parse_storage_serialization_policy(
11521152
parser: &mut Parser,
11531153
) -> Result<StorageSerializationPolicy, ParserError> {
1154-
let next_token = parser.next_token();
1155-
match &next_token.token {
1156-
Token::Word(w) => match w.keyword {
1157-
Keyword::COMPATIBLE => Ok(StorageSerializationPolicy::Compatible),
1158-
Keyword::OPTIMIZED => Ok(StorageSerializationPolicy::Optimized),
1159-
_ => parser.expected("storage_serialization_policy", next_token),
1160-
},
1161-
_ => parser.expected("storage_serialization_policy", next_token),
1154+
let keyword = match &parser.next_token_ref().token {
1155+
Token::Word(w) => w.keyword,
1156+
_ => Keyword::NoKeyword,
1157+
};
1158+
match keyword {
1159+
Keyword::COMPATIBLE => Ok(StorageSerializationPolicy::Compatible),
1160+
Keyword::OPTIMIZED => Ok(StorageSerializationPolicy::Optimized),
1161+
_ => parser.expected_at("storage_serialization_policy", parser.get_current_index()),
11621162
}
11631163
}
11641164

@@ -1395,7 +1395,7 @@ pub fn parse_copy_into(parser: &mut Parser) -> Result<Statement, ParserError> {
13951395
// VALIDATION MODE
13961396
} else if parser.parse_keyword(Keyword::VALIDATION_MODE) {
13971397
parser.expect_token(&Token::Eq)?;
1398-
validation_mode = Some(parser.next_token().token.to_string());
1398+
validation_mode = Some(parser.next_token_ref().to_string());
13991399
// COPY OPTIONS
14001400
} else if parser.parse_keyword(Keyword::COPY_OPTIONS) {
14011401
parser.expect_token(&Token::Eq)?;
@@ -1553,7 +1553,7 @@ fn parse_stage_params(parser: &mut Parser) -> Result<StageParamsObject, ParserEr
15531553
// STORAGE INTEGRATION
15541554
if parser.parse_keyword(Keyword::STORAGE_INTEGRATION) {
15551555
parser.expect_token(&Token::Eq)?;
1556-
storage_integration = Some(parser.next_token().token.to_string());
1556+
storage_integration = Some(parser.next_token_ref().to_string());
15571557
}
15581558

15591559
// ENDPOINT

src/parser/mod.rs

Lines changed: 77 additions & 69 deletions
Original file line numberDiff line numberDiff line change
@@ -2529,15 +2529,15 @@ impl<'a> Parser<'a> {
25292529

25302530
/// Parse window frame `UNITS` clause: `ROWS`, `RANGE`, or `GROUPS`.
25312531
pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2532-
let next_token = self.next_token();
2533-
match &next_token.token {
2534-
Token::Word(w) => match w.keyword {
2535-
Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2536-
Keyword::RANGE => Ok(WindowFrameUnits::Range),
2537-
Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2538-
_ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2539-
},
2540-
_ => self.expected("ROWS, RANGE, GROUPS", next_token),
2532+
let keyword = match &self.next_token_ref().token {
2533+
Token::Word(w) => w.keyword,
2534+
_ => Keyword::NoKeyword,
2535+
};
2536+
match keyword {
2537+
Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2538+
Keyword::RANGE => Ok(WindowFrameUnits::Range),
2539+
Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2540+
_ => self.expected_at("ROWS, RANGE, GROUPS", self.get_current_index()),
25412541
}
25422542
}
25432543

@@ -2987,15 +2987,15 @@ impl<'a> Parser<'a> {
29872987
///
29882988
/// See [TrimWhereField]
29892989
pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
2990-
let next_token = self.next_token();
2991-
match &next_token.token {
2992-
Token::Word(w) => match w.keyword {
2993-
Keyword::BOTH => Ok(TrimWhereField::Both),
2994-
Keyword::LEADING => Ok(TrimWhereField::Leading),
2995-
Keyword::TRAILING => Ok(TrimWhereField::Trailing),
2996-
_ => self.expected("trim_where field", next_token)?,
2997-
},
2998-
_ => self.expected("trim_where field", next_token),
2990+
let keyword = match &self.next_token_ref().token {
2991+
Token::Word(w) => w.keyword,
2992+
_ => Keyword::NoKeyword,
2993+
};
2994+
match keyword {
2995+
Keyword::BOTH => Ok(TrimWhereField::Both),
2996+
Keyword::LEADING => Ok(TrimWhereField::Leading),
2997+
Keyword::TRAILING => Ok(TrimWhereField::Trailing),
2998+
_ => self.expected_at("trim_where field", self.get_current_index()),
29992999
}
30003000
}
30013001

@@ -3438,7 +3438,7 @@ impl<'a> Parser<'a> {
34383438
if self.peek_token_ref().token != Token::Lt {
34393439
return Ok((Default::default(), false.into()));
34403440
}
3441-
self.next_token();
3441+
self.advance_token();
34423442

34433443
let mut field_defs = vec![];
34443444
let trailing_bracket = loop {
@@ -3680,11 +3680,11 @@ impl<'a> Parser<'a> {
36803680
let trailing_bracket = if !trailing_bracket.0 {
36813681
match &self.peek_token_ref().token {
36823682
Token::Gt => {
3683-
self.next_token();
3683+
self.advance_token();
36843684
false.into()
36853685
}
36863686
Token::ShiftRight => {
3687-
self.next_token();
3687+
self.advance_token();
36883688
true.into()
36893689
}
36903690
_ => return self.expected_ref(">", self.peek_token_ref()),
@@ -4468,6 +4468,14 @@ impl<'a> Parser<'a> {
44684468
self.get_current_token().clone()
44694469
}
44704470

4471+
/// Advances to the next non-whitespace token and returns a reference to it.
4472+
///
4473+
/// See [`Self::next_token`] for a version that returns a cloned token.
4474+
pub fn next_token_ref(&mut self) -> &TokenWithSpan {
4475+
self.advance_token();
4476+
self.get_current_token()
4477+
}
4478+
44714479
/// Returns the index of the current token
44724480
///
44734481
/// This can be used with APIs that expect an index, such as
@@ -5341,7 +5349,7 @@ impl<'a> Parser<'a> {
53415349
match &self.peek_token_ref().token {
53425350
Token::Word(word) => match word.keyword {
53435351
Keyword::AS => {
5344-
self.next_token();
5352+
self.advance_token();
53455353
Ok((true, self.parse_query()?))
53465354
}
53475355
_ => Ok((false, self.parse_query()?)),
@@ -6317,19 +6325,19 @@ impl<'a> Parser<'a> {
63176325

63186326
/// Parse a file format for external tables.
63196327
pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
6320-
let next_token = self.next_token();
6321-
match &next_token.token {
6322-
Token::Word(w) => match w.keyword {
6323-
Keyword::AVRO => Ok(FileFormat::AVRO),
6324-
Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
6325-
Keyword::ORC => Ok(FileFormat::ORC),
6326-
Keyword::PARQUET => Ok(FileFormat::PARQUET),
6327-
Keyword::RCFILE => Ok(FileFormat::RCFILE),
6328-
Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
6329-
Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
6330-
_ => self.expected("fileformat", next_token),
6331-
},
6332-
_ => self.expected("fileformat", next_token),
6328+
let keyword = match &self.next_token_ref().token {
6329+
Token::Word(w) => w.keyword,
6330+
_ => Keyword::NoKeyword,
6331+
};
6332+
match keyword {
6333+
Keyword::AVRO => Ok(FileFormat::AVRO),
6334+
Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
6335+
Keyword::ORC => Ok(FileFormat::ORC),
6336+
Keyword::PARQUET => Ok(FileFormat::PARQUET),
6337+
Keyword::RCFILE => Ok(FileFormat::RCFILE),
6338+
Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
6339+
Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
6340+
_ => self.expected_at("fileformat", self.get_current_index()),
63336341
}
63346342
}
63356343

@@ -6343,16 +6351,16 @@ impl<'a> Parser<'a> {
63436351

63446352
/// Parse an `ANALYZE FORMAT`.
63456353
pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
6346-
let next_token = self.next_token();
6347-
match &next_token.token {
6348-
Token::Word(w) => match w.keyword {
6349-
Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
6350-
Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
6351-
Keyword::JSON => Ok(AnalyzeFormat::JSON),
6352-
Keyword::TREE => Ok(AnalyzeFormat::TREE),
6353-
_ => self.expected("fileformat", next_token),
6354-
},
6355-
_ => self.expected("fileformat", next_token),
6354+
let keyword = match &self.next_token_ref().token {
6355+
Token::Word(w) => w.keyword,
6356+
_ => Keyword::NoKeyword,
6357+
};
6358+
match keyword {
6359+
Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
6360+
Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
6361+
Keyword::JSON => Ok(AnalyzeFormat::JSON),
6362+
Keyword::TREE => Ok(AnalyzeFormat::TREE),
6363+
_ => self.expected_at("fileformat", self.get_current_index()),
63566364
}
63576365
}
63586366

@@ -6466,9 +6474,9 @@ impl<'a> Parser<'a> {
64666474
Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
64676475
_ => {
64686476
self.prev_token();
6469-
let found = self.next_token();
6477+
self.next_token_ref();
64706478
return self
6471-
.expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
6479+
.expected_at("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", self.get_current_index());
64726480
}
64736481
},
64746482
)
@@ -6488,8 +6496,8 @@ impl<'a> Parser<'a> {
64886496
Keyword::INVOKER => CreateViewSecurity::Invoker,
64896497
_ => {
64906498
self.prev_token();
6491-
let found = self.next_token();
6492-
return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
6499+
self.next_token_ref();
6500+
return self.expected_at("DEFINER or INVOKER after SQL SECURITY", self.get_current_index());
64936501
}
64946502
},
64956503
)
@@ -7696,11 +7704,11 @@ impl<'a> Parser<'a> {
76967704
let (declare_type, data_type) = match &self.peek_token_ref().token {
76977705
Token::Word(w) => match w.keyword {
76987706
Keyword::CURSOR => {
7699-
self.next_token();
7707+
self.advance_token();
77007708
(Some(DeclareType::Cursor), None)
77017709
}
77027710
Keyword::AS => {
7703-
self.next_token();
7711+
self.advance_token();
77047712
(None, Some(self.parse_data_type()?))
77057713
}
77067714
_ => (None, Some(self.parse_data_type()?)),
@@ -7709,7 +7717,7 @@ impl<'a> Parser<'a> {
77097717
};
77107718

77117719
let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
7712-
self.next_token();
7720+
self.advance_token();
77137721
let query = Some(self.parse_query()?);
77147722
(query, None)
77157723
} else {
@@ -7742,11 +7750,11 @@ impl<'a> Parser<'a> {
77427750
) -> Result<Option<DeclareAssignment>, ParserError> {
77437751
Ok(match &self.peek_token_ref().token {
77447752
Token::Word(w) if w.keyword == Keyword::DEFAULT => {
7745-
self.next_token(); // Skip `DEFAULT`
7753+
self.advance_token(); // Skip `DEFAULT`
77467754
Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
77477755
}
77487756
Token::Assignment => {
7749-
self.next_token(); // Skip `:=`
7757+
self.advance_token(); // Skip `:=`
77507758
Some(DeclareAssignment::DuckAssignment(Box::new(
77517759
self.parse_expr()?,
77527760
)))
@@ -7766,7 +7774,7 @@ impl<'a> Parser<'a> {
77667774
) -> Result<Option<DeclareAssignment>, ParserError> {
77677775
Ok(match &self.peek_token_ref().token {
77687776
Token::Eq => {
7769-
self.next_token(); // Skip `=`
7777+
self.advance_token(); // Skip `=`
77707778
Some(DeclareAssignment::MsSqlAssignment(Box::new(
77717779
self.parse_expr()?,
77727780
)))
@@ -12585,7 +12593,7 @@ impl<'a> Parser<'a> {
1258512593
} else {
1258612594
loop {
1258712595
if allow_wildcards && self.peek_token_ref().token == Token::Mul {
12588-
let span = self.next_token().span;
12596+
let span = self.next_token_ref().span;
1258912597
parts.push(ObjectNamePart::Identifier(Ident {
1259012598
value: Token::Mul.to_string(),
1259112599
quote_style: None,
@@ -13842,7 +13850,7 @@ impl<'a> Parser<'a> {
1384213850
let mut root = None;
1384313851
let mut r#type = false;
1384413852
while self.peek_token_ref().token == Token::Comma {
13845-
self.next_token();
13853+
self.advance_token();
1384613854
if self.parse_keyword(Keyword::ELEMENTS) {
1384713855
elements = true;
1384813856
} else if self.parse_keyword(Keyword::BINARY) {
@@ -13880,7 +13888,7 @@ impl<'a> Parser<'a> {
1388013888
let mut include_null_values = false;
1388113889
let mut without_array_wrapper = false;
1388213890
while self.peek_token_ref().token == Token::Comma {
13883-
self.next_token();
13891+
self.advance_token();
1388413892
if self.parse_keyword(Keyword::ROOT) {
1388513893
self.expect_token(&Token::LParen)?;
1388613894
root = Some(self.parse_literal_string()?);
@@ -14026,7 +14034,7 @@ impl<'a> Parser<'a> {
1402614034
if precedence >= next_precedence {
1402714035
break;
1402814036
}
14029-
self.next_token(); // skip past the set operator
14037+
self.advance_token(); // skip past the set operator
1403014038
let set_quantifier = self.parse_set_quantifier(&op);
1403114039
expr = SetExpr::SetOperation {
1403214040
left: Box::new(expr),
@@ -15190,7 +15198,7 @@ impl<'a> Parser<'a> {
1519015198
}
1519115199
}
1519215200
kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
15193-
let _ = self.next_token(); // consume LEFT/RIGHT
15201+
self.advance_token(); // consume LEFT/RIGHT
1519415202
let is_left = kw == Keyword::LEFT;
1519515203
let join_type = self.parse_one_of_keywords(&[
1519615204
Keyword::OUTER,
@@ -15238,17 +15246,17 @@ impl<'a> Parser<'a> {
1523815246
}
1523915247
}
1524015248
Keyword::ANTI => {
15241-
let _ = self.next_token(); // consume ANTI
15249+
self.advance_token(); // consume ANTI
1524215250
self.expect_keyword_is(Keyword::JOIN)?;
1524315251
JoinOperator::Anti
1524415252
}
1524515253
Keyword::SEMI => {
15246-
let _ = self.next_token(); // consume SEMI
15254+
self.advance_token(); // consume SEMI
1524715255
self.expect_keyword_is(Keyword::JOIN)?;
1524815256
JoinOperator::Semi
1524915257
}
1525015258
Keyword::FULL => {
15251-
let _ = self.next_token(); // consume FULL
15259+
self.advance_token(); // consume FULL
1525215260
let _ = self.parse_keyword(Keyword::OUTER); // [ OUTER ]
1525315261
self.expect_keyword_is(Keyword::JOIN)?;
1525415262
JoinOperator::FullOuter
@@ -15257,7 +15265,7 @@ impl<'a> Parser<'a> {
1525715265
return self.expected_ref("LEFT, RIGHT, or FULL", self.peek_token_ref());
1525815266
}
1525915267
Keyword::STRAIGHT_JOIN => {
15260-
let _ = self.next_token(); // consume STRAIGHT_JOIN
15268+
self.advance_token(); // consume STRAIGHT_JOIN
1526115269
JoinOperator::StraightJoin
1526215270
}
1526315271
_ if natural => {
@@ -16048,8 +16056,8 @@ impl<'a> Parser<'a> {
1604816056
} else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
1604916057
Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
1605016058
} else {
16051-
let found = self.next_token();
16052-
return self.expected("after match skip option", found);
16059+
self.next_token_ref();
16060+
return self.expected_at("after match skip option", self.get_current_index());
1605316061
}
1605416062
} else {
1605516063
None
@@ -16282,7 +16290,7 @@ impl<'a> Parser<'a> {
1628216290
let name = self.parse_identifier()?;
1628316291
let r#type = self.parse_data_type()?;
1628416292
let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
16285-
self.next_token();
16293+
self.advance_token();
1628616294
Some(path)
1628716295
} else {
1628816296
None
@@ -17984,8 +17992,8 @@ impl<'a> Parser<'a> {
1798417992
self.expect_token(&Token::RParen)?;
1798517993
Some(ReplaceSelectItem { items })
1798617994
} else {
17987-
let tok = self.next_token();
17988-
return self.expected("( after REPLACE but", tok);
17995+
self.next_token_ref();
17996+
return self.expected_at("( after REPLACE but", self.get_current_index());
1798917997
}
1799017998
} else {
1799117999
None

0 commit comments

Comments (0)