Skip to content

Commit 7f838a4

Browse files
Author: Alexander Beedie (committed)
Add a next_token_ref to help avoid unnecessary copies
1 parent 31e1942 commit 7f838a4

2 files changed

Lines changed: 88 additions & 80 deletions

File tree

src/dialect/snowflake.rs

Lines changed: 11 additions & 11 deletions
Original file line number | Diff line number | Diff line change
@@ -754,7 +754,7 @@ fn parse_alter_external_table(parser: &mut Parser) -> Result<Statement, ParserEr
754754
// Optional subpath for refreshing specific partitions
755755
let subpath = match parser.peek_token().token {
756756
Token::SingleQuotedString(s) => {
757-
parser.next_token();
757+
parser.advance_token();
758758
Some(s)
759759
}
760760
_ => None,
@@ -1155,14 +1155,14 @@ pub fn parse_create_database(
11551155
pub fn parse_storage_serialization_policy(
11561156
parser: &mut Parser,
11571157
) -> Result<StorageSerializationPolicy, ParserError> {
1158-
let next_token = parser.next_token();
1159-
match &next_token.token {
1160-
Token::Word(w) => match w.keyword {
1161-
Keyword::COMPATIBLE => Ok(StorageSerializationPolicy::Compatible),
1162-
Keyword::OPTIMIZED => Ok(StorageSerializationPolicy::Optimized),
1163-
_ => parser.expected("storage_serialization_policy", next_token),
1164-
},
1165-
_ => parser.expected("storage_serialization_policy", next_token),
1158+
let keyword = match &parser.next_token_ref().token {
1159+
Token::Word(w) => w.keyword,
1160+
_ => Keyword::NoKeyword,
1161+
};
1162+
match keyword {
1163+
Keyword::COMPATIBLE => Ok(StorageSerializationPolicy::Compatible),
1164+
Keyword::OPTIMIZED => Ok(StorageSerializationPolicy::Optimized),
1165+
_ => parser.expected_at("storage_serialization_policy", parser.get_current_index()),
11661166
}
11671167
}
11681168

@@ -1399,7 +1399,7 @@ pub fn parse_copy_into(parser: &mut Parser) -> Result<Statement, ParserError> {
13991399
// VALIDATION MODE
14001400
} else if parser.parse_keyword(Keyword::VALIDATION_MODE) {
14011401
parser.expect_token(&Token::Eq)?;
1402-
validation_mode = Some(parser.next_token().token.to_string());
1402+
validation_mode = Some(parser.next_token_ref().to_string());
14031403
// COPY OPTIONS
14041404
} else if parser.parse_keyword(Keyword::COPY_OPTIONS) {
14051405
parser.expect_token(&Token::Eq)?;
@@ -1557,7 +1557,7 @@ fn parse_stage_params(parser: &mut Parser) -> Result<StageParamsObject, ParserEr
15571557
// STORAGE INTEGRATION
15581558
if parser.parse_keyword(Keyword::STORAGE_INTEGRATION) {
15591559
parser.expect_token(&Token::Eq)?;
1560-
storage_integration = Some(parser.next_token().token.to_string());
1560+
storage_integration = Some(parser.next_token_ref().to_string());
15611561
}
15621562

15631563
// ENDPOINT

src/parser/mod.rs

Lines changed: 77 additions & 69 deletions
Original file line number | Diff line number | Diff line change
@@ -2529,15 +2529,15 @@ impl<'a> Parser<'a> {
25292529

25302530
/// Parse window frame `UNITS` clause: `ROWS`, `RANGE`, or `GROUPS`.
25312531
pub fn parse_window_frame_units(&mut self) -> Result<WindowFrameUnits, ParserError> {
2532-
let next_token = self.next_token();
2533-
match &next_token.token {
2534-
Token::Word(w) => match w.keyword {
2535-
Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2536-
Keyword::RANGE => Ok(WindowFrameUnits::Range),
2537-
Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2538-
_ => self.expected("ROWS, RANGE, GROUPS", next_token)?,
2539-
},
2540-
_ => self.expected("ROWS, RANGE, GROUPS", next_token),
2532+
let keyword = match &self.next_token_ref().token {
2533+
Token::Word(w) => w.keyword,
2534+
_ => Keyword::NoKeyword,
2535+
};
2536+
match keyword {
2537+
Keyword::ROWS => Ok(WindowFrameUnits::Rows),
2538+
Keyword::RANGE => Ok(WindowFrameUnits::Range),
2539+
Keyword::GROUPS => Ok(WindowFrameUnits::Groups),
2540+
_ => self.expected_at("ROWS, RANGE, GROUPS", self.get_current_index()),
25412541
}
25422542
}
25432543

@@ -2986,15 +2986,15 @@ impl<'a> Parser<'a> {
29862986
///
29872987
/// See [TrimWhereField]
29882988
pub fn parse_trim_where(&mut self) -> Result<TrimWhereField, ParserError> {
2989-
let next_token = self.next_token();
2990-
match &next_token.token {
2991-
Token::Word(w) => match w.keyword {
2992-
Keyword::BOTH => Ok(TrimWhereField::Both),
2993-
Keyword::LEADING => Ok(TrimWhereField::Leading),
2994-
Keyword::TRAILING => Ok(TrimWhereField::Trailing),
2995-
_ => self.expected("trim_where field", next_token)?,
2996-
},
2997-
_ => self.expected("trim_where field", next_token),
2989+
let keyword = match &self.next_token_ref().token {
2990+
Token::Word(w) => w.keyword,
2991+
_ => Keyword::NoKeyword,
2992+
};
2993+
match keyword {
2994+
Keyword::BOTH => Ok(TrimWhereField::Both),
2995+
Keyword::LEADING => Ok(TrimWhereField::Leading),
2996+
Keyword::TRAILING => Ok(TrimWhereField::Trailing),
2997+
_ => self.expected_at("trim_where field", self.get_current_index()),
29982998
}
29992999
}
30003000

@@ -3437,7 +3437,7 @@ impl<'a> Parser<'a> {
34373437
if self.peek_token_ref().token != Token::Lt {
34383438
return Ok((Default::default(), false.into()));
34393439
}
3440-
self.next_token();
3440+
self.advance_token();
34413441

34423442
let mut field_defs = vec![];
34433443
let trailing_bracket = loop {
@@ -3679,11 +3679,11 @@ impl<'a> Parser<'a> {
36793679
let trailing_bracket = if !trailing_bracket.0 {
36803680
match &self.peek_token_ref().token {
36813681
Token::Gt => {
3682-
self.next_token();
3682+
self.advance_token();
36833683
false.into()
36843684
}
36853685
Token::ShiftRight => {
3686-
self.next_token();
3686+
self.advance_token();
36873687
true.into()
36883688
}
36893689
_ => return self.expected_ref(">", self.peek_token_ref()),
@@ -4467,6 +4467,14 @@ impl<'a> Parser<'a> {
44674467
self.get_current_token().clone()
44684468
}
44694469

4470+
/// Advances to the next non-whitespace token and returns a reference to it.
4471+
///
4472+
/// See [`Self::next_token`] for a version that returns a cloned token.
4473+
pub fn next_token_ref(&mut self) -> &TokenWithSpan {
4474+
self.advance_token();
4475+
self.get_current_token()
4476+
}
4477+
44704478
/// Returns the index of the current token
44714479
///
44724480
/// This can be used with APIs that expect an index, such as
@@ -5340,7 +5348,7 @@ impl<'a> Parser<'a> {
53405348
match &self.peek_token_ref().token {
53415349
Token::Word(word) => match word.keyword {
53425350
Keyword::AS => {
5343-
self.next_token();
5351+
self.advance_token();
53445352
Ok((true, self.parse_query()?))
53455353
}
53465354
_ => Ok((false, self.parse_query()?)),
@@ -6316,19 +6324,19 @@ impl<'a> Parser<'a> {
63166324

63176325
/// Parse a file format for external tables.
63186326
pub fn parse_file_format(&mut self) -> Result<FileFormat, ParserError> {
6319-
let next_token = self.next_token();
6320-
match &next_token.token {
6321-
Token::Word(w) => match w.keyword {
6322-
Keyword::AVRO => Ok(FileFormat::AVRO),
6323-
Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
6324-
Keyword::ORC => Ok(FileFormat::ORC),
6325-
Keyword::PARQUET => Ok(FileFormat::PARQUET),
6326-
Keyword::RCFILE => Ok(FileFormat::RCFILE),
6327-
Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
6328-
Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
6329-
_ => self.expected("fileformat", next_token),
6330-
},
6331-
_ => self.expected("fileformat", next_token),
6327+
let keyword = match &self.next_token_ref().token {
6328+
Token::Word(w) => w.keyword,
6329+
_ => Keyword::NoKeyword,
6330+
};
6331+
match keyword {
6332+
Keyword::AVRO => Ok(FileFormat::AVRO),
6333+
Keyword::JSONFILE => Ok(FileFormat::JSONFILE),
6334+
Keyword::ORC => Ok(FileFormat::ORC),
6335+
Keyword::PARQUET => Ok(FileFormat::PARQUET),
6336+
Keyword::RCFILE => Ok(FileFormat::RCFILE),
6337+
Keyword::SEQUENCEFILE => Ok(FileFormat::SEQUENCEFILE),
6338+
Keyword::TEXTFILE => Ok(FileFormat::TEXTFILE),
6339+
_ => self.expected_at("fileformat", self.get_current_index()),
63326340
}
63336341
}
63346342

@@ -6342,16 +6350,16 @@ impl<'a> Parser<'a> {
63426350

63436351
/// Parse an `ANALYZE FORMAT`.
63446352
pub fn parse_analyze_format(&mut self) -> Result<AnalyzeFormat, ParserError> {
6345-
let next_token = self.next_token();
6346-
match &next_token.token {
6347-
Token::Word(w) => match w.keyword {
6348-
Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
6349-
Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
6350-
Keyword::JSON => Ok(AnalyzeFormat::JSON),
6351-
Keyword::TREE => Ok(AnalyzeFormat::TREE),
6352-
_ => self.expected("fileformat", next_token),
6353-
},
6354-
_ => self.expected("fileformat", next_token),
6353+
let keyword = match &self.next_token_ref().token {
6354+
Token::Word(w) => w.keyword,
6355+
_ => Keyword::NoKeyword,
6356+
};
6357+
match keyword {
6358+
Keyword::TEXT => Ok(AnalyzeFormat::TEXT),
6359+
Keyword::GRAPHVIZ => Ok(AnalyzeFormat::GRAPHVIZ),
6360+
Keyword::JSON => Ok(AnalyzeFormat::JSON),
6361+
Keyword::TREE => Ok(AnalyzeFormat::TREE),
6362+
_ => self.expected_at("fileformat", self.get_current_index()),
63556363
}
63566364
}
63576365

@@ -6465,9 +6473,9 @@ impl<'a> Parser<'a> {
64656473
Keyword::TEMPTABLE => CreateViewAlgorithm::TempTable,
64666474
_ => {
64676475
self.prev_token();
6468-
let found = self.next_token();
6476+
self.next_token_ref();
64696477
return self
6470-
.expected("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", found);
6478+
.expected_at("UNDEFINED or MERGE or TEMPTABLE after ALGORITHM =", self.get_current_index());
64716479
}
64726480
},
64736481
)
@@ -6487,8 +6495,8 @@ impl<'a> Parser<'a> {
64876495
Keyword::INVOKER => CreateViewSecurity::Invoker,
64886496
_ => {
64896497
self.prev_token();
6490-
let found = self.next_token();
6491-
return self.expected("DEFINER or INVOKER after SQL SECURITY", found);
6498+
self.next_token_ref();
6499+
return self.expected_at("DEFINER or INVOKER after SQL SECURITY", self.get_current_index());
64926500
}
64936501
},
64946502
)
@@ -7695,11 +7703,11 @@ impl<'a> Parser<'a> {
76957703
let (declare_type, data_type) = match &self.peek_token_ref().token {
76967704
Token::Word(w) => match w.keyword {
76977705
Keyword::CURSOR => {
7698-
self.next_token();
7706+
self.advance_token();
76997707
(Some(DeclareType::Cursor), None)
77007708
}
77017709
Keyword::AS => {
7702-
self.next_token();
7710+
self.advance_token();
77037711
(None, Some(self.parse_data_type()?))
77047712
}
77057713
_ => (None, Some(self.parse_data_type()?)),
@@ -7708,7 +7716,7 @@ impl<'a> Parser<'a> {
77087716
};
77097717

77107718
let (for_query, assignment) = if self.peek_keyword(Keyword::FOR) {
7711-
self.next_token();
7719+
self.advance_token();
77127720
let query = Some(self.parse_query()?);
77137721
(query, None)
77147722
} else {
@@ -7741,11 +7749,11 @@ impl<'a> Parser<'a> {
77417749
) -> Result<Option<DeclareAssignment>, ParserError> {
77427750
Ok(match &self.peek_token_ref().token {
77437751
Token::Word(w) if w.keyword == Keyword::DEFAULT => {
7744-
self.next_token(); // Skip `DEFAULT`
7752+
self.advance_token(); // Skip `DEFAULT`
77457753
Some(DeclareAssignment::Default(Box::new(self.parse_expr()?)))
77467754
}
77477755
Token::Assignment => {
7748-
self.next_token(); // Skip `:=`
7756+
self.advance_token(); // Skip `:=`
77497757
Some(DeclareAssignment::DuckAssignment(Box::new(
77507758
self.parse_expr()?,
77517759
)))
@@ -7765,7 +7773,7 @@ impl<'a> Parser<'a> {
77657773
) -> Result<Option<DeclareAssignment>, ParserError> {
77667774
Ok(match &self.peek_token_ref().token {
77677775
Token::Eq => {
7768-
self.next_token(); // Skip `=`
7776+
self.advance_token(); // Skip `=`
77697777
Some(DeclareAssignment::MsSqlAssignment(Box::new(
77707778
self.parse_expr()?,
77717779
)))
@@ -12605,7 +12613,7 @@ impl<'a> Parser<'a> {
1260512613
} else {
1260612614
loop {
1260712615
if allow_wildcards && self.peek_token_ref().token == Token::Mul {
12608-
let span = self.next_token().span;
12616+
let span = self.next_token_ref().span;
1260912617
parts.push(ObjectNamePart::Identifier(Ident {
1261012618
value: Token::Mul.to_string(),
1261112619
quote_style: None,
@@ -13866,7 +13874,7 @@ impl<'a> Parser<'a> {
1386613874
let mut root = None;
1386713875
let mut r#type = false;
1386813876
while self.peek_token_ref().token == Token::Comma {
13869-
self.next_token();
13877+
self.advance_token();
1387013878
if self.parse_keyword(Keyword::ELEMENTS) {
1387113879
elements = true;
1387213880
} else if self.parse_keyword(Keyword::BINARY) {
@@ -13904,7 +13912,7 @@ impl<'a> Parser<'a> {
1390413912
let mut include_null_values = false;
1390513913
let mut without_array_wrapper = false;
1390613914
while self.peek_token_ref().token == Token::Comma {
13907-
self.next_token();
13915+
self.advance_token();
1390813916
if self.parse_keyword(Keyword::ROOT) {
1390913917
self.expect_token(&Token::LParen)?;
1391013918
root = Some(self.parse_literal_string()?);
@@ -14050,7 +14058,7 @@ impl<'a> Parser<'a> {
1405014058
if precedence >= next_precedence {
1405114059
break;
1405214060
}
14053-
self.next_token(); // skip past the set operator
14061+
self.advance_token(); // skip past the set operator
1405414062
let set_quantifier = self.parse_set_quantifier(&op);
1405514063
expr = SetExpr::SetOperation {
1405614064
left: Box::new(expr),
@@ -15214,7 +15222,7 @@ impl<'a> Parser<'a> {
1521415222
}
1521515223
}
1521615224
kw @ Keyword::LEFT | kw @ Keyword::RIGHT => {
15217-
let _ = self.next_token(); // consume LEFT/RIGHT
15225+
self.advance_token(); // consume LEFT/RIGHT
1521815226
let is_left = kw == Keyword::LEFT;
1521915227
let join_type = self.parse_one_of_keywords(&[
1522015228
Keyword::OUTER,
@@ -15262,17 +15270,17 @@ impl<'a> Parser<'a> {
1526215270
}
1526315271
}
1526415272
Keyword::ANTI => {
15265-
let _ = self.next_token(); // consume ANTI
15273+
self.advance_token(); // consume ANTI
1526615274
self.expect_keyword_is(Keyword::JOIN)?;
1526715275
JoinOperator::Anti
1526815276
}
1526915277
Keyword::SEMI => {
15270-
let _ = self.next_token(); // consume SEMI
15278+
self.advance_token(); // consume SEMI
1527115279
self.expect_keyword_is(Keyword::JOIN)?;
1527215280
JoinOperator::Semi
1527315281
}
1527415282
Keyword::FULL => {
15275-
let _ = self.next_token(); // consume FULL
15283+
self.advance_token(); // consume FULL
1527615284
let _ = self.parse_keyword(Keyword::OUTER); // [ OUTER ]
1527715285
self.expect_keyword_is(Keyword::JOIN)?;
1527815286
JoinOperator::FullOuter
@@ -15281,7 +15289,7 @@ impl<'a> Parser<'a> {
1528115289
return self.expected_ref("LEFT, RIGHT, or FULL", self.peek_token_ref());
1528215290
}
1528315291
Keyword::STRAIGHT_JOIN => {
15284-
let _ = self.next_token(); // consume STRAIGHT_JOIN
15292+
self.advance_token(); // consume STRAIGHT_JOIN
1528515293
JoinOperator::StraightJoin
1528615294
}
1528715295
_ if natural => {
@@ -16072,8 +16080,8 @@ impl<'a> Parser<'a> {
1607216080
} else if self.parse_keywords(&[Keyword::TO, Keyword::LAST]) {
1607316081
Some(AfterMatchSkip::ToLast(self.parse_identifier()?))
1607416082
} else {
16075-
let found = self.next_token();
16076-
return self.expected("after match skip option", found);
16083+
self.next_token_ref();
16084+
return self.expected_at("after match skip option", self.get_current_index());
1607716085
}
1607816086
} else {
1607916087
None
@@ -16306,7 +16314,7 @@ impl<'a> Parser<'a> {
1630616314
let name = self.parse_identifier()?;
1630716315
let r#type = self.parse_data_type()?;
1630816316
let path = if let Token::SingleQuotedString(path) = self.peek_token().token {
16309-
self.next_token();
16317+
self.advance_token();
1631016318
Some(path)
1631116319
} else {
1631216320
None
@@ -18053,8 +18061,8 @@ impl<'a> Parser<'a> {
1805318061
self.expect_token(&Token::RParen)?;
1805418062
Some(ReplaceSelectItem { items })
1805518063
} else {
18056-
let tok = self.next_token();
18057-
return self.expected("( after REPLACE but", tok);
18064+
self.next_token_ref();
18065+
return self.expected_at("( after REPLACE but", self.get_current_index());
1805818066
}
1805918067
} else {
1806018068
None

0 commit comments

Comments (0)