Commit 1e7e4c7

parser: collapse clippy-collapsible match branches
Refactor parser match arms to use match guards instead of nested `if` blocks where Clippy flagged `collapsible_match`. Behavior is unchanged; the flatter arms make control flow clearer in statement parsing, qualified wildcard handling, terminal keyword detection, and Hive row format delimiter parsing.
1 parent 1233fc1 commit 1e7e4c7
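
The transformation is mechanical: a match arm whose body is a single `if`, where the failing branch does what the fall-through arm already does, can hoist its condition into a match guard. A minimal before/after sketch of the pattern, using a hypothetical token type rather than the parser's own:

```rust
// Hypothetical token type, for illustration only.
enum Tok {
    Word(String),
    Eof,
}

// Before: the condition is nested inside the arm body.
fn is_end_before(tok: &Tok, expecting_delimiter: bool) -> bool {
    match tok {
        Tok::Word(w) => {
            if expecting_delimiter && w == "END" {
                return true;
            }
            false
        }
        _ => false,
    }
}

// After: the condition becomes a match guard. A false guard falls
// through to the `_` arm, so both versions agree on every input.
fn is_end_after(tok: &Tok, expecting_delimiter: bool) -> bool {
    match tok {
        Tok::Word(w) if expecting_delimiter && w == "END" => true,
        _ => false,
    }
}

fn main() {
    let tok = Tok::Word("END".into());
    assert_eq!(is_end_before(&tok, true), is_end_after(&tok, true));
    assert_eq!(is_end_before(&Tok::Eof, true), is_end_after(&Tok::Eof, true));
}
```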

File tree

1 file changed: +79 -90 lines


src/parser/mod.rs

Lines changed: 79 additions & 90 deletions
```diff
@@ -508,10 +508,10 @@ impl<'a> Parser<'a> {
                 Token::EOF => break,
 
                 // end of statement
-                Token::Word(word) => {
-                    if expecting_statement_delimiter && word.keyword == Keyword::END {
-                        break;
-                    }
+                Token::Word(word)
+                    if expecting_statement_delimiter && word.keyword == Keyword::END =>
+                {
+                    break;
                 }
                 _ => {}
             }
```
```diff
@@ -1298,41 +1298,40 @@ impl<'a> Parser<'a> {
 
         let next_token = self.next_token();
         match next_token.token {
-            t @ (Token::Word(_) | Token::SingleQuotedString(_)) => {
-                if self.peek_token_ref().token == Token::Period {
-                    let mut id_parts: Vec<Ident> = vec![match t {
-                        Token::Word(w) => w.into_ident(next_token.span),
-                        Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
-                        _ => {
-                            return Err(ParserError::ParserError(
-                                "Internal parser error: unexpected token type".to_string(),
-                            ))
+            t @ (Token::Word(_) | Token::SingleQuotedString(_))
+                if self.peek_token_ref().token == Token::Period =>
+            {
+                let mut id_parts: Vec<Ident> = vec![match t {
+                    Token::Word(w) => w.into_ident(next_token.span),
+                    Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
+                    _ => {
+                        return Err(ParserError::ParserError(
+                            "Internal parser error: unexpected token type".to_string(),
+                        ))
+                    }
+                }];
+
+                while self.consume_token(&Token::Period) {
+                    let next_token = self.next_token();
+                    match next_token.token {
+                        Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
+                        Token::SingleQuotedString(s) => {
+                            // SQLite has single-quoted identifiers
+                            id_parts.push(Ident::with_quote('\'', s))
                         }
-                    }];
-
-                    while self.consume_token(&Token::Period) {
-                        let next_token = self.next_token();
-                        match next_token.token {
-                            Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
-                            Token::SingleQuotedString(s) => {
-                                // SQLite has single-quoted identifiers
-                                id_parts.push(Ident::with_quote('\'', s))
-                            }
-                            Token::Placeholder(s) => {
-                                // Snowflake uses $1, $2, etc. for positional column references
-                                // in staged data queries like: SELECT t.$1 FROM @stage t
-                                id_parts.push(Ident::new(s))
-                            }
-                            Token::Mul => {
-                                return Ok(Expr::QualifiedWildcard(
-                                    ObjectName::from(id_parts),
-                                    AttachedToken(next_token),
-                                ));
-                            }
-                            _ => {
-                                return self
-                                    .expected("an identifier or a '*' after '.'", next_token);
-                            }
+                        Token::Placeholder(s) => {
+                            // Snowflake uses $1, $2, etc. for positional column references
+                            // in staged data queries like: SELECT t.$1 FROM @stage t
+                            id_parts.push(Ident::new(s))
+                        }
+                        Token::Mul => {
+                            return Ok(Expr::QualifiedWildcard(
+                                ObjectName::from(id_parts),
+                                AttachedToken(next_token),
+                            ));
+                        }
+                        _ => {
+                            return self.expected("an identifier or a '*' after '.'", next_token);
                         }
                     }
                 }
```
```diff
@@ -4990,10 +4989,10 @@ impl<'a> Parser<'a> {
         loop {
             match &self.peek_nth_token_ref(0).token {
                 Token::EOF => break,
-                Token::Word(w) => {
-                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) {
-                        break;
-                    }
+                Token::Word(w)
+                    if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) =>
+                {
+                    break;
                 }
                 _ => {}
             }
```
```diff
@@ -8177,70 +8176,60 @@ impl<'a> Parser<'a> {
             Keyword::LINES,
             Keyword::NULL,
         ]) {
-            Some(Keyword::FIELDS) => {
-                if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
+            Some(Keyword::FIELDS)
+                if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) =>
+            {
+                row_delimiters.push(HiveRowDelimiter {
+                    delimiter: HiveDelimiter::FieldsTerminatedBy,
+                    char: self.parse_identifier()?,
+                });
+
+                if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
                     row_delimiters.push(HiveRowDelimiter {
-                        delimiter: HiveDelimiter::FieldsTerminatedBy,
+                        delimiter: HiveDelimiter::FieldsEscapedBy,
                         char: self.parse_identifier()?,
                     });
-
-                    if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
-                        row_delimiters.push(HiveRowDelimiter {
-                            delimiter: HiveDelimiter::FieldsEscapedBy,
-                            char: self.parse_identifier()?,
-                        });
-                    }
-                } else {
-                    break;
                 }
             }
-            Some(Keyword::COLLECTION) => {
+            Some(Keyword::COLLECTION)
                 if self.parse_keywords(&[
                     Keyword::ITEMS,
                     Keyword::TERMINATED,
                     Keyword::BY,
-                ]) {
-                    row_delimiters.push(HiveRowDelimiter {
-                        delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
-                        char: self.parse_identifier()?,
-                    });
-                } else {
-                    break;
-                }
+                ]) =>
+            {
+                row_delimiters.push(HiveRowDelimiter {
+                    delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
+                    char: self.parse_identifier()?,
+                });
             }
-            Some(Keyword::MAP) => {
+            Some(Keyword::MAP)
                 if self.parse_keywords(&[
                     Keyword::KEYS,
                     Keyword::TERMINATED,
                     Keyword::BY,
-                ]) {
-                    row_delimiters.push(HiveRowDelimiter {
-                        delimiter: HiveDelimiter::MapKeysTerminatedBy,
-                        char: self.parse_identifier()?,
-                    });
-                } else {
-                    break;
-                }
+                ]) =>
+            {
+                row_delimiters.push(HiveRowDelimiter {
+                    delimiter: HiveDelimiter::MapKeysTerminatedBy,
+                    char: self.parse_identifier()?,
+                });
             }
-            Some(Keyword::LINES) => {
-                if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) {
-                    row_delimiters.push(HiveRowDelimiter {
-                        delimiter: HiveDelimiter::LinesTerminatedBy,
-                        char: self.parse_identifier()?,
-                    });
-                } else {
-                    break;
-                }
+            Some(Keyword::LINES)
+                if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) =>
+            {
+                row_delimiters.push(HiveRowDelimiter {
+                    delimiter: HiveDelimiter::LinesTerminatedBy,
+                    char: self.parse_identifier()?,
+                });
             }
-            Some(Keyword::NULL) => {
-                if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) {
-                    row_delimiters.push(HiveRowDelimiter {
-                        delimiter: HiveDelimiter::NullDefinedAs,
-                        char: self.parse_identifier()?,
-                    });
-                } else {
-                    break;
-                }
+            Some(Keyword::NULL)
+                if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) =>
+            {
+                row_delimiters.push(HiveRowDelimiter {
+                    delimiter: HiveDelimiter::NullDefinedAs,
+                    char: self.parse_identifier()?,
+                });
             }
             _ => {
                 break;
```
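
One subtlety in the last hunk: `parse_keywords` both tests and consumes tokens, and it now runs in guard position. The rewrite stays behavior-preserving because a guard executes only after its pattern has matched, exactly where the old nested `if` ran, and a false guard falls through to the `_ => { break; }` arm just as the old `else { break; }` did. A standalone sketch of a side-effecting guard, assuming a hypothetical `Cursor` type that is not sqlparser's API:

```rust
struct Cursor {
    toks: Vec<&'static str>,
    pos: usize,
}

impl Cursor {
    // Consume `kw` if it is the next token; otherwise leave `pos` alone,
    // mirroring how `parse_keywords` backtracks on failure.
    fn eat(&mut self, kw: &str) -> bool {
        if self.toks.get(self.pos) == Some(&kw) {
            self.pos += 1;
            true
        } else {
            false
        }
    }
}

fn step(head: Option<&str>, cur: &mut Cursor) -> &'static str {
    match head {
        // The guard runs only once `Some("FIELDS")` has matched,
        // exactly where the old nested `if` ran.
        Some("FIELDS") if cur.eat("TERMINATED") && cur.eat("BY") => "fields terminated by",
        // A false guard falls through here, like the old `else { break }`.
        _ => "stop",
    }
}

fn main() {
    let mut cur = Cursor { toks: vec!["TERMINATED", "BY"], pos: 0 };
    assert_eq!(step(Some("FIELDS"), &mut cur), "fields terminated by");
    assert_eq!(step(Some("FIELDS"), &mut cur), "stop"); // tokens already consumed
}
```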
