Skip to content

Commit d09697c

Browse files
committed
style: apply cargo fmt to parser/mod.rs
1 parent 8324a58 commit d09697c

1 file changed

Lines changed: 79 additions & 72 deletions

File tree

src/parser/mod.rs

Lines changed: 79 additions & 72 deletions
Original file line number | Diff line number | Diff line change
@@ -509,9 +509,10 @@ impl<'a> Parser<'a> {
509509

510510
// end of statement
511511
Token::Word(word)
512-
if expecting_statement_delimiter && word.keyword == Keyword::END => {
513-
break;
514-
}
512+
if expecting_statement_delimiter && word.keyword == Keyword::END =>
513+
{
514+
break;
515+
}
515516
_ => {}
516517
}
517518

@@ -1305,43 +1306,43 @@ impl<'a> Parser<'a> {
13051306
let next_token = self.next_token();
13061307
match next_token.token {
13071308
t @ (Token::Word(_) | Token::SingleQuotedString(_))
1308-
if self.peek_token_ref().token == Token::Period => {
1309-
let mut id_parts: Vec<Ident> = vec![match t {
1310-
Token::Word(w) => w.into_ident(next_token.span),
1311-
Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
1312-
_ => {
1313-
return Err(ParserError::ParserError(
1314-
"Internal parser error: unexpected token type".to_string(),
1315-
))
1309+
if self.peek_token_ref().token == Token::Period =>
1310+
{
1311+
let mut id_parts: Vec<Ident> = vec![match t {
1312+
Token::Word(w) => w.into_ident(next_token.span),
1313+
Token::SingleQuotedString(s) => Ident::with_quote('\'', s),
1314+
_ => {
1315+
return Err(ParserError::ParserError(
1316+
"Internal parser error: unexpected token type".to_string(),
1317+
))
1318+
}
1319+
}];
1320+
1321+
while self.consume_token(&Token::Period) {
1322+
let next_token = self.next_token();
1323+
match next_token.token {
1324+
Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
1325+
Token::SingleQuotedString(s) => {
1326+
// SQLite has single-quoted identifiers
1327+
id_parts.push(Ident::with_quote('\'', s))
13161328
}
1317-
}];
1318-
1319-
while self.consume_token(&Token::Period) {
1320-
let next_token = self.next_token();
1321-
match next_token.token {
1322-
Token::Word(w) => id_parts.push(w.into_ident(next_token.span)),
1323-
Token::SingleQuotedString(s) => {
1324-
// SQLite has single-quoted identifiers
1325-
id_parts.push(Ident::with_quote('\'', s))
1326-
}
1327-
Token::Placeholder(s) => {
1328-
// Snowflake uses $1, $2, etc. for positional column references
1329-
// in staged data queries like: SELECT t.$1 FROM @stage t
1330-
id_parts.push(Ident::new(s))
1331-
}
1332-
Token::Mul => {
1333-
return Ok(Expr::QualifiedWildcard(
1334-
ObjectName::from(id_parts),
1335-
AttachedToken(next_token),
1336-
));
1337-
}
1338-
_ => {
1339-
return self
1340-
.expected("an identifier or a '*' after '.'", next_token);
1341-
}
1329+
Token::Placeholder(s) => {
1330+
// Snowflake uses $1, $2, etc. for positional column references
1331+
// in staged data queries like: SELECT t.$1 FROM @stage t
1332+
id_parts.push(Ident::new(s))
1333+
}
1334+
Token::Mul => {
1335+
return Ok(Expr::QualifiedWildcard(
1336+
ObjectName::from(id_parts),
1337+
AttachedToken(next_token),
1338+
));
1339+
}
1340+
_ => {
1341+
return self.expected("an identifier or a '*' after '.'", next_token);
13421342
}
13431343
}
13441344
}
1345+
}
13451346
Token::Mul => {
13461347
return Ok(Expr::Wildcard(AttachedToken(next_token)));
13471348
}
@@ -5030,9 +5031,10 @@ impl<'a> Parser<'a> {
50305031
match &self.peek_nth_token_ref(0).token {
50315032
Token::EOF => break,
50325033
Token::Word(w)
5033-
if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) => {
5034-
break;
5035-
}
5034+
if w.quote_style.is_none() && terminal_keywords.contains(&w.keyword) =>
5035+
{
5036+
break;
5037+
}
50365038
_ => {}
50375039
}
50385040

@@ -8386,55 +8388,60 @@ impl<'a> Parser<'a> {
83868388
Keyword::NULL,
83878389
]) {
83888390
Some(Keyword::FIELDS)
8389-
if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) => {
8391+
if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) =>
8392+
{
8393+
row_delimiters.push(HiveRowDelimiter {
8394+
delimiter: HiveDelimiter::FieldsTerminatedBy,
8395+
char: self.parse_identifier()?,
8396+
});
8397+
8398+
if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
83908399
row_delimiters.push(HiveRowDelimiter {
8391-
delimiter: HiveDelimiter::FieldsTerminatedBy,
8400+
delimiter: HiveDelimiter::FieldsEscapedBy,
83928401
char: self.parse_identifier()?,
83938402
});
8394-
8395-
if self.parse_keywords(&[Keyword::ESCAPED, Keyword::BY]) {
8396-
row_delimiters.push(HiveRowDelimiter {
8397-
delimiter: HiveDelimiter::FieldsEscapedBy,
8398-
char: self.parse_identifier()?,
8399-
});
8400-
}
84018403
}
8404+
}
84028405
Some(Keyword::COLLECTION)
84038406
if self.parse_keywords(&[
84048407
Keyword::ITEMS,
84058408
Keyword::TERMINATED,
84068409
Keyword::BY,
8407-
]) => {
8408-
row_delimiters.push(HiveRowDelimiter {
8409-
delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
8410-
char: self.parse_identifier()?,
8411-
});
8412-
}
8410+
]) =>
8411+
{
8412+
row_delimiters.push(HiveRowDelimiter {
8413+
delimiter: HiveDelimiter::CollectionItemsTerminatedBy,
8414+
char: self.parse_identifier()?,
8415+
});
8416+
}
84138417
Some(Keyword::MAP)
84148418
if self.parse_keywords(&[
84158419
Keyword::KEYS,
84168420
Keyword::TERMINATED,
84178421
Keyword::BY,
8418-
]) => {
8419-
row_delimiters.push(HiveRowDelimiter {
8420-
delimiter: HiveDelimiter::MapKeysTerminatedBy,
8421-
char: self.parse_identifier()?,
8422-
});
8423-
}
8422+
]) =>
8423+
{
8424+
row_delimiters.push(HiveRowDelimiter {
8425+
delimiter: HiveDelimiter::MapKeysTerminatedBy,
8426+
char: self.parse_identifier()?,
8427+
});
8428+
}
84248429
Some(Keyword::LINES)
8425-
if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) => {
8426-
row_delimiters.push(HiveRowDelimiter {
8427-
delimiter: HiveDelimiter::LinesTerminatedBy,
8428-
char: self.parse_identifier()?,
8429-
});
8430-
}
8430+
if self.parse_keywords(&[Keyword::TERMINATED, Keyword::BY]) =>
8431+
{
8432+
row_delimiters.push(HiveRowDelimiter {
8433+
delimiter: HiveDelimiter::LinesTerminatedBy,
8434+
char: self.parse_identifier()?,
8435+
});
8436+
}
84318437
Some(Keyword::NULL)
8432-
if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) => {
8433-
row_delimiters.push(HiveRowDelimiter {
8434-
delimiter: HiveDelimiter::NullDefinedAs,
8435-
char: self.parse_identifier()?,
8436-
});
8437-
}
8438+
if self.parse_keywords(&[Keyword::DEFINED, Keyword::AS]) =>
8439+
{
8440+
row_delimiters.push(HiveRowDelimiter {
8441+
delimiter: HiveDelimiter::NullDefinedAs,
8442+
char: self.parse_identifier()?,
8443+
});
8444+
}
84388445
_ => {
84398446
break;
84408447
}

0 commit comments

Comments (0)