@@ -45,7 +45,7 @@ public class Parser : IDisposable { // TODO: remove IDisposable
4545 private SourceUnit _sourceUnit ;
4646
4747 /// <summary>
48- /// Language features initialized on parser construction and possibly updated during parsing.
48+ /// Language features initialized on parser construction and possibly updated during parsing.
4949 /// The code can set the language features (e.g. "from __future__ import division").
5050 /// </summary>
5151 private ModuleOptions _languageFeatures ;
@@ -376,7 +376,7 @@ internal void ReportSyntaxError(int start, int end, string message, int errorCod
376376 Severity . FatalError ) ;
377377 }
378378
379- #endregion
379+ #endregion
380380
381381 #region LL(1) Parsing
382382
@@ -403,7 +403,7 @@ private string ReadName() {
403403 }
404404
405405 //stmt: simple_stmt | compound_stmt
406- //compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt
406+ //compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt
407407 private Statement ParseStmt ( ) {
408408 switch ( PeekToken ( ) . Kind ) {
409409 case TokenKind . KeywordIf :
@@ -465,7 +465,7 @@ private Statement ParseSimpleStmt() {
465465
466466 /*
467467 small_stmt: expr_stmt | del_stmt | pass_stmt | flow_stmt | import_stmt | global_stmt | nonlocal_stmt | assert_stmt
468-
468+
469469 del_stmt: 'del' exprlist
470470 pass_stmt: 'pass'
471471 flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt
@@ -557,7 +557,7 @@ private Statement FinishSmallStmt(Statement stmt) {
557557
558558 // yield_stmt: yield_expr
559559 private Statement ParseYieldStmt ( ) {
560- // For yield statements, continue to enforce that it's currently in a function.
560+ // For yield statements, continue to enforce that it's currently in a function.
561561 // This gives us better syntax error reporting for yield-statements than for yield-expressions.
562562 FunctionDefinition current = CurrentFunction ;
563563 if ( current == null ) {
@@ -577,7 +577,7 @@ private Statement ParseYieldStmt() {
577577
578578 /// <summary>
579579 /// Peek if the next token is a 'yield' and parse a yield expression. Else return null.
580- ///
580+ ///
581581 /// Called w/ yield already eaten.
582582 /// </summary>
583583 /// <returns>A yield expression if present, else null. </returns>
@@ -589,7 +589,7 @@ private Expression ParseYieldExpression() {
589589 // Mark that this function is actually a generator.
590590 // If we're in a generator expression, then we don't have a function yet.
591591 // g=((yield i) for i in range(5))
592-              // In that case, the genexp will mark IsGenerator.
592+              // In that case, the genexp will mark IsGenerator.
593593 FunctionDefinition current = CurrentFunction ;
594594 if ( current != null ) {
595595 current . IsGenerator = true ;
@@ -761,7 +761,7 @@ private PythonOperator GetBinaryOperator(OperatorToken token) {
761761 }
762762 }
763763
764-    // import_stmt: 'import' module ['as' name] (',' module ['as' name])*
764+    // import_stmt: 'import' module ['as' name] (',' module ['as' name])*
765765 // name: identifier
766766 private ImportStatement ParseImportStmt ( ) {
767767 Eat ( TokenKind . KeywordImport ) ;
@@ -834,8 +834,8 @@ private string[] ReadNames() {
834834
835835
836836 // 'from' relative_module 'import' identifier ['as' name] (',' identifier ['as' name]) *
837- // 'from' relative_module 'import' '(' identifier ['as' name] (',' identifier ['as' name])* [','] ')'
838- // 'from' module 'import' "*"
837+ // 'from' relative_module 'import' '(' identifier ['as' name] (',' identifier ['as' name])* [','] ')'
838+ // 'from' module 'import' "*"
839839 private FromImportStatement ParseFromImportStmt ( ) {
840840 Eat ( TokenKind . KeywordFrom ) ;
841841 var start = GetStart ( ) ;
@@ -1336,7 +1336,7 @@ private Expression FinishLambdef() {
13361336 return ParseLambdaHelperEnd ( func , expr ) ;
13371337 }
13381338
1339- // Helpers for parsing lambda expressions.
1339+ // Helpers for parsing lambda expressions.
13401340 // Usage
13411341 // FunctionDefinition f = ParseLambdaHelperStart(string);
13421342 // Expression expr = ParseXYZ();
@@ -1357,7 +1357,7 @@ private FunctionDefinition ParseLambdaHelperStart(string name) {
13571357 }
13581358
13591359 private Expression ParseLambdaHelperEnd ( FunctionDefinition func , Expression expr ) {
1360- // Pep 342 in Python 2.5 allows Yield Expressions, which can occur inside a Lambda body.
1360+ // Pep 342 in Python 2.5 allows Yield Expressions, which can occur inside a Lambda body.
13611361            // In this case, the lambda is a generator and will yield its final result instead of just return it.
13621362 Statement body ;
13631363 if ( func . IsGenerator ) {
@@ -1424,7 +1424,6 @@ private WithStatement ParseWithStmt() {
14241424 items . Add ( ParseWithItem ( ) ) ;
14251425 }
14261426
1427-
14281427 var header = GetEnd ( ) ;
14291428 Statement body = ParseSuite ( ) ;
14301429 if ( items is not null ) {
@@ -1456,16 +1455,18 @@ private WithItem ParseWithItem() {
14561455 }
14571456
14581457 // async_stmt: 'async' (funcdef | with_stmt | for_stmt)
1459- private Statement ParseAsyncStmt ( ) {
1458+ private Statement ParseAsyncStmt ( bool onlyAllowDef = false ) {
14601459 Eat ( TokenKind . KeywordAsync ) ;
14611460 var start = GetStart ( ) ;
14621461
14631462 switch ( PeekToken ( ) . Kind ) {
14641463 case TokenKind . KeywordDef :
14651464 return ParseFuncDef ( true ) ;
14661465 case TokenKind . KeywordWith :
1466+ if ( onlyAllowDef ) goto default ;
14671467 return ParseAsyncWithStmt ( start ) ;
14681468 case TokenKind . KeywordFor :
1469+ if ( onlyAllowDef ) goto default ;
14691470 return ParseAsyncForStmt ( start ) ;
14701471 default :
14711472 ReportSyntaxError ( "invalid syntax" ) ;
@@ -1491,7 +1492,6 @@ private AsyncWithStatement ParseAsyncWithStmt(int asyncStart) {
14911492 items . Add ( ParseWithItem ( ) ) ;
14921493 }
14931494
1494-
14951495 var header = GetEnd ( ) ;
14961496 Statement body = ParseSuite ( ) ;
14971497 if ( items is not null ) {
@@ -2642,7 +2642,7 @@ private Expression ParseGeneratorExpression(Expression expr) {
26422642 // Generator Expressions have an implicit function definition and yield around their expression.
26432643 // (x for i in R)
26442644 // becomes:
2645- // def f():
2645+ // def f():
26462646 // for i in R: yield (x)
26472647 ExpressionStatement ys = new ExpressionStatement ( new YieldExpression ( expr ) ) ;
26482648 ys . Expression . SetLoc ( _globalParent , expr . IndexSpan ) ;
@@ -3151,13 +3151,13 @@ private PythonAst ParseFileWorker(bool makeModule, bool returnValue) {
31513151 List < Statement > l = new List < Statement > ( ) ;
31523152
31533153 //
3154- // A future statement must appear near the top of the module.
3155- // The only lines that can appear before a future statement are:
3156- // - the module docstring (if any),
3157- // - comments,
3158- // - blank lines, and
3159- // - other future statements.
3160- //
3154+ // A future statement must appear near the top of the module.
3155+ // The only lines that can appear before a future statement are:
3156+ // - the module docstring (if any),
3157+ // - comments,
3158+ // - blank lines, and
3159+ // - other future statements.
3160+ //
31613161
31623162 MaybeEatNewLine ( ) ;
31633163
@@ -3231,6 +3231,12 @@ private Statement InternalParseInteractiveInput(out bool parsingMultiLineCmpdStm
32313231 }
32323232 return null ;
32333233
3234+ case TokenKind . KeywordAsync :
3235+ parsingMultiLineCmpdStmt = true ;
3236+ s = ParseAsyncStmt ( onlyAllowDef : true ) ;
3237+ EatEndOfInput ( ) ;
3238+ break ;
3239+
32343240 case TokenKind . KeywordIf :
32353241 case TokenKind . KeywordWhile :
32363242 case TokenKind . KeywordFor :
@@ -3268,11 +3274,11 @@ private Expression ParseTestListAsExpression() {
32683274 /// <summary>
32693275 /// Maybe eats a new line token returning true if the token was
32703276 /// eaten.
3271- ///
3272- /// Python always tokenizes to have only 1 new line character in a
3273-    /// row. But we also create NLTokens and ignore them except for
3274- /// error reporting purposes. This gives us the same errors as
3275- /// CPython and also matches the behavior of the standard library
3277+ ///
3278+ /// Python always tokenizes to have only 1 new line character in a
3279+    /// row. But we also create NLTokens and ignore them except for
3280+ /// error reporting purposes. This gives us the same errors as
3281+ /// CPython and also matches the behavior of the standard library
32763282 /// tokenize module. This function eats any present NL tokens and throws
32773283 /// them away.
32783284 /// </summary>
@@ -3285,12 +3291,12 @@ private bool MaybeEatNewLine() {
32853291 }
32863292
32873293 /// <summary>
3288- /// Eats a new line token throwing if the next token isn't a new line.
3289- ///
3290- /// Python always tokenizes to have only 1 new line character in a
3291-    /// row. But we also create NLTokens and ignore them except for
3292- /// error reporting purposes. This gives us the same errors as
3293- /// CPython and also matches the behavior of the standard library
3294+ /// Eats a new line token throwing if the next token isn't a new line.
3295+ ///
3296+ /// Python always tokenizes to have only 1 new line character in a
3297+    /// row. But we also create NLTokens and ignore them except for
3298+ /// error reporting purposes. This gives us the same errors as
3299+ /// CPython and also matches the behavior of the standard library
32943300 /// tokenize module. This function eats any present NL tokens and throws
32953301 /// them away.
32963302 /// </summary>
@@ -3316,7 +3322,7 @@ private Token EatEndOfInput() {
33163322 if ( _sourceReader . BaseReader is StreamReader sr && sr . BaseStream . CanSeek ) {
33173323 // TODO: Convert exception index to proper SourceLocation
33183324 }
3319- // BUG: We have some weird stream and we can't accurately track the
3325+ // BUG: We have some weird stream and we can't accurately track the
33203326 // position where the exception came from. There are too many levels
33213327 // of buffering below us to re-wind and calculate the actual line number, so
33223328 // we'll give the last line number the tokenizer was at.
0 commit comments