Issue #24619: New approach for tokenizing async/await.

This commit fixes how one-line async-defs and defs are tracked
by the tokenizer.  It makes it possible to correctly parse invalid
code such as:

>>> async def f():
...     def g(): pass
...     async = 10

and valid code such as:

>>> async def f():
...     async def g(): pass
...     await z

As a consequence, it is now possible to have one-line
'async def foo(): await ..' functions:

>>> async def foo(): return await bar()
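
For reference, the new behaviour can also be observed from Python via the
pure-Python 'tokenize' module, which mirrors the C tokenizer.  The snippet
below is illustrative only and not part of this patch; depending on the
Python version, 'async'/'await' show up either as dedicated ASYNC/AWAIT
tokens or as plain NAME tokens that the parser treats as keywords:

>>> import io, tokenize
>>> src = b"async def foo(): return await bar()\n"
>>> for tok in tokenize.tokenize(io.BytesIO(src).readline):
...     print(tokenize.tok_name[tok.type], repr(tok.string))
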
diff --git a/Parser/tokenizer.h b/Parser/tokenizer.h
index 3bcdad6..e198a0b 100644
--- a/Parser/tokenizer.h
+++ b/Parser/tokenizer.h
@@ -66,12 +66,21 @@
     const char* str;
     const char* input; /* Tokenizer's newline translated copy of the string. */
 
-    int defstack[MAXINDENT];     /* stack if funcs & indents where they
-                                    were defined */
-    int deftypestack[MAXINDENT]; /* stack of func types
-                                    (0 not func; 1: "def name";
-                                     2: "async def name") */
-    int def;                     /* Length of stack of func types */
+    /* `def*` fields are for parsing async/await in a backwards compatible
+       way.  They should be removed in 3.7, when 'async' and 'await'
+       become regular keywords.  See PEP 492 for more details. */
+    int defstack[MAXINDENT];     /* Stack of funcs & indents where they
+                                    were defined. */
+    int deftypestack[MAXINDENT]; /* Stack of func flags, see DEFTYPE_*
+                                    constants. */
+    int def;                     /* Length of stack of func types/flags. */
+    int def_async_behind;        /* 1 if there was an 'async' token before
+                                    a 'def' token. */
+    int def_in_async;            /* Counter of how deep 'async def's
+                                    are nested.  If greater than 0,
+                                    we are somewhere in an 'async def'
+                                    body, so 'async' and 'await' should
+                                    be parsed as keywords.*/
 };
 
 extern struct tok_state *PyTokenizer_FromString(const char *, int);
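
The struct comments above describe the intended bookkeeping.  As a rough
illustration only (a Python sketch, not the actual tokenizer.c logic; the
DEFTYPE_* values and the token handling are simplified assumptions), the
new fields interact roughly like this:

    DEFTYPE_SYNC, DEFTYPE_ASYNC = 1, 2    # assumed flag values

    defstack = []             # indent level where each enclosing def started
    deftypestack = []         # matching DEFTYPE_* flag for each entry
    def_async_behind = False  # an 'async' NAME was just seen
    def_in_async = 0          # > 0 => inside an 'async def' body

    def handle_name(name, indent):
        global def_async_behind, def_in_async
        if name == "async":
            def_async_behind = True
        elif name == "def":
            flag = DEFTYPE_ASYNC if def_async_behind else DEFTYPE_SYNC
            defstack.append(indent)       # len(defstack) plays the role of 'def'
            deftypestack.append(flag)
            if flag == DEFTYPE_ASYNC:
                def_in_async += 1
            def_async_behind = False
        else:
            def_async_behind = False      # 'async' not directly before 'def'

    def handle_dedent(indent):
        global def_in_async
        # Leaving a block: pop every function whose body just ended.
        while defstack and defstack[-1] >= indent:
            defstack.pop()
            if deftypestack.pop() == DEFTYPE_ASYNC:
                def_in_async -= 1

As the comment on def_in_async notes, 'async' and 'await' are treated as
keywords only while that counter is greater than zero, which is what the
nested and one-line examples in the commit message rely on.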