Preliminary support for "from __future__ import generators" to enable
the yield statement.  I figure we have to have this in before I can
release 2.2a1 on Wednesday.

Note: test_generators is currently broken; I'm counting on Tim to fix
it.
diff --git a/Include/compile.h b/Include/compile.h
index b9a735a..5e6e572 100644
--- a/Include/compile.h
+++ b/Include/compile.h
@@ -65,6 +65,9 @@
 #define NESTED_SCOPES_DEFAULT 1
 #define FUTURE_NESTED_SCOPES "nested_scopes"
 
+#define GENERATORS_DEFAULT 0
+#define FUTURE_GENERATORS "generators"
+
 /* for internal use only */
 #define _PyCode_GETCODEPTR(co, pp) \
 	((*(co)->co_code->ob_type->tp_as_buffer->bf_getreadbuffer) \
diff --git a/Lib/__future__.py b/Lib/__future__.py
index 8510ceb..65416ae 100644
--- a/Lib/__future__.py
+++ b/Lib/__future__.py
@@ -67,3 +67,4 @@
                             `self.getMandatoryRelease()` + ")"
 
 nested_scopes = _Feature((2, 1, 0, "beta", 1), (2, 2, 0, "alpha", 0))
+generators = _Feature((2, 2, 0, "alpha", 1), (2, 3, 0, "final", 0))
diff --git a/Lib/inspect.py b/Lib/inspect.py
index 2d88bc1..6183b0e 100644
--- a/Lib/inspect.py
+++ b/Lib/inspect.py
@@ -24,6 +24,8 @@
 
 # This module is in the public domain.  No warranties.
 
+from __future__ import generators
+
 __author__ = 'Ka-Ping Yee <ping@lfw.org>'
 __date__ = '1 Jan 2001'
 
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index b79cdc0..cbe4552 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -22,6 +22,8 @@
 function to which the 5 fields described above are passed as 5 arguments,
 each time a new token is found."""
 
+from __future__ import generators
+
 __author__ = 'Ka-Ping Yee <ping@lfw.org>'
 __credits__ = \
     'GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro'
diff --git a/Lib/types.py b/Lib/types.py
index 85962ba..95600a3 100644
--- a/Lib/types.py
+++ b/Lib/types.py
@@ -2,6 +2,7 @@
 
 Types that are part of optional modules (e.g. array) are not listed.
 """
+from __future__ import generators
 
 import sys
 
diff --git a/Parser/parser.c b/Parser/parser.c
index 6eaa925..753c43a 100644
--- a/Parser/parser.c
+++ b/Parser/parser.c
@@ -79,6 +79,7 @@
 	if (ps == NULL)
 		return NULL;
 	ps->p_grammar = g;
+	ps->p_generators = 0;
 	ps->p_tree = PyNode_New(start);
 	if (ps->p_tree == NULL) {
 		PyMem_DEL(ps);
@@ -131,8 +132,9 @@
 /* PARSER PROPER */
 
 static int
-classify(grammar *g, int type, char *str)
+classify(parser_state *ps, int type, char *str)
 {
+	grammar *g = ps->p_grammar;
 	register int n = g->g_ll.ll_nlabels;
 	
 	if (type == NAME) {
@@ -143,6 +145,10 @@
 			if (l->lb_type == NAME && l->lb_str != NULL &&
 					l->lb_str[0] == s[0] &&
 					strcmp(l->lb_str, s) == 0) {
+				if (!ps->p_generators &&
+				    s[0] == 'y' &&
+				    strcmp(s, "yield") == 0)
+					break; /* not a keyword */
 				D(printf("It's a keyword\n"));
 				return n - i;
 			}
@@ -164,6 +170,22 @@
 	return -1;
 }
 
+static void
+future_hack(parser_state *ps)
+{
+	node *n = ps->p_stack.s_top->s_parent;
+	node *ch;
+
+	if (strcmp(STR(CHILD(n, 0)), "from") != 0)
+		return;
+	ch = CHILD(n, 1);
+	if (strcmp(STR(CHILD(ch, 0)), "__future__") != 0)
+		return;
+	ch = CHILD(n, 3);
+	if (NCH(ch) == 1 && strcmp(STR(CHILD(ch, 0)), "generators") == 0)
+		ps->p_generators = 1;
+}
+
 int
 PyParser_AddToken(register parser_state *ps, register int type, char *str,
 	          int lineno, int *expected_ret)
@@ -174,7 +196,7 @@
 	D(printf("Token %s/'%s' ... ", _PyParser_TokenNames[type], str));
 	
 	/* Find out which label this token is */
-	ilabel = classify(ps->p_grammar, type, str);
+	ilabel = classify(ps, type, str);
 	if (ilabel < 0)
 		return E_SYNTAX;
 	
@@ -217,7 +239,14 @@
 				while (s = &d->d_state
 						[ps->p_stack.s_top->s_state],
 					s->s_accept && s->s_narcs == 1) {
-					D(printf("  Direct pop.\n"));
+					D(printf("  DFA '%s', state %d: "
+						 "Direct pop.\n",
+						 d->d_name,
+						 ps->p_stack.s_top->s_state));
+					if (d->d_name[0] == 'i' &&
+					    strcmp(d->d_name,
+						   "import_stmt") == 0)
+						future_hack(ps);
 					s_pop(&ps->p_stack);
 					if (s_empty(&ps->p_stack)) {
 						D(printf("  ACCEPT.\n"));
@@ -230,6 +259,9 @@
 		}
 		
 		if (s->s_accept) {
+			if (d->d_name[0] == 'i' &&
+			    strcmp(d->d_name, "import_stmt") == 0)
+				future_hack(ps);
 			/* Pop this dfa and try again */
 			s_pop(&ps->p_stack);
 			D(printf(" Pop ...\n"));
diff --git a/Parser/parser.h b/Parser/parser.h
index cf8d318..b0c9a1e 100644
--- a/Parser/parser.h
+++ b/Parser/parser.h
@@ -25,6 +25,7 @@
 	stack	 	p_stack;	/* Stack of parser states */
 	grammar		*p_grammar;	/* Grammar to use */
 	node		*p_tree;	/* Top of parse tree */
+	int		p_generators;	/* 1 if yield is a keyword */
 } parser_state;
 
 parser_state *PyParser_New(grammar *g, int start);
diff --git a/Python/future.c b/Python/future.c
index cf2dca5..70be26b 100644
--- a/Python/future.c
+++ b/Python/future.c
@@ -31,6 +31,8 @@
 		feature = STR(CHILD(ch, 0));
 		if (strcmp(feature, FUTURE_NESTED_SCOPES) == 0) {
 			ff->ff_nested_scopes = 1;
+		} else if (strcmp(feature, FUTURE_GENERATORS) == 0) {
+			/* OK; this is processed by the parser */
 		} else if (strcmp(feature, "braces") == 0) {
 			PyErr_SetString(PyExc_SyntaxError,
 					"not a chance");