Update to reflect new tokenize_tests.py
diff --git a/Lib/test/output/test_tokenize b/Lib/test/output/test_tokenize
index ba7ea6e..ea55181 100644
--- a/Lib/test/output/test_tokenize
+++ b/Lib/test/output/test_tokenize
@@ -470,14 +470,14 @@
 147,11-147,12:	NUMBER	'1'
 147,12-147,13:	OP	','
 147,14-147,15:	OP	'*'
-147,15-147,19:	NAME	'rest'
-147,19-147,20:	OP	','
-147,21-147,23:	OP	'**'
-147,23-147,27:	NAME	'rest'
-147,27-147,28:	OP	')'
-147,28-147,29:	OP	':'
-147,30-147,34:	NAME	'pass'
-147,34-147,35:	NEWLINE	'\n'
+147,15-147,20:	NAME	'restt'
+147,20-147,21:	OP	','
+147,22-147,24:	OP	'**'
+147,24-147,29:	NAME	'restd'
+147,29-147,30:	OP	')'
+147,30-147,31:	OP	':'
+147,32-147,36:	NAME	'pass'
+147,36-147,37:	NEWLINE	'\n'
 148,0-148,1:	NL	'\n'
 149,0-149,1:	OP	'('
 149,1-149,2:	NAME	'x'
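
Note (not part of the patch): each line in this expected-output file has the shape `start_row,start_col-end_row,end_col: TOKEN_TYPE 'token_string'`, which is why renaming `rest` to the longer `restt`/`restd` in the test input shifts every column number after the change. A minimal, illustrative sketch that reproduces this output shape; the `SOURCE` string and the use of the Python 3 `tokenize.generate_tokens` API are assumptions, not what the old test driver literally ran:

    import io
    import tokenize

    # Hypothetical one-line input resembling line 147 of the test data.
    SOURCE = "def f(a, b=1, *restt, **restd): pass\n"

    # Emit tokens in the "srow,scol-erow,ecol:  TYPE  'string'" layout
    # used by Lib/test/output/test_tokenize.
    for tok in tokenize.generate_tokens(io.StringIO(SOURCE).readline):
        (srow, scol), (erow, ecol) = tok.start, tok.end
        print("%d,%d-%d,%d:\t%s\t%r"
              % (srow, scol, erow, ecol,
                 tokenize.tok_name[tok.type], tok.string))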