commit     1b933ed50aef2c31e003a001cef8647244ebfa0e
author     Amaury Forgeot d'Arc <amauryfa@gmail.com>  Thu Sep 04 22:34:09 2008 +0000
committer  Amaury Forgeot d'Arc <amauryfa@gmail.com>  Thu Sep 04 22:34:09 2008 +0000
tree       1d434753b2e2fd9e1454f4bd614c73a90bab903f
parent     1d6a16bf3838bfb89efdd5e338b247324d962010
#3773: Check for errors around the use of PyTokenizer_FindEncoding(). Reviewed by Brett Cannon.
diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c
index e4cf8e4..a040696 100644
--- a/Parser/tokenizer.c
+++ b/Parser/tokenizer.c
@@ -1610,7 +1610,10 @@
 	fclose(fp);
 	if (tok->encoding) {
 		encoding = (char *)PyMem_MALLOC(strlen(tok->encoding) + 1);
-		strcpy(encoding, tok->encoding);
+		if (encoding)
+			strcpy(encoding, tok->encoding);
+		else
+			PyErr_NoMemory();
 	}
 	PyTokenizer_Free(tok);
 	return encoding;
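
For context, a minimal caller sketch (not part of the commit; the helper name get_source_encoding is made up for illustration): with this change, a NULL return from PyTokenizer_FindEncoding() either means no encoding declaration was found or that copying tok->encoding failed, in which case PyErr_NoMemory() has already set the error indicator, so a caller can tell the two cases apart with PyErr_Occurred().

/* Hypothetical caller, illustrating the behaviour introduced by this commit:
 * PyTokenizer_FindEncoding() may now return NULL with a MemoryError set. */
#include "Python.h"
#include "tokenizer.h"   /* Parser-internal header declaring PyTokenizer_FindEncoding() */

static char *
get_source_encoding(int fd)
{
    char *enc = PyTokenizer_FindEncoding(fd);
    if (enc == NULL && PyErr_Occurred())
        return NULL;     /* allocation failed; exception already set */
    return enc;          /* may legitimately be NULL: no encoding cookie in the file */
}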