| commit | f5adf1eb72c755c3f6183199656f18b12a1cb952 | |
|---|---|---|
| author | Martin v. Löwis <martin@v.loewis.de> | Thu Feb 16 14:35:38 2006 +0000 |
| committer | Martin v. Löwis <martin@v.loewis.de> | Thu Feb 16 14:35:38 2006 +0000 |
| tree | d0ecbd123efacdc92192d86e33fa44ccba72e026 | |
| parent | eb079f1c2533bcefcab3ef4c07b800e5bd37239f | |
Use Py_ssize_t to count the length.
diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c
index 646a7c1..dee8e84 100644
--- a/Parser/tokenizer.c
+++ b/Parser/tokenizer.c
@@ -370,7 +370,7 @@
 	PyObject* utf8 = NULL;
 	PyObject* buf = tok->decoding_buffer;
 	char *str;
-	int utf8len;
+	Py_ssize_t utf8len;
 
 	/* Ask for one less byte so we can terminate it */
 	assert(size > 0);
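For context, a minimal sketch (not code from this commit) of why the declaration changes: in Python 2.5 the string/size APIs report lengths through a Py_ssize_t out-parameter (e.g. PyString_AsStringAndSize()), so a plain int no longer matches the pointer type and could truncate lengths above INT_MAX on 64-bit builds. The helper name below is hypothetical and only illustrates the pattern.

```c
/* Hypothetical helper, assuming Python 2.5 headers; not CPython code. */
#include <Python.h>

static int
utf8_length(PyObject *utf8, char **str_out, Py_ssize_t *len_out)
{
    char *str;
    Py_ssize_t utf8len;   /* must match the Py_ssize_t* out-parameter below */

    /* PyString_AsStringAndSize() writes the length through a Py_ssize_t*,
       so declaring utf8len as int would mismatch the pointer type and
       could silently truncate large sizes on LP64 platforms. */
    if (PyString_AsStringAndSize(utf8, &str, &utf8len) < 0)
        return -1;        /* exception already set by the call */

    *str_out = str;
    *len_out = utf8len;
    return 0;
}
```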