Fix build breakage from missing GetGlobalMaxTokenSize.

Because the preprocessor is used independently of the compiler,
we need a way to track the maximum token size when we don't have
access to the parse context for the current spec.
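
As a rough sketch of the intended usage (the caller-side names and
wiring below are illustrative assumptions, not part of this change),
whoever owns the Tokenizer forwards the limit it previously obtained
via the parse context instead of relying on a global:

    // Hypothetical caller-side wiring; only Tokenizer::setMaxTokenSize
    // comes from this change, the surrounding names are illustrative.
    void Preprocessor::setMaxTokenSize(size_t maxTokenSize)
    {
        // Pass the limit down to the tokenizer rather than having the
        // tokenizer read it from a global or from the parse context.
        mTokenizer->setMaxTokenSize(maxTokenSize);
    }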

BUG=angle:550

Change-Id: Idf5035ec2c001ee75f264151ab3c4e92f3cd44d7
Reviewed-on: https://chromium-review.googlesource.com/187140
Reviewed-by: Geoff Lang <geofflang@chromium.org>
Reviewed-by: Shannon Woods <shannonwoods@chromium.org>
Reviewed-by: Nicolas Capens <nicolascapens@chromium.org>
Tested-by: Jamie Madill <jmadill@chromium.org>
diff --git a/src/compiler/preprocessor/Tokenizer.l b/src/compiler/preprocessor/Tokenizer.l
index 95120e7..ab9d99a 100644
--- a/src/compiler/preprocessor/Tokenizer.l
+++ b/src/compiler/preprocessor/Tokenizer.l
@@ -24,7 +24,6 @@
 
 %{
 #include "Tokenizer.h"
-#include "length_limits.h"
 
 #include "DiagnosticsBase.h"
 #include "Token.h"
@@ -298,14 +297,19 @@
     yyset_lineno(line, mHandle);
 }
 
+void Tokenizer::setMaxTokenSize(size_t maxTokenSize)
+{
+    mMaxTokenSize = maxTokenSize;
+}
+
 void Tokenizer::lex(Token* token)
 {
     token->type = yylex(&token->text, &token->location, mHandle);
-    if (token->text.size() > GetGlobalMaxTokenSize())
+    if (token->text.size() > mMaxTokenSize)
     {
         mContext.diagnostics->report(Diagnostics::PP_TOKEN_TOO_LONG,
                                      token->location, token->text);
-        token->text.erase(GetGlobalMaxTokenSize());
+        token->text.erase(mMaxTokenSize);
     }
 
     token->flags = 0;
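
The matching Tokenizer.h edit is not part of this diff; a minimal
sketch of the declarations it would need (member names taken from the
hunk above, everything else assumed) looks like:

    // Sketch only -- the actual header change is not shown here.
    class Tokenizer
    {
      public:
        // Added by this change: lets the caller supply the limit.
        void setMaxTokenSize(size_t maxTokenSize);

      private:
        // Token length limit enforced in Tokenizer::lex().
        size_t mMaxTokenSize;
    };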