Increase the maximum token length in the preprocessor to 1024, to comply with the GLSL ES 3 specification.
TRAC #23077
Signed-off-by: Shannon Woods
Signed-off-by: Geoff Lang
Author: Jamie Madill
git-svn-id: https://angleproject.googlecode.com/svn/branches/es3proto@2349 736b8ea6-26fd-11df-bfd4-992fa37f6226
diff --git a/src/compiler/preprocessor/Tokenizer.cpp b/src/compiler/preprocessor/Tokenizer.cpp
index a48d241..feea74e 100644
--- a/src/compiler/preprocessor/Tokenizer.cpp
+++ b/src/compiler/preprocessor/Tokenizer.cpp
@@ -1,4 +1,4 @@
-#line 16 "./Tokenizer.l"
+#line 16 "preprocessor/Tokenizer.l"
//
// Copyright (c) 2011-2013 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
@@ -9,7 +9,7 @@
-#line 13 "./Tokenizer.cpp"
+#line 13 "preprocessor/Tokenizer.cpp"
#define YY_INT_ALIGNED short int
@@ -519,6 +519,7 @@
*/
#include "Tokenizer.h"
+#include "length_limits.h"
#include "DiagnosticsBase.h"
#include "Token.h"
@@ -2292,7 +2293,7 @@
// TODO(alokp): Maximum token length should ideally be specified by
// the preprocessor client, i.e., the compiler.
-const size_t Tokenizer::kMaxTokenLength = 256;
+const size_t Tokenizer::kMaxTokenLength = MAX_SYMBOL_NAME_LEN;
Tokenizer::Tokenizer(Diagnostics* diagnostics) : mHandle(0)
{