Properly support the maximum token size in WebGL and ES3.
WebGL specifies a maximum token size of 256 characters, while
ES3 specifies 1024 characters. The correct maximum size to
support can therefore be determined from the spec.
BUG=angle:550
Change-Id: I6aeabe8af3b6184a27b456248ce2f84f361b76e4
Reviewed-on: https://chromium-review.googlesource.com/186973
Reviewed-by: Shannon Woods <shannonwoods@chromium.org>
Tested-by: Jamie Madill <jmadill@chromium.org>
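The change below drops the hard-coded kMaxTokenLength constant in favor of a
spec-dependent lookup. As a minimal illustrative sketch only (the enum and
function names here are hypothetical, not the actual ANGLE API), the idea is
to select the limit from the shader spec in use:

```cpp
#include <cstddef>

// Hypothetical names for illustration; the real helper in the diff is
// GetGlobalMaxTokenSize(), whose exact definition is not shown here.
enum class ShaderSpec { WebGL, ES3 };

size_t MaxTokenSizeForSpec(ShaderSpec spec)
{
    // WebGL caps tokens at 256 characters; GLSL ES 3.00 allows 1024.
    return (spec == ShaderSpec::WebGL) ? 256u : 1024u;
}
```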
diff --git a/src/compiler/preprocessor/Tokenizer.l b/src/compiler/preprocessor/Tokenizer.l
index 6c53f51..95120e7 100644
--- a/src/compiler/preprocessor/Tokenizer.l
+++ b/src/compiler/preprocessor/Tokenizer.l
@@ -1,6 +1,6 @@
/*
//
-// Copyright (c) 2002-2013 The ANGLE Project Authors. All rights reserved.
+// Copyright (c) 2002-2014 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
@@ -14,7 +14,7 @@
%top{
//
-// Copyright (c) 2011-2013 The ANGLE Project Authors. All rights reserved.
+// Copyright (c) 2011-2014 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
@@ -268,10 +268,6 @@
namespace pp {
-// TODO(alokp): Maximum token length should ideally be specified by
-// the preprocessor client, i.e., the compiler.
-const size_t Tokenizer::kMaxTokenLength = MAX_SYMBOL_NAME_LEN;
-
Tokenizer::Tokenizer(Diagnostics* diagnostics) : mHandle(0)
{
mContext.diagnostics = diagnostics;
@@ -305,11 +301,11 @@
void Tokenizer::lex(Token* token)
{
token->type = yylex(&token->text, &token->location, mHandle);
- if (token->text.size() > kMaxTokenLength)
+ if (token->text.size() > GetGlobalMaxTokenSize())
{
mContext.diagnostics->report(Diagnostics::PP_TOKEN_TOO_LONG,
token->location, token->text);
- token->text.erase(kMaxTokenLength);
+ token->text.erase(GetGlobalMaxTokenSize());
}
token->flags = 0;
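For context on the truncation above: std::string::erase(pos) removes every
character from pos to the end of the string, so after the PP_TOKEN_TOO_LONG
diagnostic is reported the token text is clipped to the allowed maximum. A
small standalone sketch of that behavior (the 256-character limit is used
here as an example of the WebGL case):

```cpp
#include <cassert>
#include <string>

int main()
{
    std::string text(300, 'a');       // a 300-character token
    const size_t maxTokenSize = 256;  // e.g. the WebGL limit
    if (text.size() > maxTokenSize)
        text.erase(maxTokenSize);     // keep only the first 256 characters
    assert(text.size() == maxTokenSize);
    return 0;
}
```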