blob: 16b80187428bdf76f6dd156857900040bd3e984b [file] [log] [blame]
// Copyright 2008, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "base/string_tokenizer.h"
#include "testing/gtest/include/gtest/gtest.h"
using std::string;
namespace {
// Empty test fixture for StringTokenizer tests.
// NOTE(review): every test below uses TEST (not TEST_F), so this fixture is
// never actually instantiated by gtest — it appears to be a leftover or a
// placeholder; confirm before removing.
class StringTokenizerTest : public testing::Test {};
}
// Basic whitespace tokenization: every word comes back in order and
// GetNext() reports exhaustion once the input is consumed.
TEST(StringTokenizerTest, Simple) {
  string input = "this is a test";
  StringTokenizer t(input, " ");

  const char* const kExpected[] = { "this", "is", "a", "test" };
  for (size_t i = 0; i < sizeof(kExpected) / sizeof(kExpected[0]); ++i) {
    EXPECT_TRUE(t.GetNext());
    EXPECT_EQ(string(kExpected[i]), t.token());
  }
  EXPECT_FALSE(t.GetNext());
}
// With RETURN_DELIMS set, each delimiter character is itself returned as a
// token, interleaved with the words.
TEST(StringTokenizerTest, RetDelims) {
  string input = "this is a test";
  StringTokenizer t(input, " ");
  t.set_options(StringTokenizer::RETURN_DELIMS);

  const char* const kExpected[] = {
    "this", " ", "is", " ", "a", " ", "test"
  };
  for (size_t i = 0; i < sizeof(kExpected) / sizeof(kExpected[0]); ++i) {
    EXPECT_TRUE(t.GetNext());
    EXPECT_EQ(string(kExpected[i]), t.token());
  }
  EXPECT_FALSE(t.GetNext());
}
// Multiple delimiter characters may be supplied; any of them splits the
// input, and the delimiters themselves are skipped by default.
TEST(StringTokenizerTest, ManyDelims) {
  string input = "this: is, a-test";
  StringTokenizer t(input, ": ,-");

  const char* const kExpected[] = { "this", "is", "a", "test" };
  for (size_t i = 0; i < sizeof(kExpected) / sizeof(kExpected[0]); ++i) {
    EXPECT_TRUE(t.GetNext());
    EXPECT_EQ(string(kExpected[i]), t.token());
  }
  EXPECT_FALSE(t.GetNext());
}
// Tokenizes an HTTP-style header with RETURN_DELIMS and verifies that
// token_is_delim() correctly distinguishes delimiter tokens from word
// tokens.  After exhaustion, token_is_delim() must report false.
TEST(StringTokenizerTest, ParseHeader) {
  string input = "Content-Type: text/html ; charset=UTF-8";
  StringTokenizer t(input, ": ;=");
  t.set_options(StringTokenizer::RETURN_DELIMS);

  const struct {
    const char* token;
    bool is_delim;
  } kExpected[] = {
    { "Content-Type", false },
    { ":",            true  },
    { " ",            true  },
    { "text/html",    false },
    { " ",            true  },
    { ";",            true  },
    { " ",            true  },
    { "charset",      false },
    { "=",            true  },
    { "UTF-8",        false },
  };
  for (size_t i = 0; i < sizeof(kExpected) / sizeof(kExpected[0]); ++i) {
    EXPECT_TRUE(t.GetNext());
    EXPECT_EQ(kExpected[i].is_delim, t.token_is_delim());
    EXPECT_EQ(string(kExpected[i].token), t.token());
  }
  EXPECT_FALSE(t.GetNext());
  EXPECT_FALSE(t.token_is_delim());
}
// A quote character suppresses splitting inside a quoted run: the whole
// quoted span (including the quote characters) is one token.
TEST(StringTokenizerTest, ParseQuotedString) {
  string input = "foo bar 'hello world' baz";
  StringTokenizer t(input, " ");
  t.set_quote_chars("'");

  const char* const kExpected[] = { "foo", "bar", "'hello world'", "baz" };
  for (size_t i = 0; i < sizeof(kExpected) / sizeof(kExpected[0]); ++i) {
    EXPECT_TRUE(t.GetNext());
    EXPECT_EQ(string(kExpected[i]), t.token());
  }
  EXPECT_FALSE(t.GetNext());
}
// An unterminated quote runs to the end of the input: the open quote plus
// everything after it is returned as a single final token.
TEST(StringTokenizerTest, ParseQuotedString_Malformed) {
  string input = "bar 'hello wo";
  StringTokenizer t(input, " ");
  t.set_quote_chars("'");

  const char* const kExpected[] = { "bar", "'hello wo" };
  for (size_t i = 0; i < sizeof(kExpected) / sizeof(kExpected[0]); ++i) {
    EXPECT_TRUE(t.GetNext());
    EXPECT_EQ(string(kExpected[i]), t.token());
  }
  EXPECT_FALSE(t.GetNext());
}
// With two quote characters registered, a quoted run is closed only by the
// same character that opened it; the other quote char is plain data inside.
// A dangling quote at the end of a token does not extend it.
TEST(StringTokenizerTest, ParseQuotedString_Multiple) {
  string input = "bar 'hel\"lo\" wo' baz\"";
  StringTokenizer t(input, " ");
  t.set_quote_chars("'\"");

  const char* const kExpected[] = { "bar", "'hel\"lo\" wo'", "baz\"" };
  for (size_t i = 0; i < sizeof(kExpected) / sizeof(kExpected[0]); ++i) {
    EXPECT_TRUE(t.GetNext());
    EXPECT_EQ(string(kExpected[i]), t.token());
  }
  EXPECT_FALSE(t.GetNext());
}
// A backslash-escaped quote inside a quoted run does not terminate it; the
// escape sequence is preserved verbatim in the returned token.
TEST(StringTokenizerTest, ParseQuotedString_EscapedQuotes) {
  string input = "foo 'don\\'t do that'";
  StringTokenizer t(input, " ");
  t.set_quote_chars("'");

  const char* const kExpected[] = { "foo", "'don\\'t do that'" };
  for (size_t i = 0; i < sizeof(kExpected) / sizeof(kExpected[0]); ++i) {
    EXPECT_TRUE(t.GetNext());
    EXPECT_EQ(string(kExpected[i]), t.token());
  }
  EXPECT_FALSE(t.GetNext());
}
// A quoted span embedded mid-token protects delimiters inside it, so
// "foo='a, b'" survives as one token despite ',' and ' ' being delimiters.
TEST(StringTokenizerTest, ParseQuotedString_EscapedQuotes2) {
  string input = "foo='a, b', bar";
  StringTokenizer t(input, ", ");
  t.set_quote_chars("'");

  const char* const kExpected[] = { "foo='a, b'", "bar" };
  for (size_t i = 0; i < sizeof(kExpected) / sizeof(kExpected[0]); ++i) {
    EXPECT_TRUE(t.GetNext());
    EXPECT_EQ(string(kExpected[i]), t.token());
  }
  EXPECT_FALSE(t.GetNext());
}