change our tokenize to yield empty tokens

Our tokenize function currently skips empty tokens.  This means our
seccomp filter parser incorrectly accepts invalid syntax like:
	close: arg0 == 1 |||||| arg0 == 2

Change the tokenizer helper to yield an empty string in this case so
we can correctly detect and reject such input.  We currently have no
scenario where we actually want to allow empty tokens either (and if
we ever did, the callers could check for them themselves).

Bug: None
Test: unittests pass
Change-Id: I282e4e4544a24c0e5a7036b693429bdd209339cf
diff --git a/syscall_filter_unittest.cc b/syscall_filter_unittest.cc
index 6bc044d..db01fbb 100644
--- a/syscall_filter_unittest.cc
+++ b/syscall_filter_unittest.cc
@@ -1261,6 +1261,19 @@
   ASSERT_NE(res, 0);
 }
 
+TEST(FilterTest, invalid_tokens) {
+  struct sock_fprog actual;
+  const char *policy = "read: arg0 == 1 |||| arg0 == 2\n";
+
+  FILE *policy_file = write_policy_to_pipe(policy, strlen(policy));
+  ASSERT_NE(policy_file, nullptr);
+
+  int res =
+      compile_filter("policy", policy_file, &actual, USE_RET_KILL, NO_LOGGING);
+  fclose(policy_file);
+  ASSERT_NE(res, 0);
+}
+
 TEST(FilterTest, nonexistent) {
   struct sock_fprog actual;
   int res = compile_filter("policy", NULL, &actual, USE_RET_KILL, NO_LOGGING);