Change our tokenize function to yield empty tokens
Our tokenize function currently skips empty tokens. This means we
incorrectly accept invalid syntax with our seccomp filter like:
close: arg0 == 1 |||||| arg0 == 2
Change the tokenizer helper to yield an empty string in this case so
we can correctly detect and reject these inputs. We don't currently
have any scenarios where we actually want to allow empty strings
either (and if we did, the callers could check for them themselves).
Bug: None
Test: unittests pass
Change-Id: I282e4e4544a24c0e5a7036b693429bdd209339cf
diff --git a/util.c b/util.c
index 14c028a..9bb37ca 100644
--- a/util.c
+++ b/util.c
@@ -257,8 +257,8 @@
{
char *ret = NULL;
- /* If the string is NULL or empty, there are no tokens to be found. */
- if (stringp == NULL || *stringp == NULL || **stringp == '\0')
+ /* If the string is NULL, there are no tokens to be found. */
+ if (stringp == NULL || *stringp == NULL)
return NULL;
/*
@@ -271,33 +271,19 @@
return ret;
}
- char *found;
- while (**stringp != '\0') {
- found = strstr(*stringp, delim);
-
- if (!found) {
- /*
- * The delimiter was not found, so the full string
- * makes up the only token, and we're done.
- */
- ret = *stringp;
- *stringp = NULL;
- break;
- }
-
- if (found != *stringp) {
- /* There's a non-empty token before the delimiter. */
- *found = '\0';
- ret = *stringp;
- *stringp = found + strlen(delim);
- break;
- }
-
+ char *found = strstr(*stringp, delim);
+ if (!found) {
/*
- * The delimiter was found at the start of the string,
- * skip it and keep looking for a non-empty token.
+ * The delimiter was not found, so the full string
+ * makes up the only token, and we're done.
*/
- *stringp += strlen(delim);
+ ret = *stringp;
+ *stringp = NULL;
+ } else {
+ /* There's a token here, possibly empty. That's OK. */
+ *found = '\0';
+ ret = *stringp;
+ *stringp = found + strlen(delim);
}
return ret;