From dfc34563b5d1e59e2e400ca3488ac02cbef68bad Mon Sep 17 00:00:00 2001
From: Jon Siwek
Date: Thu, 16 Jul 2020 11:51:40 -0700
Subject: [PATCH] Fix tokenize_string() to work with delimiters of length > 1

---
 src/util.cc | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/src/util.cc b/src/util.cc
index b54f9d5ade..9742a62e46 100644
--- a/src/util.cc
+++ b/src/util.cc
@@ -1530,6 +1530,10 @@ TEST_CASE("util tokenize_string")
 	auto svs = tokenize_string("one,two,three,four,", ',');
 	std::vector<std::string_view> expect{"one", "two", "three", "four", ""};
 	CHECK(svs == expect);
+
+	auto letters = tokenize_string("a--b--c--d", "--");
+	CHECK(*letters == vector<string>({ "a", "b", "c", "d" }));
+	delete letters;
 	}
 
 vector<string>* tokenize_string(std::string_view input, std::string_view delim,
@@ -1546,7 +1550,7 @@ vector<string>* tokenize_string(std::string_view input, std::string_view delim,
 		{
 		++found;
 		rval->emplace_back(input.substr(pos, n - pos));
-		pos = n + 1;
+		pos = n + delim.size();
 
 		if ( limit && found == limit )
			break;
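
Note: the sketch below is not Zeek's util.cc code, just a minimal self-contained
illustration (the helper name `split` and the `step` parameter are assumptions
for demonstration only) of the off-by-delimiter-length behavior this patch
addresses. Advancing the scan position by 1 after a match leaves the tail of a
multi-character delimiter in the next token, while advancing by delim.size()
skips past the whole delimiter.

#include <iostream>
#include <string>
#include <string_view>
#include <vector>

// Simplified tokenizer; `step` controls how far the scan advances past each
// match: 1 reproduces the old behavior, delim.size() matches the fixed one.
static std::vector<std::string> split(std::string_view input,
                                      std::string_view delim, size_t step)
	{
	std::vector<std::string> rval;
	size_t pos = 0;
	size_t n;

	while ( (n = input.find(delim, pos)) != std::string_view::npos )
		{
		rval.emplace_back(input.substr(pos, n - pos));
		pos = n + step;
		}

	rval.emplace_back(input.substr(pos));
	return rval;
	}

int main()
	{
	// Old behavior, step = 1: prints [a][-b][-c][-d]; the second '-' of each
	// "--" delimiter bleeds into the following token.
	for ( const auto& t : split("a--b--c--d", "--", 1) )
		std::cout << '[' << t << ']';
	std::cout << '\n';

	// Fixed behavior, step = delim.size() = 2: prints [a][b][c][d].
	for ( const auto& t : split("a--b--c--d", "--", 2) )
		std::cout << '[' << t << ']';
	std::cout << '\n';

	return 0;
	}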