Fix tokenize_string() to work with delimiters of length > 1

This commit is contained in:
Jon Siwek 2020-07-16 11:51:40 -07:00
parent fd8db24daf
commit dfc34563b5

View file

@@ -1530,6 +1530,10 @@ TEST_CASE("util tokenize_string")
auto svs = tokenize_string("one,two,three,four,", ',');
std::vector<std::string_view> expect{"one", "two", "three", "four", ""};
CHECK(svs == expect);
auto letters = tokenize_string("a--b--c--d", "--");
CHECK(*letters == vector<string>({ "a", "b", "c", "d" }));
delete letters;
}
vector<string>* tokenize_string(std::string_view input, std::string_view delim,
@@ -1546,7 +1550,7 @@ vector<string>* tokenize_string(std::string_view input, std::string_view delim,
{
++found;
rval->emplace_back(input.substr(pos, n - pos));
pos = n + 1;
pos = n + delim.size();
if ( limit && found == limit )
break;