Trim tokenized sub-strings.

asuessenbach 2020-05-04 17:19:44 +02:00
parent 9db492100b
commit bc6eba91f6


@@ -726,7 +726,7 @@ std::vector<std::string> tokenize( std::string const & tokenString, std::string
     end = tokenString.find( separator, start );
     if ( start != end )
     {
-      tokens.push_back( tokenString.substr( start, end - start ) );
+      tokens.push_back( trim( tokenString.substr( start, end - start ) ) );
     }
     start = end + separator.length();
   } while ( end != std::string::npos );
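
For illustration, here is a minimal, self-contained sketch of how the tokenizer behaves after this change. The hunk header truncates the function signature, so the name of the second parameter (separator) is an assumption, and the trim() helper shown here is an assumed whitespace-stripping implementation, not copied from the repository.

#include <algorithm>
#include <cassert>
#include <cctype>
#include <string>
#include <vector>

// Assumed whitespace-trimming helper; the repository's own trim() may differ.
std::string trim( std::string const & input )
{
  std::string result = input;
  result.erase( result.begin(),
                std::find_if( result.begin(), result.end(), []( unsigned char c ) { return !std::isspace( c ); } ) );
  result.erase( std::find_if( result.rbegin(), result.rend(), []( unsigned char c ) { return !std::isspace( c ); } ).base(),
                result.end() );
  return result;
}

// tokenize() as it reads after this commit; the separator parameter name is assumed,
// since the hunk header cuts the signature short.
std::vector<std::string> tokenize( std::string const & tokenString, std::string const & separator )
{
  std::vector<std::string> tokens;
  size_t start = 0, end;
  do
  {
    end = tokenString.find( separator, start );
    if ( start != end )
    {
      // The trimming added by this commit: strip surrounding whitespace from each token.
      tokens.push_back( trim( tokenString.substr( start, end - start ) ) );
    }
    start = end + separator.length();
  } while ( end != std::string::npos );
  return tokens;
}

int main()
{
  // Before the change the tokens would keep their surrounding spaces
  // (" uint32_t ", " VkBool32"); with trim() applied they come out clean.
  std::vector<std::string> tokens = tokenize( " uint32_t , VkBool32", "," );
  assert( ( tokens == std::vector<std::string>{ "uint32_t", "VkBool32" } ) );
  return 0;
}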