diff --git a/src/normalizer.cpp b/src/normalizer.cpp
index 04af6c5..985a96c 100644
--- a/src/normalizer.cpp
+++ b/src/normalizer.cpp
@@ -102,8 +102,9 @@ std::unique_ptr ReplaceNormalizer::create_regex_(
 }
 
 std::string ReplaceNormalizer::normalize(const std::string& input) const {
-  if (!regex_)
+  if (!regex_) {
     return input;
+  }
 
   std::string result = input;
   auto matches = regex_->find_all(result);
diff --git a/src/pre_tokenizer.cpp b/src/pre_tokenizer.cpp
index 279fc39..7383d80 100644
--- a/src/pre_tokenizer.cpp
+++ b/src/pre_tokenizer.cpp
@@ -151,8 +151,9 @@ std::unique_ptr RegexPreTokenizer::create_regex_(
 
 std::vector RegexPreTokenizer::pre_tokenize(
     const std::string& input) const {
-  if (!regex_)
+  if (!regex_) {
     return {};
+  }
 
   std::vector results;
   auto matches = regex_->find_all(input);