Skip to content

Commit 05711be

Browse files
authored
Revert "Add tekken to the tokenize tool (#119)" (#123)
This reverts commit 1c2481d.
1 parent 43d0317 commit 05711be

File tree

1 file changed

+0
-3
lines changed

1 file changed

+0
-3
lines changed

examples/tokenize_tool/main.cpp

Lines changed: 0 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -22,7 +22,6 @@
2222
#include "hf_tokenizer.h"
2323
#include "sentencepiece.h"
2424
#include "tiktoken.h"
25-
#include "tekken.h"
2625

2726
using namespace tokenizers;
2827

@@ -65,8 +64,6 @@ int main(int argc, char* argv[]) {
6564
tok_ptr.reset(new Tiktoken());
6665
} else if (tokenizer_type == "hf_tokenizer") {
6766
tok_ptr.reset(new HFTokenizer());
68-
} else if (tokenizer_type == "tekken") {
69-
tok_ptr.reset(new Tekken());
7067
} else {
7168
std::stringstream ss;
7269
ss << "ERROR: Invalid tokenizer type: " << tokenizer_type << std::endl

0 commit comments

Comments (0)