3 files changed: +27 −16 lines.

include/pytorch/tokenizers (the header that declares BPETokenizerBase): add a shared build_special_token_regex helper to namespace tokenizers::detail so both BPE tokenizers can use it.

@@ -25,6 +25,8 @@
 #include <pytorch/tokenizers/string_integer_map.h>
 #include <pytorch/tokenizers/tokenizer.h>
 
+#include "re2/re2.h"
+
 namespace tokenizers {
 namespace detail {
 
@@ -104,6 +106,25 @@ static Result<TokenMap> buildTokenMap(
   return buildTokenMap(std::move(pairs));
 }
 
+static Result<std::unique_ptr<IRegex>> build_special_token_regex(
+    const TokenMap& special_token_map) {
+  std::string special_pattern;
+  const std::size_t count = special_token_map.size();
+
+  for (std::size_t i = 0; i < count; ++i) {
+    const auto& [token, _] = special_token_map.getElement(i);
+    if (!special_pattern.empty()) {
+      special_pattern += "|";
+    }
+    special_pattern += re2::RE2::QuoteMeta(std::string(token));
+  }
+
+  if (special_pattern.empty()) {
+    return static_cast<std::unique_ptr<IRegex>>(nullptr);
+  }
+  return create_regex(special_pattern);
+}
+
 class BPETokenizerBase : public Tokenizer {
  public:
  Result<std::vector<uint64_t>>
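Since the helper is just RE2::QuoteMeta plus alternation, its output is easy to sanity-check in isolation. A standalone sketch of the same pattern-building idea (join_quoted and the sample tokens are illustrative, not part of the library):

#include <iostream>
#include <string>
#include <vector>

#include "re2/re2.h"

// Build one alternation of RE2-escaped literals, mirroring the
// build_special_token_regex helper above; join_quoted is an
// illustrative name, not a library function.
std::string join_quoted(const std::vector<std::string>& tokens) {
  std::string pattern;
  for (const auto& tok : tokens) {
    if (!pattern.empty()) pattern += "|";
    // QuoteMeta escapes regex metacharacters so "<|eot|>" matches literally.
    pattern += re2::RE2::QuoteMeta(tok);
  }
  return pattern;
}

int main() {
  // FindAndConsume extracts capture groups, so wrap the alternation in one;
  // the helper itself emits the bare alternation.
  re2::RE2 re("(" + join_quoted({"<|endoftext|>", "<|fim_prefix|>"}) + ")");
  re2::StringPiece input("foo<|endoftext|>bar<|fim_prefix|>baz");
  re2::StringPiece match;
  while (RE2::FindAndConsume(&input, re, &match)) {
    std::cout << "special token: " << match << "\n";
  }
  return 0;
}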
HFTokenizer (HFTokenizer::load): build the special-token regex while loading the special tokens.

@@ -69,6 +69,11 @@ Error HFTokenizer::load(const std::string& path) {
         special_tokens,
         [](const auto& it) -> std::string { return it.at("content"); },
         [](const auto& it) -> std::uint64_t { return it.at("id"); }));
+
+    // Create special token regex to help later with encoding.
+    special_token_regex_ = TK_UNWRAP(detail::build_special_token_regex(special_token_map));
+
+    // Store for future use.
     special_token_map_.emplace(std::move(special_token_map));
   } catch (const json::out_of_range& e) {
     fprintf(stderr, "Could not parse special tokens: %s\n", e.what());
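The added comment states the intent: the regex lets encoding treat special tokens atomically instead of running BPE across them. A hypothetical sketch of such a split-encode loop (every name here — encode_with_specials, special_id, bpe — is an assumption for illustration, not the library's actual encode path):

#include <cstdint>
#include <functional>
#include <string>
#include <vector>

#include "re2/re2.h"

// Hypothetical use of the special-token regex during encoding: emit the id
// of each special token verbatim and run ordinary BPE only on the text
// between matches. All names are illustrative assumptions.
std::vector<uint64_t> encode_with_specials(
    const std::string& text,
    const re2::RE2& special_re,  // pattern shaped like "(tokA|tokB|...)"
    const std::function<uint64_t(re2::StringPiece)>& special_id,
    const std::function<void(re2::StringPiece, std::vector<uint64_t>&)>& bpe) {
  std::vector<uint64_t> ids;
  re2::StringPiece rest(text);
  re2::StringPiece match;
  const char* cursor = rest.data();
  while (RE2::FindAndConsume(&rest, special_re, &match)) {
    // Ordinary text before the special token goes through BPE.
    bpe(re2::StringPiece(cursor, match.data() - cursor), ids);
    ids.push_back(special_id(match));  // the special token maps to one id
    cursor = rest.data();              // continue scanning after the match
  }
  bpe(re2::StringPiece(cursor, text.data() + text.size() - cursor), ids);
  return ids;
}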
Tiktoken (Tiktoken::load): delete the local _build_special_token_regex copy and the now-unneeded re2 include, and call the shared detail helper instead.

@@ -32,7 +32,6 @@
 #include <fstream>
 #include <limits>
 #include <unordered_set>
-#include "re2/re2.h"
 
 namespace tokenizers {
 
@@ -47,20 +46,6 @@ static Result<std::unique_ptr<IRegex>> _create_regex(
   return create_regex(pattern);
 }
 
-static Result<std::unique_ptr<IRegex>> _build_special_token_regex(
-    const std::vector<std::pair<std::string, std::uint64_t>>& special_encoder) {
-  std::string special_pattern;
-  for (const auto& ele : special_encoder) {
-    if (!special_pattern.empty()) {
-      special_pattern += "|";
-    }
-    special_pattern += re2::RE2::QuoteMeta(ele.first);
-  }
-  if (special_pattern.empty()) {
-    return static_cast<std::unique_ptr<IRegex>>(nullptr);
-  }
-  return _create_regex(special_pattern);
-}
 
 static Result<std::pair<std::string, uint64_t>> _parse(
     const std::string& line) {

@@ -153,7 +138,7 @@ Error Tiktoken::load(const std::string& path) {
 
   _regex = TK_UNWRAP(_create_regex(_pattern));
   special_token_regex_ =
-      TK_UNWRAP(_build_special_token_regex(special_token_map));
+      TK_UNWRAP(detail::build_special_token_regex(TokenMap(special_token_map)));
 
   // initialize vocab_size, bos_tok, eos_tok
   vocab_size_ = token_map_->size() + special_token_map_->size();
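Both call sites lean on TK_UNWRAP to propagate failures out of load(). A plausible reconstruction of that idiom, assuming a Result API with ok()/error()/get() (the real macro lives in the library's error-handling header; this sketch only shows the semantics):

#include <utility>

// Assumed semantics of TK_UNWRAP, simplified: evaluate an expression that
// yields Result<T>; on error, return the error from the enclosing function
// (which must itself return an Error/Result), otherwise yield the value.
// Written as a GNU statement expression; named _SKETCH to make clear it is
// a reconstruction, not the library's macro.
#define TK_UNWRAP_SKETCH(expr)   \
  ({                             \
    auto res__ = (expr);         \
    if (!res__.ok()) {           \
      return res__.error();      \
    }                            \
    std::move(res__.get());      \
  })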