@@ -43,7 +43,7 @@ def convert_token_to_string(self, token):
     regex_str = "[1-9]"
 
     with pytest.raises(ValueError, match="The vocabulary"):
-        RegexGuide(regex_str, MockTokenizer())
+        RegexGuide.from_regex(regex_str, MockTokenizer())
 
 
 def test_regex():
@@ -57,7 +57,7 @@ def convert_token_to_string(self, token):
 
     regex_str = "[1-9]"
     tokenizer = MockTokenizer()
-    fsm = RegexGuide(regex_str, tokenizer)
+    fsm = RegexGuide.from_regex(regex_str, tokenizer)
 
     assert fsm.states_to_token_maps == {0: {1: 1}}
 
@@ -98,7 +98,7 @@ def convert_token_to_string(self, token):
 
     regex_str = "[😁-😎]"
     tokenizer = MockTokenizer()
-    fsm = RegexGuide(regex_str, tokenizer)
+    fsm = RegexGuide.from_regex(regex_str, tokenizer)
 
     assert fsm.states_to_token_maps == {
         0: {5: 1, 4: 2},
@@ -145,7 +145,7 @@ def convert_token_to_string(self, token):
 
     regex_str = " [😁-😎]"
     tokenizer = MockTokenizer()
-    fsm = RegexGuide(regex_str, tokenizer)
+    fsm = RegexGuide.from_regex(regex_str, tokenizer)
 
     assert fsm.states_to_token_maps == {
         0: {5: 1, 10: 2},
@@ -180,7 +180,7 @@ def convert_token_to_string(self, token):
 
     regex_str = r"`\n(\.\n)?`\n"
     tokenizer = MockTokenizer()
-    fsm = RegexGuide(regex_str, tokenizer)
+    fsm = RegexGuide.from_regex(regex_str, tokenizer)
 
     state = fsm.get_next_state(state=4, token_id=103)
     assert state == 5
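
For context, a minimal sketch of the call change exercised by every hunk above, assuming the MockTokenizer test double defined earlier in this test module and the outlines.fsm.guide import path (both outside this diff):

from outlines.fsm.guide import RegexGuide

regex_str = "[1-9]"
tokenizer = MockTokenizer()  # test double from this module, assumed in scope

# Before this change the guide was built with the class constructor:
#     fsm = RegexGuide(regex_str, tokenizer)
# Construction now goes through the from_regex classmethod instead:
fsm = RegexGuide.from_regex(regex_str, tokenizer)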