We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 9992c50 · commit 11d9562 — Copy full SHA for 11d9562
llama_cpp/llama.py
@@ -1519,15 +1519,15 @@ def logit_bias_processor(
1519
1520
if stream:
1521
remaining_tokens = completion_tokens[returned_tokens:]
1522
- all_text = self.detokenize(
+ remaining_text = self.detokenize(
1523
remaining_tokens,
1524
prev_tokens=prompt_tokens + completion_tokens[:returned_tokens],
1525
)
1526
- any_stop = [s for s in stop_sequences if s in all_text]
+ any_stop = [s for s in stop_sequences if s in remaining_text]
1527
if len(any_stop) > 0:
1528
- end = min(all_text.index(stop) for stop in any_stop)
+ end = min(remaining_text.index(stop) for stop in any_stop)
1529
else:
1530
- end = len(all_text)
+ end = len(remaining_text)
1531
1532
token_end_position = 0
1533
for token in remaining_tokens:
0 commit comments