Skip to content

Commit c75f724

Browse files
committed
fix docstrings
1 parent 760c2e9 commit c75f724

File tree

2 files changed

+10
-5
lines changed

2 files changed

+10
-5
lines changed

llama_cpp/llama.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -570,6 +570,8 @@ def tokenize(
570570
571571
Args:
572572
text: The utf-8 encoded string to tokenize.
573+
add_bos: Whether to add a beginning of sequence token.
574+
special: Whether to tokenize special tokens.
573575
574576
Raises:
575577
RuntimeError: If the tokenization failed.
@@ -586,7 +588,8 @@ def detokenize(
586588
587589
Args:
588590
tokens: The list of tokens to detokenize.
589-
prev_tokens: The list of previous tokens. Offset mapping will be performed if provided
591+
prev_tokens: The list of previous tokens. Offset mapping will be performed if provided.
592+
special: Whether to detokenize special tokens.
590593
591594
Returns:
592595
The detokenized string.

llama_cpp/llama_tokenizer.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -19,9 +19,10 @@ def tokenize(
1919
"""Tokenize the text into tokens.
2020
2121
Args:
22-
text: The text to tokenize.
22+
text: The utf-8 encoded string to tokenize.
2323
add_bos: Whether to add a beginning of sequence token.
24-
special: Whether to tokenize text literally or as special tokens."""
24+
special: Whether to tokenize special tokens.
25+
"""
2526
raise NotImplementedError
2627

2728
@abc.abstractmethod
@@ -31,8 +32,9 @@ def detokenize(
3132
"""Detokenize the tokens into text.
3233
3334
Args:
34-
tokens: The tokens to detokenize.
35-
prev_tokens: If tokens is a continuation of a previous sequence, the previous tokens.
35+
tokens: The list of tokens to detokenize.
36+
prev_tokens: The list of previous tokens. Offset mapping will be performed if provided.
37+
special: Whether to detokenize special tokens.
3638
"""
3739
raise NotImplementedError
3840

0 commit comments

Comments
 (0)