Skip to content

Commit e1cc07d

Browse files
committed
fix doc strings
1 parent 212f3f7 commit e1cc07d

File tree

2 files changed

+10
-5
lines changed

2 files changed

+10
-5
lines changed

llama_cpp/llama.py

Lines changed: 4 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -578,6 +578,8 @@ def tokenize(
578578
579579
Args:
580580
text: The utf-8 encoded string to tokenize.
581+
add_bos: Whether to add a beginning of sequence token.
582+
special: Whether to tokenize special tokens.
581583
582584
Raises:
583585
RuntimeError: If the tokenization failed.
@@ -594,7 +596,8 @@ def detokenize(
594596
595597
Args:
596598
tokens: The list of tokens to detokenize.
597-
prev_tokens: The list of previous tokens. Offset mapping will be performed if provided
599+
prev_tokens: The list of previous tokens. Offset mapping will be performed if provided.
600+
special: Whether to detokenize special tokens.
598601
599602
Returns:
600603
The detokenized string.

llama_cpp/llama_tokenizer.py

Lines changed: 6 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -19,9 +19,10 @@ def tokenize(
1919
"""Tokenize the text into tokens.
2020
2121
Args:
22-
text: The text to tokenize.
22+
text: The utf-8 encoded string to tokenize.
2323
add_bos: Whether to add a beginning of sequence token.
24-
special: Whether to tokenize text literally or as special tokens."""
24+
special: Whether to tokenize special tokens.
25+
"""
2526
raise NotImplementedError
2627

2728
@abc.abstractmethod
@@ -31,8 +32,9 @@ def detokenize(
3132
"""Detokenize the tokens into text.
3233
3334
Args:
34-
tokens: The tokens to detokenize.
35-
prev_tokens: If tokens is a continuation of a previous sequence, the previous tokens.
35+
tokens: The list of tokens to detokenize.
36+
prev_tokens: The list of previous tokens. Offset mapping will be performed if provided.
37+
special: Whether to detokenize special tokens.
3638
"""
3739
raise NotImplementedError
3840

0 commit comments

Comments (0)