
Commit 1a40080

Lint
1 parent ff353bc commit 1a40080

File tree

1 file changed, +2 −2 lines changed


llama_cpp/llama_cache.py

Lines changed: 2 additions & 2 deletions
@@ -41,7 +41,7 @@ def __contains__(self, key: Sequence[int]) -> bool:
 
     @abstractmethod
     def __setitem__(
-        self, key: Sequence[int], value: "llama_cpp.llama.LlamaState"
+        self, key: Sequence[int], value: "llama_cpp.llama.LlamaState",
     ) -> None:
         raise NotImplementedError
 
@@ -105,7 +105,7 @@ class LlamaDiskCache(BaseLlamaCache):
     """Cache for a llama.cpp model using disk."""
 
     def __init__(
-        self, cache_dir: str = ".cache/llama_cache", capacity_bytes: int = (2 << 30)
+        self, cache_dir: str = ".cache/llama_cache", capacity_bytes: int = (2 << 30),
     ):
         super().__init__(capacity_bytes)
         self.cache = diskcache.Cache(cache_dir)
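For context, the second hunk only adds a trailing comma; the constructor defaults are unchanged. A minimal usage sketch of the disk cache with those defaults, assuming llama-cpp-python's Llama constructor and set_cache method (neither is part of this commit, and the model path is hypothetical):

# Minimal sketch, not part of this commit: build the disk-backed cache with the
# defaults shown in the hunk above and attach it to a model so evaluated prompt
# state can be reused across calls. Llama(...) and set_cache(...) are assumed
# from the surrounding llama-cpp-python API; the model path is hypothetical.
from llama_cpp import Llama
from llama_cpp.llama_cache import LlamaDiskCache

llm = Llama(model_path="./models/model.gguf")
cache = LlamaDiskCache(
    cache_dir=".cache/llama_cache",   # default from the diff
    capacity_bytes=(2 << 30),         # default from the diff: 2 GiB
)
llm.set_cache(cache)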

0 commit comments
