Skip to content

Commit 0b89fe4

Browse files
committed
feat: Update llama.cpp
1 parent 80be68a commit 0b89fe4

File tree

2 files changed

+7
-5
lines changed

2 files changed

+7
-5
lines changed

llama_cpp/llama_cpp.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1483,10 +1483,12 @@ def llama_model_size(model: llama_model_p, /) -> int:
14831483

14841484

14851485
# // Get the default chat template. Returns nullptr if not available
1486-
# LLAMA_API const char * llama_model_chat_template(const struct llama_model * model);
1487-
@ctypes_function("llama_model_chat_template", [llama_model_p_ctypes], ctypes.c_char_p)
1488-
def llama_model_chat_template(model: llama_model_p, /) -> Optional[bytes]:
1489-
"""Get the default chat template. Returns None if not available"""
1486+
# // If name is NULL, returns the default chat template
1487+
# LLAMA_API const char * llama_model_chat_template(const struct llama_model * model, const char * name);
1488+
@ctypes_function("llama_model_chat_template", [llama_model_p_ctypes, ctypes.c_char_p], ctypes.c_char_p)
1489+
def llama_model_chat_template(model: llama_model_p, name: Optional[bytes], /) -> Optional[bytes]:
1490+
"""Get the default chat template. Returns None if not available
1491+
If name is None, returns the default chat template"""
14901492
...
14911493

14921494

vendor/llama.cpp

0 commit comments

Comments (0)