2 files changed: +3 -3 lines changed

@@ -3352,7 +3352,7 @@ class MiniCPMv26ChatHandler(Llava15ChatHandler):
     )


-class PaligemmaChatHandler(Llava15ChatHandler):
+class PaliGemmaChatHandler(Llava15ChatHandler):
     def __call__(
         self,
         *,
@@ -175,14 +175,14 @@ def load_llama_from_model_settings(settings: ModelSettings) -> llama_cpp.Llama:
         assert settings.clip_model_path is not None, "clip model not found"
         if settings.hf_model_repo_id is not None:
             chat_handler = (
-                llama_cpp.llama_chat_format.PaligemmaChatHandler.from_pretrained(
+                llama_cpp.llama_chat_format.PaliGemmaChatHandler.from_pretrained(
                     repo_id=settings.hf_model_repo_id,
                     filename=settings.clip_model_path,
                     verbose=settings.verbose,
                 )
             )
         else:
-            chat_handler = llama_cpp.llama_chat_format.PaligemmaChatHandler(
+            chat_handler = llama_cpp.llama_chat_format.PaliGemmaChatHandler(
                 clip_model_path=settings.clip_model_path, verbose=settings.verbose
             )
     elif settings.chat_format == "hf-autotokenizer":
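For orientation, a minimal usage sketch of the renamed handler follows. It assumes PaliGemmaChatHandler keeps the construction interface visible in the diff above (clip_model_path/verbose, plus from_pretrained(repo_id=..., filename=...)) and accepts the Llava15-style multimodal chat messages of its base class; all file paths, the repo id, and the image URL are placeholders, not values taken from this PR.

# Hypothetical usage sketch (not part of this PR's diff); placeholder paths and ids.
import llama_cpp

# Option A: load the mmproj/CLIP model from a local file.
chat_handler = llama_cpp.llama_chat_format.PaliGemmaChatHandler(
    clip_model_path="./mmproj-paligemma-f16.gguf",  # placeholder path
    verbose=False,
)

# Option B: fetch it from a Hugging Face repo, mirroring the server code path above.
# chat_handler = llama_cpp.llama_chat_format.PaliGemmaChatHandler.from_pretrained(
#     repo_id="some-user/some-paligemma-gguf",  # placeholder repo id
#     filename="mmproj-paligemma-f16.gguf",     # placeholder filename
#     verbose=False,
# )

llm = llama_cpp.Llama(
    model_path="./paligemma-3b-q4_k_m.gguf",  # placeholder path
    chat_handler=chat_handler,
    n_ctx=2048,  # leave room for the image embedding tokens
)

result = llm.create_chat_completion(
    messages=[
        {
            "role": "user",
            "content": [
                {"type": "image_url", "image_url": {"url": "https://example.com/cat.png"}},
                {"type": "text", "text": "Describe this image."},
            ],
        }
    ],
)
print(result["choices"][0]["message"]["content"])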