Skip to content

Commit 4039999

Browse files
committed
Autodetect llamacpp_HF loader when tokenizer exists
1 parent 76d28ea commit 4039999

File tree

2 files changed: +4 −2 lines changed

modules/models.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -257,7 +257,7 @@ def llamacpp_HF_loader(model_name):
257257
path = Path(f'{shared.args.model_dir}/{model_name}')
258258

259259
# Check if a HF tokenizer is available for the model
260-
if all((path / file).exists() for file in ['tokenizer.json', 'tokenizer_config.json']):
260+
if all((path / file).exists() for file in ['tokenizer_config.json']):
261261
logger.info(f'Using tokenizer from: \"{path}\"')
262262
else:
263263
logger.error("Could not load the model because a tokenizer in Transformers format was not found.")

modules/models_settings.py

+3-1
Original file line numberDiff line numberDiff line change
@@ -153,6 +153,8 @@ def infer_loader(model_name, model_settings):
153153
loader = 'ExLlamav2_HF'
154154
elif (path_to_model / 'quant_config.json').exists() or re.match(r'.*-awq', model_name.lower()):
155155
loader = 'AutoAWQ'
156+
elif len(list(path_to_model.glob('*.gguf'))) > 0 and path_to_model.is_dir() and (path_to_model / 'tokenizer_config.json').exists():
157+
loader = 'llamacpp_HF'
156158
elif len(list(path_to_model.glob('*.gguf'))) > 0:
157159
loader = 'llama.cpp'
158160
elif re.match(r'.*\.gguf', model_name.lower()):
@@ -225,7 +227,7 @@ def apply_model_settings_to_state(model, state):
225227
loader = model_settings.pop('loader')
226228

227229
# If the user is using an alternative loader for the same model type, let them keep using it
228-
if not (loader == 'ExLlamav2_HF' and state['loader'] in ['GPTQ-for-LLaMa', 'ExLlamav2', 'AutoGPTQ']) and not (loader == 'llama.cpp' and state['loader'] in ['llamacpp_HF', 'ctransformers']):
230+
if not (loader == 'ExLlamav2_HF' and state['loader'] in ['GPTQ-for-LLaMa', 'ExLlamav2', 'AutoGPTQ']) and not (loader == 'llama.cpp' and state['loader'] in ['ctransformers']):
229231
state['loader'] = loader
230232

231233
for k in model_settings:

0 commit comments

Comments (0)