Commit f863d5d

Merge pull request #373 from ebolam/Model_Plugins

Making model backends respond to a specific type in the aiserver menu for now

2 parents 22b2a3f + abe07a2
File tree: 9 files changed (+21 −5 lines)
aiserver.py (+13 −5)

@@ -626,14 +626,20 @@ def UI_2_log_history(message):
 import importlib
 model_backend_code = {}
 model_backends = {}
+model_backend_type_crosswalk = {}
 for module in os.listdir("./modeling/inference_models"):
     if not os.path.isfile(os.path.join("./modeling/inference_models",module)) and module != '__pycache__':
         try:
             model_backend_code[module] = importlib.import_module('modeling.inference_models.{}.class'.format(module))
             model_backends[model_backend_code[module].model_backend_name] = model_backend_code[module].model_backend()
-            if 'disable' in vars(model_backends[model_backend_code[module].model_backend_name]):
-                if model_backends[model_backend_code[module].model_backend_name].disable:
-                    del model_backends[model_backend_code[module].model_backend_name]
+            if 'disable' in vars(model_backends[model_backend_code[module].model_backend_name]) and model_backends[model_backend_code[module].model_backend_name].disable:
+                del model_backends[model_backend_code[module].model_backend_name]
+            else:
+                if model_backend_code[module].model_backend_type in model_backend_type_crosswalk:
+                    model_backend_type_crosswalk[model_backend_code[module].model_backend_type].append(model_backend_code[module].model_backend_name)
+                else:
+                    model_backend_type_crosswalk[model_backend_code[module].model_backend_type] = [model_backend_code[module].model_backend_name]
+
         except Exception:
             logger.error("Model Backend {} failed to load".format(module))
             logger.error(traceback.format_exc())

@@ -6211,6 +6217,7 @@ def UI_2_load_model_button(data):
 @socketio.on('select_model')
 @logger.catch
 def UI_2_select_model(data):
+    global model_backend_type_crosswalk #No idea why I have to make this a global where I don't for model_backends...
     logger.debug("Clicked on model entry: {}".format(data))
     if data["name"] in model_menu and data['ismenu'] == "true":
         emit("open_model_load_menu", {"items": [{**item.to_json(), **{"menu":data["name"]}} for item in model_menu[data["name"]] if item.should_show()]})

@@ -6220,8 +6227,9 @@ def UI_2_select_model(data):
     valid_loaders = {}
     if data['id'] in [item.name for sublist in model_menu for item in model_menu[sublist]]:
         #Here if we have a model id that's in our menu, we explicitly use that backend
-        for model_backend in set([item.model_backend for sublist in model_menu for item in model_menu[sublist] if item.name == data['id']]):
-            valid_loaders[model_backend] = model_backends[model_backend].get_requested_parameters(data["name"], data["path"] if 'path' in data else None, data["menu"])
+        for model_backend_type in set([item.model_backend for sublist in model_menu for item in model_menu[sublist] if item.name == data['id']]):
+            for model_backend in model_backend_type_crosswalk[model_backend_type]:
+                valid_loaders[model_backend] = model_backends[model_backend].get_requested_parameters(data["name"], data["path"] if 'path' in data else None, data["menu"])
         emit("selected_model_info", {"model_backends": valid_loaders})
     else:
         #Here we have a model that's not in our menu structure (either a custom model or a custom path)
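In plain terms, model_backend_type_crosswalk is a type → backend-names multimap built once at import time. A minimal, self-contained sketch of the same append-or-create pattern follows; the SimpleNamespace stand-ins and sample names are illustrative, not the real backend modules:

from types import SimpleNamespace

# Stand-ins for the imported backend modules (illustrative only).
modules = [
    SimpleNamespace(model_backend_name="Huggingface", model_backend_type="Huggingface"),
    SimpleNamespace(model_backend_name="Huggingface MTJ", model_backend_type="Huggingface"),
    SimpleNamespace(model_backend_name="Horde", model_backend_type="Horde"),
]

model_backend_type_crosswalk = {}
for mod in modules:
    # Same append-or-create logic as the diff above.
    if mod.model_backend_type in model_backend_type_crosswalk:
        model_backend_type_crosswalk[mod.model_backend_type].append(mod.model_backend_name)
    else:
        model_backend_type_crosswalk[mod.model_backend_type] = [mod.model_backend_name]

print(model_backend_type_crosswalk)
# {'Huggingface': ['Huggingface', 'Huggingface MTJ'], 'Horde': ['Horde']}

dict.setdefault(type, []).append(name) would collapse the branch, but the diff keeps the explicit form.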

modeling/inference_models/api/class.py (+1)

@@ -19,6 +19,7 @@
 )

 model_backend_name = "KoboldAI API"
+model_backend_type = "KoboldAI API" #This should be a generic name in case multiple model backends are compatible (think Hugging Face Custom and Basic Hugging Face)

 class APIException(Exception):
     """To be used for errors when using the Kobold API as an interface."""

modeling/inference_models/basic_api/class.py (+1)

@@ -17,6 +17,7 @@

 model_backend_name = "KoboldAI Old Colab Method"
+model_backend_type = "KoboldAI Old Colab Method" #This should be a generic name in case multiple model backends are compatible (think Hugging Face Custom and Basic Hugging Face)

 class BasicAPIException(Exception):
     """To be used for errors when using the Basic API as an interface."""

modeling/inference_models/generic_hf_torch/class.py (+1)

@@ -23,6 +23,7 @@
 from modeling.inference_models.hf_torch import HFTorchInferenceModel

 model_backend_name = "Huggingface"
+model_backend_type = "Huggingface" #This should be a generic name in case multiple model backends are compatible (think Hugging Face Custom and Basic Hugging Face)

 class model_backend(HFTorchInferenceModel):
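Each backend's class.py now exposes two module-level strings, and the loader in aiserver.py reads both. A sketch of that contract with placeholder names ("My Backend" and the stub class body are assumptions, not the real HFTorchInferenceModel subclass):

model_backend_name = "My Backend"       # unique display name; keys model_backends
model_backend_type = "My Backend Type"  # generic family name; keys the crosswalk

class model_backend:
    def __init__(self):
        # Optional flag; the loader deletes backends whose instances set this to True.
        self.disable = False

    def get_requested_parameters(self, model_name, model_path, menu_path):
        # Stub; real backends return their load-time settings from here.
        return []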
modeling/inference_models/gooseai/class.py (+1)

@@ -15,6 +15,7 @@
 from modeling.inference_models.openai_gooseai import model_backend as openai_gooseai_model_backend

 model_backend_name = "GooseAI"
+model_backend_type = "GooseAI" #This should be a generic name in case multiple model backends are compatible (think Hugging Face Custom and Basic Hugging Face)

 class OpenAIAPIError(Exception):
     def __init__(self, error_type: str, error_message) -> None:

modeling/inference_models/hf_mtj/class.py (+1)

@@ -20,6 +20,7 @@
 from modeling.tokenizer import GenericTokenizer

 model_backend_name = "Huggingface MTJ"
+model_backend_type = "Huggingface" #This should be a generic name in case multiple model backends are compatible (think Hugging Face Custom and Basic Hugging Face)

 class model_backend(HFInferenceModel):
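hf_mtj is the one backend in this commit whose type differs from its name: both Hugging Face backends declare the "Huggingface" type, so selecting a Huggingface menu entry now surfaces both loaders. From the diffs above, the resulting crosswalk would be:

model_backend_type_crosswalk = {
    "Huggingface": ["Huggingface", "Huggingface MTJ"],
    "Horde": ["Horde"],
    "OpenAI": ["OpenAI"],
    # ...plus one single-name entry for each remaining backend
}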

modeling/inference_models/horde/class.py (+1)

@@ -18,6 +18,7 @@
 )

 model_backend_name = "Horde"
+model_backend_type = "Horde" #This should be a generic name in case multiple model backends are compatible (think Hugging Face Custom and Basic Hugging Face)

 class HordeException(Exception):
     """To be used for errors on server side of the Horde."""

modeling/inference_models/openai/class.py (+1)

@@ -15,6 +15,7 @@
 from modeling.inference_models.openai_gooseai import model_backend as openai_gooseai_model_backend

 model_backend_name = "OpenAI"
+model_backend_type = "OpenAI" #This should be a generic name in case multiple model backends are compatible (think Hugging Face Custom and Basic Hugging Face)

 class OpenAIAPIError(Exception):
     def __init__(self, error_type: str, error_message) -> None:

modeling/inference_models/readonly/class.py (+1)

@@ -15,6 +15,7 @@
 )

 model_backend_name = "Read Only"
+model_backend_type = "Read Only" #This should be a generic name in case multiple model backends are compatible (think Hugging Face Custom and Basic Hugging Face)

 class BasicAPIException(Exception):
     """To be used for errors when using the Basic API as an interface."""
