diff --git a/example-apps/chatbot-rag-app/README.md b/example-apps/chatbot-rag-app/README.md
index c15b8867..f266bcf2 100644
--- a/example-apps/chatbot-rag-app/README.md
+++ b/example-apps/chatbot-rag-app/README.md
@@ -119,12 +119,12 @@ export GOOGLE_APPLICATION_CREDENTIALS=
 
 ### Mistral AI
 
-To use Mistral AI you need to set the following environment variables:
+To use Mistral AI you need to set the following environment variables. The app has been tested with the Mistral Large model deployed through Microsoft Azure. More information [here](https://learn.microsoft.com/en-us/azure/ai-studio/how-to/deploy-models-mistral).
 
 ```
 export LLM_TYPE=mistral
 export MISTRAL_API_KEY=...
-export MISTRAL_API_ENDPOINT=... # optional
+export MISTRAL_API_ENDPOINT=... # should be of the form https://..inference.ai.azure.com
 export MISTRAL_MODEL=... # optional
 ```
 
diff --git a/example-apps/chatbot-rag-app/api/llm_integrations.py b/example-apps/chatbot-rag-app/api/llm_integrations.py
index b73e2cad..d297bc03 100644
--- a/example-apps/chatbot-rag-app/api/llm_integrations.py
+++ b/example-apps/chatbot-rag-app/api/llm_integrations.py
@@ -64,7 +64,7 @@ def init_bedrock(temperature):
 def init_mistral_chat(temperature):
     MISTRAL_API_ENDPOINT = os.getenv("MISTRAL_API_ENDPOINT")
     MISTRAL_API_KEY = os.getenv("MISTRAL_API_KEY")
-    MISTRAL_MODEL = os.getenv("MISTRAL_MODEL")
+    MISTRAL_MODEL = os.getenv("MISTRAL_MODEL", "Mistral-large")
     kwargs = {
         "mistral_api_key": MISTRAL_API_KEY,
         "temperature": temperature,
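
For context, a minimal sketch of how the changed `init_mistral_chat` might read once this default lands. The env-var handling and the first two kwargs come from the hunk above; the `ChatMistralAI` import, the `model`/`endpoint` kwargs, and the return statement are assumptions about the surrounding code that this diff does not show.

```python
# Sketch only: assumes the app builds its Mistral client with langchain's
# ChatMistralAI; the constructor call and remaining kwargs are not in the hunk.
import os

from langchain_mistralai.chat_models import ChatMistralAI


def init_mistral_chat(temperature):
    MISTRAL_API_ENDPOINT = os.getenv("MISTRAL_API_ENDPOINT")
    MISTRAL_API_KEY = os.getenv("MISTRAL_API_KEY")
    # New default from this change: fall back to "Mistral-large" when
    # MISTRAL_MODEL is unset, matching the Azure deployment named in the README.
    MISTRAL_MODEL = os.getenv("MISTRAL_MODEL", "Mistral-large")
    kwargs = {
        "mistral_api_key": MISTRAL_API_KEY,
        "temperature": temperature,
        "model": MISTRAL_MODEL,  # assumed kwarg; not shown in the hunk
    }
    if MISTRAL_API_ENDPOINT:
        # e.g. the Azure inference URL described in the README
        kwargs["endpoint"] = MISTRAL_API_ENDPOINT  # assumed kwarg
    return ChatMistralAI(**kwargs)
```

With `MISTRAL_MODEL` unset, `os.getenv` now returns `"Mistral-large"`, which is why the README can keep that variable marked optional while the endpoint variable becomes the Azure-specific one.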