llms.py
import bittensor as bt
from openai import AsyncOpenAI

from webgenie.constants import (
    LLM_MODEL_ID,
    LLM_API_KEY,
    LLM_MODEL_URL,
)

# Fail fast if the LLM configuration is incomplete.
if not LLM_API_KEY or not LLM_MODEL_URL or not LLM_MODEL_ID:
    raise Exception("LLM_API_KEY, LLM_MODEL_URL, and LLM_MODEL_ID must be set")

# Shared async client pointed at the configured OpenAI-compatible endpoint.
client = AsyncOpenAI(
    api_key=LLM_API_KEY,
    base_url=LLM_MODEL_URL,
)


async def openai_call(messages, response_format, deterministic=True, retries=3):
    """Call the LLM and parse the response into `response_format`.

    Retries up to `retries` times on any error. Uses temperature 0 when
    `deterministic` is True, otherwise 0.7.
    """
    for _ in range(retries):
        try:
            completion = await client.beta.chat.completions.parse(
                model=LLM_MODEL_ID,
                messages=messages,
                response_format=response_format,
                temperature=0 if deterministic else 0.7,
            )
            return completion.choices[0].message.parsed
        except Exception as e:
            bt.logging.warning(f"Error calling OpenAI: {e}")
            continue
    raise Exception("Failed to call OpenAI")
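A minimal usage sketch of openai_call, assuming a caller that defines its own Pydantic model as the structured-output schema. The model class, field names, and prompt below are hypothetical and not part of this file.

# --- Usage sketch (illustrative only; HtmlAnswer and the prompt are assumptions) ---
# import asyncio
# from pydantic import BaseModel
#
# class HtmlAnswer(BaseModel):
#     html: str
#
# async def main():
#     # openai_call returns an instance of the response_format model.
#     result = await openai_call(
#         messages=[{"role": "user", "content": "Return a minimal HTML page."}],
#         response_format=HtmlAnswer,
#         deterministic=True,
#     )
#     print(result.html)
#
# asyncio.run(main())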