Skip to content

Commit c100aad

Browse files
committed
Bug fix: handle streaming chunks whose choices list is empty
1 parent f44a451 commit c100aad

File tree

1 file changed

+50
-1
lines changed

1 file changed

+50
-1
lines changed

llm_azure.py

+50-1
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,8 @@
33
import llm
44
import yaml
55
from llm import EmbeddingModel, hookimpl
6-
from llm.default_plugins.openai_models import Chat
6+
from llm.default_plugins.openai_models import Chat, combine_chunks
7+
from llm.utils import remove_dict_none_values
78
from openai import AzureOpenAI
89

910

@@ -74,6 +75,54 @@ def get_client(self):
7475
def __str__(self):
    """Return a human-readable label for this model, e.g. 'AzureOpenAI Completion: my-model'."""
    return f"AzureOpenAI Completion: {self.model_id}"
7677

78+
def execute(self, prompt, stream, response, conversation=None):
    """Run *prompt* against the Azure OpenAI chat completions API.

    Builds the chat ``messages`` list by replaying the conversation history
    (re-emitting a system message only when it changes between turns), then
    either streams delta content piece by piece or yields the whole reply at
    once.  The raw API payload is recorded on ``response.response_json`` and
    the outgoing messages on ``response._prompt_json``.
    """
    messages = []
    active_system = None
    history = conversation.responses if conversation is not None else []
    for prev in history:
        prev_system = prev.prompt.system
        # Only emit a system message when it differs from the last one sent.
        if prev_system and prev_system != active_system:
            messages.append({"role": "system", "content": prev_system})
            active_system = prev_system
        messages.append({"role": "user", "content": prev.prompt.prompt})
        messages.append({"role": "assistant", "content": prev.text()})
    if prompt.system and prompt.system != active_system:
        messages.append({"role": "system", "content": prompt.system})
    messages.append({"role": "user", "content": prompt.prompt})
    response._prompt_json = {"messages": messages}

    kwargs = self.build_kwargs(prompt)
    client = self.get_client()
    if stream:
        collected = []
        events = client.chat.completions.create(
            model=self.model_name or self.model_id,
            messages=messages,
            stream=True,
            **kwargs,
        )
        for event in events:
            collected.append(event)
            # Azure can send events whose choices list is empty; skip them,
            # since indexing choices[0] would raise.
            if not event.choices:
                continue
            delta = event.choices[0].delta.content
            if delta is not None:
                yield delta
        response.response_json = remove_dict_none_values(combine_chunks(collected))
    else:
        completion = client.chat.completions.create(
            model=self.model_name or self.model_id,
            messages=messages,
            stream=False,
            **kwargs,
        )
        response.response_json = remove_dict_none_values(completion.dict())
        yield completion.choices[0].message.content
125+
77126

78127
def config_dir():
79128
dir_path = llm.user_dir() / "azure"

0 commit comments

Comments
 (0)