Skip to content

Commit cbe9604

Browse files
committed
Fix for OpenAI server compatibility
1 parent 7f6a41a commit cbe9604

File tree

1 file changed

+9
-8
lines changed

1 file changed

+9
-8
lines changed

llama_cpp/llama_chat_format.py

Lines changed: 9 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -2131,13 +2131,15 @@ def create_completion(stop):
21312131
)
21322132

21332133
# TODO: support stream mode
2134-
function_call_dict: Union[Dict[str, str], Dict[Literal["function_call"], llama_types.ChatCompletionRequestAssistantMessageFunctionCall]] = {
2135-
"function_call": {
2136-
"name": tool_calls[0]["function"]["name"],
2137-
"arguments": tool_calls[0]["function"]["arguments"],
2138-
} if len(tool_calls) > 0 and tools is None else None,
2139-
"tool_calls": tool_calls if len(tool_calls) > 0 and tools is not None else None,
2140-
}
2134+
function_call_dict: Union[Dict[str, str], Dict[Literal["function_call"], llama_types.ChatCompletionRequestAssistantMessageFunctionCall]] = {}
2135+
if len(tool_calls) > 0:
2136+
if tools is not None:
2137+
function_call_dict["tool_calls"] = tool_calls
2138+
else:
2139+
function_call_dict["function_call"] = {
2140+
"name": tool_calls[0]["function"]["name"],
2141+
"arguments": tool_calls[0]["function"]["arguments"],
2142+
}
21412143
completion["usage"]["completion_tokens"] = completion_tokens
21422144
return llama_types.CreateChatCompletionResponse(
21432145
id="chat" + completion["id"],
@@ -2151,7 +2153,6 @@ def create_completion(stop):
21512153
"message": {
21522154
"role": "assistant",
21532155
"content": None if content == "" else content,
2154-
"tool_calls": tool_calls if tools is not None else None,
21552156
**function_call_dict,
21562157
},
21572158
"finish_reason": "tool_calls" if len(tool_calls) > 0 else "stop",

0 commit comments

Comments (0)