Skip to content

Commit c4b1ccc

Browse files
committed
fix 'function_call' and 'tool_calls' depending on 'functions' and 'tools', incompatibility with python 3.8
1 parent e265448 commit c4b1ccc

File tree

1 file changed

+16
-5
lines changed

1 file changed

+16
-5
lines changed

llama_cpp/llama_chat_format.py

Lines changed: 16 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -2078,11 +2078,21 @@ def create_completion(stop):
20782078
completion_text = completion["choices"][0]["text"]
20792079
completion_tokens += completion["usage"]["completion_tokens"]
20802080
if function_name == "all":
2081-
content += completion_text.removesuffix("\n<|from|>assistant\n").removesuffix("\n<|from|> assistant\n")
2081+
if completion_text.endswith("\n<|from|>assistant\n"):
2082+
content += completion_text[:-len("\n<|from|>assistant\n")]
2083+
elif completion_text.endswith("\n<|from|> assistant\n"):
2084+
content += completion_text[:-len("\n<|from|> assistant\n")]
2085+
else:
2086+
content += completion_text
20822087
content = content.lstrip()
20832088
# Check whether the model wants to generate another turn
20842089
if "<|from|> assistant" in completion_text or "<|from|>assistant" in completion_text:
2085-
cleaned_completion_text = completion_text.removesuffix("\n<|from|>assistant\n").removesuffix("\n<|from|> assistant\n").strip()
2090+
if completion_text.endswith("\n<|from|>assistant\n"):
2091+
cleaned_completion_text = completion_text[:-len("\n<|from|>assistant\n")].strip()
2092+
elif completion_text.endswith("\n<|from|> assistant\n"):
2093+
cleaned_completion_text = completion_text[:-len("\n<|from|> assistant\n")].strip()
2094+
else:
2095+
cleaned_completion_text = completion_text.strip()
20862096
prompt += f"{cleaned_completion_text}\n<|from|>assistant\n<|recipient|>"
20872097
else:
20882098
break
@@ -2125,8 +2135,9 @@ def create_completion(stop):
21252135
"function_call": {
21262136
"name": tool_calls[0]["function"]["name"],
21272137
"arguments": tool_calls[0]["function"]["arguments"],
2128-
}
2129-
} if len(tool_calls) == 1 else {}
2138+
} if len(tool_calls) > 0 and tools is None else None,
2139+
"tool_calls": tool_calls if len(tool_calls) > 0 and tools is not None else None,
2140+
}
21302141
completion["usage"]["completion_tokens"] = completion_tokens
21312142
return llama_types.CreateChatCompletionResponse(
21322143
id="chat" + completion["id"],
@@ -2140,7 +2151,7 @@ def create_completion(stop):
21402151
"message": {
21412152
"role": "assistant",
21422153
"content": None if content == "" else content,
2143-
"tool_calls": tool_calls,
2154+
"tool_calls": tool_calls if tools is not None else None,
21442155
**function_call_dict,
21452156
},
21462157
"finish_reason": "tool_calls" if len(tool_calls) > 0 else "stop",

0 commit comments

Comments
 (0)