Skip to content

Commit 30e88c1

Browse files
authored
Merge pull request #412 from LlmKira/dev
[BUG] Solved the problem of global credentials overwriting personal credentials
2 parents a1865b4 + 546ad8b commit 30e88c1

File tree

5 files changed

+8
-6
lines changed

5 files changed

+8
-6
lines changed

app/middleware/llm_task.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -226,7 +226,10 @@ async def request_openai(
226226
# TODO:实现消息时序切片
227227
# 日志
228228
logger.info(
229-
f"[x] Openai request" f"\n--message {len(messages)} " f"\n--tools {tools}"
229+
f"Request Details:"
230+
f"\n--message {len(messages)} "
231+
f"\n--tools {tools} "
232+
f"\n--model {credential.api_model}"
230233
)
231234
for msg in messages:
232235
if isinstance(msg, UserMessage):

app/receiver/function.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -260,7 +260,7 @@ async def run_pending_task(task: TaskHeader, pending_task: ToolCall):
260260
if len(task.task_sign.tool_calls_pending) == 0:
261261
if not has_been_called_recently(userid=task.receiver.uid, n_seconds=3):
262262
credentials = await read_user_credential(user_id=task.receiver.uid)
263-
if global_credential:
263+
if global_credential and not credentials:
264264
credentials = global_credential
265265
logic = LLMLogic(
266266
api_key=credentials.api_key,

app/receiver/receiver_client.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -246,7 +246,7 @@ async def _flash(
246246
try:
247247
try:
248248
credentials = await read_user_credential(user_id=task.receiver.uid)
249-
if global_credential:
249+
if global_credential and not credentials:
250250
credentials = global_credential
251251
assert credentials, "You need to /login first"
252252
llm_result = await llm.request_openai(

llmkira/kv_manager/instruction.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,10 +4,9 @@
44
from llmkira.kv_manager._base import KvManager
55

66
DEFAULT_INSTRUCTION = (
7-
"[ASSISTANT RULE]"
7+
"instruction: "
88
"SPEAK IN MORE CUTE STYLE, No duplication answer, CALL USER MASTER, REPLY IN USER "
99
"LANGUAGE, ACT STEP BY STEP"
10-
"[RULE END]"
1110
)
1211

1312

llmkira/openai/request.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -168,7 +168,7 @@ def make_url(base_url: str):
168168
def check_vision(self):
169169
if not self.model.startswith(VISION):
170170
logger.info(
171-
"Try to remove the image content part from the messages, because the model is not supported."
171+
f"Try to remove the image content part from the messages, because the model is not supported {self.model}"
172172
)
173173
for message in self.messages:
174174
if isinstance(message, UserMessage) and isinstance(

0 commit comments

Comments (0)