from typing import Any, List, Optional

from langchain_openai import ChatOpenAI
from langchain_core.utils.function_calling import convert_to_openai_tool

from agent.llm import register_llm_client
from agent.llm.base import BaseLLMClient

from petercat_utils.data_class import MessageContent
from petercat_utils import get_env_variable

DASHSCOPE_API_KEY = get_env_variable("DASHSCOPE_API_KEY")


@register_llm_client("dashscope")
class DashScopeClient(BaseLLMClient):
    _client: ChatOpenAI

    def __init__(
        self,
        temperature: Optional[float] = 0.2,
        n: Optional[int] = 1,
        top_p: Optional[float] = None,
        max_tokens: Optional[int] = 1500,
        streaming: Optional[bool] = False,
        api_key: Optional[str] = DASHSCOPE_API_KEY,
    ):
        self._client = ChatOpenAI(
            model_name="deepseek-v3",
            temperature=temperature,
            n=n,
            top_p=top_p,
            streaming=streaming,
            max_tokens=max_tokens,
            openai_api_key=api_key,
            stream_usage=True,
            # DashScope's OpenAI-compatible base URL; the client appends the
            # /chat/completions path itself, so it must not be part of the base.
            openai_api_base="https://dashscope.aliyuncs.com/compatible-mode/v1",
        )

    def get_client(self):
        return self._client

    def get_tools(self, tools: List[Any]):
        return [convert_to_openai_tool(tool) for tool in tools]

    def parse_content(self, content: List[MessageContent]):
        return [c.model_dump() for c in content]
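

# A minimal usage sketch, assuming DASHSCOPE_API_KEY is set in the environment
# and that get_client() returns a standard LangChain chat model; the model name
# and prompt below are illustrative only.
if __name__ == "__main__":
    dashscope = DashScopeClient(temperature=0.2, streaming=False)
    llm = dashscope.get_client()
    # invoke() is the standard LangChain chat-model call; the response object's
    # .content attribute holds the reply text.
    print(llm.invoke("Hello from PeterCat").content)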