We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 0d3fda2 · commit 6b6dbff · Copy full SHA for 6b6dbff
pyproject.toml
@@ -12,15 +12,15 @@ dependencies = [
12
"ffmpy",
13
"fschat==0.2.36",
14
"infinity-emb[all]==0.0.73",
15
- "lmdeploy==0.7.0.post3",
+ "lmdeploy==0.7.1",
16
"loguru>=0.7.2",
17
"openai==1.55.3",
18
"setuptools==75.2.0",
19
"streamlit==1.39.0",
20
"torch==2.5.1",
21
"torchvision==0.20.1",
22
"transformers==4.48.2",
23
- "vllm==0.7.2",
+ "vllm==0.7.3",
24
"qwen_vl_utils",
25
"evalscope[perf]==0.10.1",
26
"modelscope==1.20.1",
@@ -34,8 +34,6 @@ override-dependencies = [
34
35
"triton",
36
"outlines==0.1.11",
37
- "pynvml==12.0.0" # 解决vllm==0.7.2的bug https://github.com/vllm-project/vllm/issues/12847,后面可去掉
38
-
39
]
40
41
[[tool.uv.index]]
0 commit comments