Skip to content

Commit 52bd79a

Browse files
committed
fix: have proxy in a dedicated package (avoid dependency on server deps)
1 parent 6bc8292 commit 52bd79a

File tree

5 files changed

+7
-5
lines changed

5 files changed

+7
-5
lines changed
+1-2
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
 from .llama_chat_model import LlamaChatModel
-from .llama_proxy_chat_model import LlamaProxyChatModel
 
-__all__ = ["LlamaChatModel", "LlamaProxyChatModel"]
+__all__ = ["LlamaChatModel"]
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
+from .llama_proxy_chat_model import LlamaProxyChatModel
+
+__all__ = ["LlamaProxyChatModel"]

tests/test_functional/test_llama_proxy.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
 import pytest
 from langchain_core.messages import AIMessage, HumanMessage
 
-from langchain_llamacpp_chat_model import LlamaProxyChatModel
+from langchain_llamacpp_chat_model.proxy import LlamaProxyChatModel
 
 from tests.test_functional.models_configuration import (
     create_llama_proxy,

tests/test_functional/test_stream.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ def llama(self, request) -> Llama:
 
     @pytest.fixture
     def instance(self, llama):
-        return LlamaChatModel(llama=llama)
+        return LlamaChatModel(llama=llama, temperature=0)
 
     def test_stream(self, instance: LlamaChatModel):
        stream = instance.stream("Say Hi!")
@@ -50,4 +50,4 @@ def test_conversation_memory(self, instance: LlamaChatModel):
         final_content += token.content
 
         assert len(final_content) > 0
-        assert "banana" in final_content
+        assert "banana" in final_content.lower()

0 commit comments

Comments (0)