Skip to content

Commit 65fc95f

Browse files
committed
chore: make tests more robust
1 parent b2d5112 commit 65fc95f

File tree

4 files changed

+10
-8
lines changed

4 files changed

+10
-8
lines changed

Diff for: tests/test_functional/test_ainvoke.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ def llama(self, request) -> Llama:
2323

2424
@pytest.fixture
2525
def instance(self, llama):
26-
return LlamaChatModel(llama=llama)
26+
return LlamaChatModel(llama=llama, temperature=0)
2727

2828
@pytest.mark.asyncio
2929
async def test_ainvoke(self, instance: LlamaChatModel):
@@ -41,4 +41,4 @@ async def test_conversation_memory(self, instance: LlamaChatModel):
4141
]
4242
)
4343

44-
assert "banana" in result.content
44+
assert "banana" in result.content.lower()

Diff for: tests/test_functional/test_astream.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ def llama(self, request) -> Llama:
1616

1717
@pytest.fixture
1818
def instance(self, llama):
19-
return LlamaChatModel(llama=llama)
19+
return LlamaChatModel(llama=llama, temperature=0)
2020

2121
@pytest.mark.asyncio
2222
async def test_astream(self, instance: LlamaChatModel):
@@ -44,4 +44,4 @@ async def test_conversation_memory(self, instance: LlamaChatModel):
4444
final_content += token.content
4545

4646
assert len(final_content) > 0
47-
assert "banana" in final_content
47+
assert "banana" in final_content.lower()

Diff for: tests/test_functional/test_invoke.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ def llama(self, request) -> Llama:
2424

2525
@pytest.fixture
2626
def instance(self, llama):
27-
return LlamaChatModel(llama=llama)
27+
return LlamaChatModel(llama=llama, temperature=0)
2828

2929
def test_invoke(self, instance: LlamaChatModel):
3030
result = instance.invoke("Say Hi!")
@@ -45,7 +45,7 @@ def test_conversation_memory(self, instance: LlamaChatModel):
4545
]
4646
)
4747

48-
assert "banana" in result.content
48+
assert "banana" in result.content.lower()
4949

5050
def test_json_mode(self, instance: LlamaChatModel):
5151
structured_llm = instance.with_structured_output(Joke)

Diff for: tests/test_functional/test_llama_proxy.py

+4-2
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,9 @@ class TestLlamaProxyChat:
2222
)
2323
def instance(self, llama_proxy: LlamaProxy, request):
2424
return LlamaProxyChatModel(
25-
llama_proxy=llama_proxy, model_name=request.param["alias"]
25+
llama_proxy=llama_proxy,
26+
model_name=request.param["alias"],
27+
temperature=0,
2628
)
2729

2830
def test_conversation_memory(self, instance: LlamaProxyChatModel):
@@ -34,4 +36,4 @@ def test_conversation_memory(self, instance: LlamaProxyChatModel):
3436
]
3537
)
3638

37-
assert "banana" in result.content
39+
assert "banana" in result.content.lower()

0 commit comments

Comments (0)