Skip to content

Commit 568a749

Browse files
srini047 and mpangrazzi
authored
feat: add run_async support for OllamaTextEmbedder (#1877)
* feat: add run_async support for OllamaTextEmbedder * fix: address review comments (use one async client per embedder obj) --------- Co-authored-by: Michele Pangrazzi <xmikex83@gmail.com>
1 parent 6253bd6 commit 568a749

File tree

3 files changed

+41
-2
lines changed

3 files changed

+41
-2
lines changed

integrations/ollama/pyproject.toml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,7 @@ installer = "uv"
5050
dependencies = [
5151
"coverage[toml]>=6.5",
5252
"pytest",
53+
"pytest-asyncio",
5354
"pytest-rerunfailures",
5455
"haystack-pydoc-tools",
5556
]
@@ -158,7 +159,10 @@ exclude_lines = ["no cov", "if __name__ == .__main__.:", "if TYPE_CHECKING:"]
158159
markers = [
159160
"integration: marks tests as slow (deselect with '-m \"not integration\"')",
160161
]
162+
log_cli = true
161163
addopts = ["--import-mode=importlib"]
164+
asyncio_mode = "auto"
165+
asyncio_default_fixture_loop_scope = "class"
162166

163167
[[tool.mypy.overrides]]
164168
module = ["haystack.*", "haystack_integrations.*", "pytest.*", "ollama.*", "pydantic.*"]

integrations/ollama/src/haystack_integrations/components/embedders/ollama/text_embedder.py

Lines changed: 23 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
from haystack import component
44

5-
from ollama import Client
5+
from ollama import AsyncClient, Client
66

77

88
@component
@@ -46,6 +46,7 @@ def __init__(
4646
self.model = model
4747

4848
self._client = Client(host=self.url, timeout=self.timeout)
49+
self._async_client = AsyncClient(host=self.url, timeout=self.timeout)
4950

5051
@component.output_types(embedding=List[float], meta=Dict[str, Any])
5152
def run(self, text: str, generation_kwargs: Optional[Dict[str, Any]] = None):
@@ -66,3 +67,24 @@ def run(self, text: str, generation_kwargs: Optional[Dict[str, Any]] = None):
6667
result["meta"] = {"model": self.model}
6768

6869
return result
70+
71+
@component.output_types(embedding=List[float], meta=Dict[str, Any])
async def run_async(self, text: str, generation_kwargs: Optional[Dict[str, Any]] = None):
    """
    Compute an embedding for the given text with an Ollama model, without blocking the event loop.

    :param text:
        The input string to convert into an embedding.
    :param generation_kwargs:
        Optional model options forwarded to the Ollama endpoint (e.g. temperature, top_p). See the
        [Ollama docs](https://github.com/jmorganca/ollama/blob/main/docs/modelfile.md#valid-parameters-and-values).
    :returns: A dictionary with the following keys:
        - `embedding`: The computed embeddings
        - `meta`: The metadata collected during the embedding process
    """
    # Reuses the AsyncClient created once per embedder instance in __init__.
    response = await self._async_client.embeddings(model=self.model, prompt=text, options=generation_kwargs)
    output = response.model_dump()
    output["meta"] = {"model": self.model}
    return output

integrations/ollama/tests/test_text_embedder.py

Lines changed: 14 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,20 @@ def test_model_not_found(self):
3737
def test_run(self):
    """Integration check: embedding a short string yields a float vector plus model metadata."""
    embedder = OllamaTextEmbedder(model="nomic-embed-text")

    reply = embedder.run(text="hello")

    assert isinstance(reply, dict)
    assert all(isinstance(value, float) for value in reply["embedding"])
    assert reply["meta"]["model"] == "nomic-embed-text"
46+
47+
@pytest.mark.asyncio
48+
@pytest.mark.integration
49+
async def test_run_async(self):
50+
embedder = OllamaTextEmbedder(model="nomic-embed-text")
51+
52+
text = "hello"
53+
reply = await embedder.run_async(text=text)
4154

4255
assert isinstance(reply, dict)
4356
assert all(isinstance(element, float) for element in reply["embedding"])

0 commit comments

Comments
 (0)