agent_ollama.py code:
import datetime
from zoneinfo import ZoneInfo
from google.adk.agents import Agent
from google.adk.agents import LlmAgent
from google.adk.models.lite_llm import LiteLlm
def get_weather(city: str) -> dict:
    """Retrieves the current weather report for a specified city.

    Args:
        city (str): The name of the city for which to retrieve the weather report.

    Returns:
        dict: status and result or error msg.
    """
    if city.lower() == "new york":
        return {
            "status": "success",
            "report": (
                "The weather in New York is sunny with a temperature of 25 degrees"
                " Celsius (77 degrees Fahrenheit)."
            ),
        }
    else:
        return {
            "status": "error",
            "error_message": f"Weather information for '{city}' is not available.",
        }


def get_current_time(city: str) -> dict:
    """Returns the current time in a specified city.

    Args:
        city (str): The name of the city for which to retrieve the current time.

    Returns:
        dict: status and result or error msg.
    """
    if city.lower() == "new york":
        tz_identifier = "America/New_York"
    else:
        return {
            "status": "error",
            "error_message": (
                f"Sorry, I don't have timezone information for {city}."
            ),
        }

    tz = ZoneInfo(tz_identifier)
    now = datetime.datetime.now(tz)
    report = (
        f'The current time in {city} is {now.strftime("%Y-%m-%d %H:%M:%S %Z%z")}'
    )
    return {"status": "success", "report": report}


root_agent = Agent(
    name="weather_time_agent",
    model=LiteLlm(model="openai/qwen3"),
    description=(
        "Agent to answer questions about the time and weather in a city."
    ),
    instruction=(
        "You are a helpful agent who can answer user questions about the time and weather in a city."
    ),
    tools=[get_weather, get_current_time],
)
__init__.py code:
from . import agent_ollama
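For context, the layout I am running adk web from looks roughly like this (the folder name ollama_agent is my assumption, taken from the module name in the error below; only these two files are in it):

ollama_agent/
    __init__.py        # contains the single line: from . import agent_ollama
    agent_ollama.py    # the module above that defines root_agent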
The error I am getting is a module import error, as follows:
Traceback (most recent call last):
File "E:\Python\LLM\google_adk.venv\Lib\site-packages\google\adk\cli\fast_api.py", line 696, in event_generator
runner = await _get_runner_async(req.app_name)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\Python\LLM\google_adk.venv\Lib\site-packages\google\adk\cli\fast_api.py", line 845, in _get_runner_async
root_agent = agent_loader.load_agent(app_name)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\Python\LLM\google_adk.venv\Lib\site-packages\google\adk\cli\utils\agent_loader.py", line 135, in load_agent
agent = self._perform_load(agent_name)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\Python\LLM\google_adk.venv\Lib\site-packages\google\adk\cli\utils\agent_loader.py", line 109, in _perform_load
root_agent = self._load_from_module_or_package(agent_name)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "E:\Python\LLM\google_adk.venv\Lib\site-packages\google\adk\cli\utils\agent_loader.py", line 67, in _load_from_module_or_package
raise ValueError(
ValueError: Module ollama_agent not found during import attempts.
When I use Google Gemini models with the same code, I do not get this error.
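For reference, the only thing I change in the working Gemini run is the model argument on the same Agent. A minimal sketch of that variant (the exact model id here is just an example, not necessarily the one I used) looks like this:

# Identical agent, but with a Gemini model string instead of LiteLlm/Ollama.
# With this variant, adk web loads the package without the "Module ... not found" error.
root_agent = Agent(
    name="weather_time_agent",
    model="gemini-2.0-flash",  # example Gemini model id, assumption for illustration
    description="Agent to answer questions about the time and weather in a city.",
    instruction=(
        "You are a helpful agent who can answer user questions about the time"
        " and weather in a city."
    ),
    tools=[get_weather, get_current_time],
)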