After running docker-compose up --build, I get the following error.
I can access the client in the browser, but the agent does not respond.
Attaching to xrx-client, xrx-orchestrator, xrx-reasoning, xrx-redis, xrx-stt, xrx-tts
xrx-reasoning | 2025-01-04 00:26:17,759 INFO:Initializing LLM client.
xrx-reasoning | 2025-01-04 00:26:17,759 INFO:LLM API KEY : ***************
xrx-reasoning | 2025-01-04 00:26:17,759 INFO:LLM Base URL: https://api.groq.com/openai/v1
xrx-reasoning | 2025-01-04 00:26:17,759 INFO:LLM Model ID: llama3-70b-8192
xrx-reasoning | 2025-01-04 00:26:17,759 INFO:LLM Observability Library: none
xrx-reasoning | Traceback (most recent call last):
xrx-reasoning |   File "/usr/local/bin/uvicorn", line 8, in <module>
xrx-reasoning |     sys.exit(main())
xrx-reasoning |   File "/usr/local/lib/python3.10/site-packages/click/core.py", line 1161, in __call__
xrx-reasoning |     return self.main(*args, **kwargs)
xrx-reasoning |   File "/usr/local/lib/python3.10/site-packages/click/core.py", line 1082, in main
xrx-reasoning |     rv = self.invoke(ctx)
xrx-reasoning |   File "/usr/local/lib/python3.10/site-packages/click/core.py", line 1443, in invoke
xrx-reasoning |     return ctx.invoke(self.callback, **ctx.params)
xrx-reasoning |   File "/usr/local/lib/python3.10/site-packages/click/core.py", line 788, in invoke
xrx-reasoning |     return __callback(*args, **kwargs)
xrx-reasoning |   File "/usr/local/lib/python3.10/site-packages/uvicorn/main.py", line 410, in main
xrx-reasoning |     run(
xrx-reasoning |   File "/usr/local/lib/python3.10/site-packages/uvicorn/main.py", line 577, in run
xrx-reasoning |     server.run()
xrx-reasoning |   File "/usr/local/lib/python3.10/site-packages/uvicorn/server.py", line 65, in run
xrx-reasoning |     return asyncio.run(self.serve(sockets=sockets))
xrx-reasoning |   File "/usr/local/lib/python3.10/asyncio/runners.py", line 44, in run
xrx-reasoning |     return loop.run_until_complete(main)
xrx-reasoning |   File "uvloop/loop.pyx", line 1518, in uvloop.loop.Loop.run_until_complete
xrx-reasoning |   File "/usr/local/lib/python3.10/site-packages/uvicorn/server.py", line 69, in serve
xrx-reasoning |     await self._serve(sockets)
xrx-reasoning |   File "/usr/local/lib/python3.10/site-packages/uvicorn/server.py", line 76, in _serve
xrx-reasoning |     config.load()
xrx-reasoning |   File "/usr/local/lib/python3.10/site-packages/uvicorn/config.py", line 434, in load
xrx-reasoning |     self.loaded_app = import_from_string(self.app)
xrx-reasoning |   File "/usr/local/lib/python3.10/site-packages/uvicorn/importer.py", line 19, in import_from_string
xrx-reasoning |     module = importlib.import_module(module_str)
xrx-reasoning |   File "/usr/local/lib/python3.10/importlib/__init__.py", line 126, in import_module
xrx-reasoning |     return _bootstrap._gcd_import(name[level:], package, level)
xrx-reasoning |   File "<frozen importlib._bootstrap>", line 1050, in _gcd_import
xrx-reasoning |   File "<frozen importlib._bootstrap>", line 1027, in _find_and_load
xrx-reasoning |   File "<frozen importlib._bootstrap>", line 1006, in _find_and_load_unlocked
xrx-reasoning |   File "<frozen importlib._bootstrap>", line 688, in _load_unlocked
xrx-reasoning |   File "<frozen importlib._bootstrap_external>", line 883, in exec_module
xrx-reasoning |   File "<frozen importlib._bootstrap>", line 241, in _call_with_frames_removed
xrx-reasoning |   File "/app/main.py", line 2, in <module>
xrx-reasoning |     from agent.executor import run_agent
xrx-reasoning |   File "/app/agent/executor.py", line 11, in <module>
xrx-reasoning |     client = initialize_llm_client()
xrx-reasoning |   File "/app/agent_framework/utils/llm.py", line 43, in initialize_llm_client
xrx-reasoning |     llm_client = OpenAI(api_key=LLM_API_KEY, base_url=LLM_BASE_URL)
xrx-reasoning |   File "/usr/local/lib/python3.10/site-packages/openai/_client.py", line 122, in __init__
xrx-reasoning |     super().__init__(
xrx-reasoning |   File "/usr/local/lib/python3.10/site-packages/openai/_base_client.py", line 846, in __init__
xrx-reasoning |     self._client = http_client or SyncHttpxClientWrapper(
xrx-reasoning |   File "/usr/local/lib/python3.10/site-packages/openai/_base_client.py", line 744, in __init__
xrx-reasoning |     super().__init__(**kwargs)
xrx-reasoning | TypeError: Client.__init__() got an unexpected keyword argument 'proxies'
xrx-reasoning exited with code 1
This is being caused by an old version of the openai client, similar to this issue. I'll upgrade the client.
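Until the upgraded client is published, a likely local workaround is to adjust the pinned dependencies in the reasoning container: either move the openai package to a release that no longer passes proxies to httpx, or hold httpx back below 0.28. A sketch of what such pins might look like (the file name and exact versions are assumptions about this repo, not confirmed in this thread):

    # requirements.txt (hypothetical pins; adjust to what the repo actually uses)
    openai>=1.55.3   # assumption: a late-2024 release that restored httpx 0.28 compatibility
    # or, keeping the older client, hold httpx back instead:
    # httpx<0.28

After changing the pins, rebuild with docker-compose up --build so the image picks up the new versions.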
Any updates on this? What's going on with XRX? It seems like the momentum has slowed over the last few months.
Apologies @claytonwinterbotham, I think I forgot to push. Just put up #33.