Hi all!
I am trying to run the example code from https://cua.ai/docs/agent-sdk/agent-loops, but in this case using an Ollama model.
```python
from agent import ComputerAgent
import asyncio
from computer import Computer

async def take_screenshot():
    async with Computer(
        os_type="linux",
        provider_type="cloud",
        name="your-sandbox-name",
        api_key="your-api-key"
    ) as computer:
        agent = ComputerAgent(
            model="anthropic/claude-3-5-sonnet-20241022",
            tools=[computer],
            max_trajectory_budget=5.0
        )
        messages = [{"role": "user", "content": "Take a screenshot and tell me what you see"}]
        async for result in agent.run(messages):
            for item in result["output"]:
                if item["type"] == "message":
                    print(item["content"][0]["text"])

if __name__ == "__main__":
    asyncio.run(take_screenshot())
```
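For the Ollama runs, the only change from the snippet above is the `model` string. A minimal sketch of what that change looks like (the model tag here is illustrative, not the exact one I used; the traceback below shows the request going through litellm's `ollama_chat` route and the qwen agent loop):

```python
# Same ComputerAgent setup, pointed at a local Ollama model instead.
# "ollama_chat/qwen2.5-vl" is an illustrative tag; I have tried several
# vision-capable models with the same result.
agent = ComputerAgent(
    model="ollama_chat/qwen2.5-vl",
    tools=[computer],
    max_trajectory_budget=5.0
)
```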
I use uv for the virtual environment, with the following dependencies:

```
"cua-agent[all]>=0.4.47",
"cua-computer>=0.4.12",
"cua-computer-server>=0.1.30",
"cua-som>=0.1.3",
"pywinsandbox>=1.4.0",
```
I am running on Windows locally. When I run the code I get the following error (I have tested with multiple Ollama models and nothing changes). What do you think is going on here?
The error:
```
Traceback (most recent call last):
  File "C:\Users\marc\\.venv\Lib\site-packages\litellm\llms\custom_httpx\llm_http_handler.py", line 148, in _make_common_async_call
    response = await async_httpx_client.post(
               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\marc\\.venv\Lib\site-packages\litellm\litellm_core_utils\logging_utils.py", line 190, in async_wrapper
    result = await func(*args, **kwargs)
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\marc\\.venv\Lib\site-packages\litellm\llms\custom_httpx\http_handler.py", line 403, in post
    raise e
  File "C:\Users\marc\\.venv\Lib\site-packages\litellm\llms\custom_httpx\http_handler.py", line 359, in post
    response.raise_for_status()
  File "C:\Users\marc\\.venv\Lib\site-packages\httpx\_models.py", line 829, in raise_for_status
    raise HTTPStatusError(message, request=request, response=self)
httpx.HTTPStatusError: Client error '400 Bad Request' for url 'http://localhost:11434/api/chat'
For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/400

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "C:\Users\marc\\.venv\Lib\site-packages\litellm\main.py", line 599, in acompletion
    response = await init_response
               ^^^^^^^^^^^^^^^^^^^
  File "C:\Users\marc\\.venv\Lib\site-packages\litellm\llms\custom_httpx\llm_http_handler.py", line 280, in async_completion
    response = await self._make_common_async_call(
               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\marc\\.venv\Lib\site-packages\litellm\llms\custom_httpx\llm_http_handler.py", line 173, in _make_common_async_call
    raise self._handle_error(e=e, provider_config=provider_config)
          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\marc\\.venv\Lib\site-packages\litellm\llms\custom_httpx\llm_http_handler.py", line 3582, in _handle_error
    raise provider_config.get_error_class(
litellm.llms.ollama.common_utils.OllamaError: {"error":"illegal base64 data at input byte 4"}

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "C:\Users\marc\\src\tf\agents\tf_use\agent3.py", line 26, in <module>
    asyncio.run(take_screenshot())
  File "C:\Users\mdomenei\AppData\Roaming\uv\python\cpython-3.12.0-windows-x86_64-none\Lib\asyncio\runners.py", line 194, in run
    return runner.run(main)
           ^^^^^^^^^^^^^^^^
  File "C:\Users\mdomenei\AppData\Roaming\uv\python\cpython-3.12.0-windows-x86_64-none\Lib\asyncio\runners.py", line 118, in run
    return self._loop.run_until_complete(task)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\mdomenei\AppData\Roaming\uv\python\cpython-3.12.0-windows-x86_64-none\Lib\asyncio\base_events.py", line 664, in run_until_complete
    return future.result()
           ^^^^^^^^^^^^^^^
  File "C:\Users\marc\\src\tf\agents\tf_use\agent3.py", line 19, in take_screenshot
    async for result in agent.run(messages):
  File "C:\Users\marc\\.venv\Lib\site-packages\agent\agent.py", line 677, in run
    result = await self.agent_loop.predict_step(
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\marc\\.venv\Lib\site-packages\agent\loops\qwen.py", line 362, in predict_step
    response = await litellm.acompletion(**api_kwargs)
               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\marc\\.venv\Lib\site-packages\litellm\utils.py", line 1642, in wrapper_async
    raise e
  File "C:\Users\marc\\.venv\Lib\site-packages\litellm\utils.py", line 1488, in wrapper_async
    result = await original_function(*args, **kwargs)
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\marc\\.venv\Lib\site-packages\litellm\main.py", line 618, in acompletion
    raise exception_type(
          ^^^^^^^^^^^^^^^
  File "C:\Users\marc\\.venv\Lib\site-packages\litellm\litellm_core_utils\exception_mapping_utils.py", line 2328, in exception_type
    raise e
  File "C:\Users\marc\\.venv\Lib\site-packages\litellm\litellm_core_utils\exception_mapping_utils.py", line 2297, in exception_type
    raise APIConnectionError(
litellm.exceptions.APIConnectionError: litellm.APIConnectionError: Ollama_chatException - {"error":"illegal base64 data at input byte 4"}
```
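From the final `OllamaError` (`{"error":"illegal base64 data at input byte 4"}`), my guess is that the screenshot reaches Ollama as a `data:image/...;base64,` URL rather than as raw base64: byte 4 of `data:` is the `:`, which is not a valid base64 character. A minimal sketch that tries to isolate this outside the agent, assuming a local Ollama on the default port; the model tag and the suspicion itself are assumptions, not confirmed:

```python
import asyncio
import litellm

# A tiny 1x1 PNG as raw base64 (no "data:" prefix).
PNG_B64 = (
    "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJ"
    "AAAADUlEQVR42mP8z8BQDwAEhQGAhKmMIQAAAABJRU5ErkJggg=="
)

async def repro():
    messages = [{
        "role": "user",
        "content": [
            {"type": "text", "text": "What is in this image?"},
            # With the "data:" prefix, I suspect Ollama rejects the payload
            # with "illegal base64 data at input byte 4"; sending PNG_B64
            # alone would tell us whether the prefix is the problem.
            {
                "type": "image_url",
                "image_url": {"url": f"data:image/png;base64,{PNG_B64}"},
            },
        ],
    }]
    # "ollama_chat/qwen2.5-vl" is illustrative; any pulled vision model works here.
    response = await litellm.acompletion(
        model="ollama_chat/qwen2.5-vl",
        messages=messages,
    )
    print(response.choices[0].message.content)

if __name__ == "__main__":
    asyncio.run(repro())
```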