Description
Error
Error code: 401 - {'error': {'message': 'Incorrect API key provided: None. You can find your API key at https://platform.openai.com/account/api-keys.', 'type': 'invalid_request_error', 'param': None, 'code': 'invalid_api_key'}}
Traceback (most recent call last):
File "/a0/agent.py", line 290, in monologue
prompt = await self.prepare_prompt(loop_data=self.loop_data)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/a0/agent.py", line 374, in prepare_prompt
await self.call_extensions("message_loop_prompts_after", loop_data=loop_data)
File "/a0/agent.py", line 725, in call_extensions
await cls(agent=self).execute(**kwargs)
File "/a0/python/extensions/message_loop_prompts_after/_91_recall_wait.py", line 13, in execute
await task
File "/usr/lib/python3.11/asyncio/futures.py", line 287, in __await__
yield self # This tells Task to wait for completion.
^^^^^^^^^^
File "/usr/lib/python3.11/asyncio/tasks.py", line 339, in __wakeup
future.result()
File "/usr/lib/python3.11/asyncio/futures.py", line 203, in result
raise self._exception.with_traceback(self._exception_tb)
File "/usr/lib/python3.11/asyncio/tasks.py", line 267, in __step
result = coro.send(None)
^^^^^^^^^^^^^^^
File "/a0/python/extensions/message_loop_prompts_after/_50_recall_memories.py", line 60, in search_memories
query = await self.agent.call_utility_model(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/a0/agent.py", line 579, in call_utility_model
async for chunk in (prompt | model).astream({}):
File "/opt/venv/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 3465, in astream
async for chunk in self.atransform(input_aiter(), config, **kwargs):
File "/opt/venv/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 3447, in atransform
async for chunk in self._atransform_stream_with_config(
File "/opt/venv/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 2322, in _atransform_stream_with_config
chunk = await coro_with_context(py_anext(iterator), context)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/lib/python3.11/asyncio/futures.py", line 287, in __await__
yield self # This tells Task to wait for completion.
^^^^^^^^^^
File "/usr/lib/python3.11/asyncio/tasks.py", line 339, in __wakeup
future.result()
>>> 15 stack lines skipped <<<
File "/opt/venv/lib/python3.11/site-packages/langchain_core/language_models/chat_models.py", line 591, in astream
async for chunk in self._astream(
File "/opt/venv/lib/python3.11/site-packages/langchain_openai/chat_models/base.py", line 2025, in _astream
async for chunk in super()._astream(*args, **kwargs):
File "/opt/venv/lib/python3.11/site-packages/langchain_openai/chat_models/base.py", line 890, in _astream
response = await self.async_client.create(**payload)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/venv/lib/python3.11/site-packages/openai/resources/chat/completions/completions.py", line 2028, in create
return await self._post(
^^^^^^^^^^^^^^^^^
File "/opt/venv/lib/python3.11/site-packages/openai/_base_client.py", line 1742, in post
return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/venv/lib/python3.11/site-packages/openai/_base_client.py", line 1549, in request
raise self._make_status_error_from_response(err.response) from None
openai.AuthenticationError: Error code: 401 - {'error': {'message': 'Incorrect API key provided: None. You can find your API key at https://platform.openai.com/account/api-keys.', 'type': 'invalid_request_error', 'param': None, 'code': 'invalid_api_key'}}
Error
Text
Error code: 401 - {'error': {'message': 'Incorrect API key provided: None. You can find your API key at https://platform.openai.com/account/api-keys.', 'type': 'invalid_request_error', 'param': None, 'code': 'invalid_api_key'}}
Copy
Traceback (most recent call last):
Traceback (most recent call last):
File "/a0/agent.py", line 290, in monologue
prompt = await self.prepare_prompt(loop_data=self.loop_data)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/a0/agent.py", line 374, in prepare_prompt
await self.call_extensions("message_loop_prompts_after", loop_data=loop_data)
File "/a0/agent.py", line 725, in call_extensions
await cls(agent=self).execute(**kwargs)
File "/a0/python/extensions/message_loop_prompts_after/_91_recall_wait.py", line 13, in execute
await task
File "/usr/lib/python3.11/asyncio/futures.py", line 287, in __await__
yield self # This tells Task to wait for completion.
^^^^^^^^^^
File "/usr/lib/python3.11/asyncio/tasks.py", line 339, in __wakeup
future.result()
File "/usr/lib/python3.11/asyncio/futures.py", line 203, in result
raise self._exception.with_traceback(self._exception_tb)
File "/usr/lib/python3.11/asyncio/tasks.py", line 267, in __step
result = coro.send(None)
^^^^^^^^^^^^^^^
File "/a0/python/extensions/message_loop_prompts_after/_50_recall_memories.py", line 60, in search_memories
query = await self.agent.call_utility_model(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/a0/agent.py", line 579, in call_utility_model
async for chunk in (prompt | model).astream({}):
File "/opt/venv/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 3465, in astream
async for chunk in self.atransform(input_aiter(), config, **kwargs):
File "/opt/venv/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 3447, in atransform
async for chunk in self._atransform_stream_with_config(
File "/opt/venv/lib/python3.11/site-packages/langchain_core/runnables/base.py", line 2322, in _atransform_stream_with_config
chunk = await coro_with_context(py_anext(iterator), context)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/lib/python3.11/asyncio/futures.py", line 287, in __await__
yield self # This tells Task to wait for completion.
^^^^^^^^^^
File "/usr/lib/python3.11/asyncio/tasks.py", line 339, in __wakeup
future.result()
>>> 15 stack lines skipped <<<
File "/opt/venv/lib/python3.11/site-packages/langchain_core/language_models/chat_models.py", line 591, in astream
async for chunk in self._astream(
File "/opt/venv/lib/python3.11/site-packages/langchain_openai/chat_models/base.py", line 2025, in _astream
async for chunk in super()._astream(*args, **kwargs):
File "/opt/venv/lib/python3.11/site-packages/langchain_openai/chat_models/base.py", line 890, in _astream
response = await self.async_client.create(**payload)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/venv/lib/python3.11/site-packages/openai/resources/chat/completions/completions.py", line 2028, in create
return await self._post(
^^^^^^^^^^^^^^^^^
File "/opt/venv/lib/python3.11/site-packages/openai/_base_client.py", line 1742, in post
return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/venv/lib/python3.11/site-packages/openai/_base_client.py", line 1549, in request
raise self._make_status_error_from_response(err.response) from None
openai.AuthenticationError: Error code: 401 - {'error': {'message': 'Incorrect API key provided: None. You can find your API key at https://platform.openai.com/account/api-keys.', 'type': 'invalid_request_error', 'param': None, 'code': 'invalid_api_key'}}