Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Bug]: Cannot execute async function when using nested_chats pattern #3083

Open
lfygh opened this issue Jul 5, 2024 · 0 comments
Open

[Bug]: Cannot execute async function when using nested_chats pattern #3083

lfygh opened this issue Jul 5, 2024 · 0 comments
Labels
bug Something isn't working

Comments

@lfygh
Copy link

lfygh commented Jul 5, 2024

Describe the bug

import asyncio
import os
from typing import Annotated

import dotenv

import autogen
from autogen import ConversableAgent

dotenv.load_dotenv()

adder_agent = ConversableAgent(
    name="Adder_Agent",
    system_message="You add 1 to each number I give you and return me the new numbers, one number each line.",
    llm_config={
        "config_list": [{"model": "gpt-3.5-turbo", "api_key": os.environ["OPENAI_API_KEY"]}]},
    human_input_mode="NEVER",
)


async def multiplier(number: Annotated[int, "number"]):
    """Asynchronously double *number*; registered below as an LLM tool."""
    print("multiplier===============")
    doubled = 2 * number
    return doubled

# Agent intended to emit a tool call to the async `multiplier` function
# registered further below (it is the `caller` in register_function).
multiplier_agent = ConversableAgent(
    name="Multiplier_Agent",
    system_message="use function to calculate",
    llm_config={"config_list": [{"model": "gpt-4o", "api_key": os.environ["OPENAI_API_KEY"]}]},
    human_input_mode="NEVER",
)

# Outermost agent: its a_initiate_chat in main() starts the async conversation
# and is the trigger for assistant_agent's nested chat (see trigger lambda below).
user_proxy = ConversableAgent(
    name="User_Proxy",
    system_message="You are a helpful assistant",
    llm_config={"config_list": [{"model": "gpt-4o", "api_key": os.environ["OPENAI_API_KEY"]}]},
    human_input_mode="NEVER",
)

# Middle agent: receives the user_proxy message, and via the nested-chat
# registration below forwards the work to multiplier_agent.
assistant_agent = ConversableAgent(
    name="Assistant_Agent",
    system_message="You are a helpful assistant",
    llm_config={
        "config_list": [{"model": "gpt-3.5-turbo", "api_key": os.environ["OPENAI_API_KEY"]}]},
    human_input_mode="NEVER",
)
# Nested chat fires only when the sender is user_proxy. Per the traceback below,
# the nested-chat reply path runs the synchronous initiate_chats(), which later
# tries loop.run_until_complete(a_execute_function) while the outer asyncio loop
# started by asyncio.run(main()) is already running — the reported crash.
assistant_agent.register_nested_chats(
    chat_queue=[
        {
            "recipient": multiplier_agent
        }],
    trigger=lambda sender: sender in [user_proxy]
)

# Register the *async* tool: multiplier_agent proposes the call,
# assistant_agent executes it. The async executor inside a sync nested chat
# is what the issue is about.
autogen.register_function(
    multiplier,
    caller=multiplier_agent,
    executor=assistant_agent,
    name="multiplier",
    description="multiply number by 2",
)


async def main():
    """Start the async chat that reproduces the nested-chat/async-tool bug."""
    result = await user_proxy.a_initiate_chat(
        recipient=assistant_agent,
        message="my number is 5",
    )
    print(result)


if __name__ == "__main__":
    asyncio.run(main())





### Screenshots and logs

File "/Users/lfyg/.pyenv/versions/3.10.13/lib/python3.10/asyncio/runners.py", line 44, in run
    return loop.run_until_complete(main)
  File "/Users/lfyg/.pyenv/versions/3.10.13/lib/python3.10/asyncio/base_events.py", line 649, in run_until_complete
    return future.result()
  File "/Users/lfyg/IdeaProjects/autogen-main/samples/netest_error.py", line 70, in main
    r = await user_proxy.a_initiate_chat(
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 1085, in a_initiate_chat
    await self.a_send(msg2send, recipient, silent=silent)
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 706, in a_send
    await recipient.a_receive(message, self, request_reply, silent)
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 856, in a_receive
    reply = await self.a_generate_reply(sender=sender)
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 2047, in a_generate_reply
    final, reply = reply_func(self, messages=messages, sender=sender, config=reply_func_tuple["config"])
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 439, in wrapped_reply_func
    return reply_func_from_nested_chats(chat_queue, recipient, messages, sender, config)
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 403, in _summary_from_nested_chats
    res = initiate_chats(chat_to_run)
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/chat.py", line 202, in initiate_chats
    chat_res = sender.initiate_chat(**chat_info)
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 1019, in initiate_chat
    self.send(msg2send, recipient, silent=silent)
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 656, in send
    recipient.receive(message, self, request_reply, silent)
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 821, in receive
    self.send(reply, sender, silent=silent)
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 656, in send
    recipient.receive(message, self, request_reply, silent)
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 819, in receive
    reply = self.generate_reply(messages=self.chat_messages[sender], sender=sender)
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 1973, in generate_reply
    final, reply = reply_func(self, messages=messages, sender=sender, config=reply_func_tuple["config"])
  File "/Users/lfyg/IdeaProjects/autogen-main/autogen/agentchat/conversable_agent.py", line 1625, in generate_tool_calls_reply
    _, func_return = loop.run_until_complete(self.a_execute_function(function_call))
  File "/Users/lfyg/.pyenv/versions/3.10.13/lib/python3.10/asyncio/base_events.py", line 625, in run_until_complete
    self._check_running()
  File "/Users/lfyg/.pyenv/versions/3.10.13/lib/python3.10/asyncio/base_events.py", line 584, in _check_running
    raise RuntimeError('This event loop is already running')
RuntimeError: This event loop is already running
sys:1: RuntimeWarning: coroutine 'ConversableAgent.a_execute_function' was never awaited

Additional Information

No response

@lfygh lfygh added the bug Something isn't working label Jul 5, 2024
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
bug Something isn't working
1 participant