diff --git a/python/packages/autogen-agentchat/src/autogen_agentchat/agents/_assistant_agent.py b/python/packages/autogen-agentchat/src/autogen_agentchat/agents/_assistant_agent.py
index 0fad2d670fb..9be3adcdc99 100644
--- a/python/packages/autogen-agentchat/src/autogen_agentchat/agents/_assistant_agent.py
+++ b/python/packages/autogen-agentchat/src/autogen_agentchat/agents/_assistant_agent.py
@@ -72,6 +72,14 @@ class AssistantAgent(BaseChatAgent):
 
     the inner messages as they are created, and the :class:`~autogen_agentchat.base.Response`
     object as the last item before closing the generator.
+
+    .. note::
+
+        The caller must pass only the new messages to the agent on each call
+        to the :meth:`on_messages` or :meth:`on_messages_stream` method;
+        the agent maintains its state between these calls.
+        Do not pass the entire conversation history to the agent on each call.
+
     Tool call behavior:
 
     * If the model returns no tool call, then the response is immediately returned as a :class:`~autogen_agentchat.messages.TextMessage` in :attr:`~autogen_agentchat.base.Response.chat_message`.
diff --git a/python/packages/autogen-agentchat/src/autogen_agentchat/agents/_base_chat_agent.py b/python/packages/autogen-agentchat/src/autogen_agentchat/agents/_base_chat_agent.py
index 5b41d7bea33..c78c9adb90e 100644
--- a/python/packages/autogen-agentchat/src/autogen_agentchat/agents/_base_chat_agent.py
+++ b/python/packages/autogen-agentchat/src/autogen_agentchat/agents/_base_chat_agent.py
@@ -14,7 +14,26 @@
 
 
 class BaseChatAgent(ChatAgent, ABC):
-    """Base class for a chat agent."""
+    """Base class for a chat agent.
+
+    This abstract class provides a base implementation for a :class:`ChatAgent`.
+    To create a new chat agent, subclass this class and implement the
+    :meth:`on_messages` and :meth:`on_reset` methods and the
+    :attr:`produced_message_types` property. If streaming is required,
+    also implement the :meth:`on_messages_stream` method.
+
+    An agent is stateful: it maintains its state between calls to
+    :meth:`on_messages` or :meth:`on_messages_stream`, and it should store
+    that state on the agent instance. The agent should also implement the
+    :meth:`on_reset` method to reset itself to its initialization state.
+
+    .. note::
+
+        The caller should pass only the new messages to the agent on each call
+        to the :meth:`on_messages` or :meth:`on_messages_stream` method;
+        do not pass the entire conversation history on each call.
+        Follow this design principle when creating a new agent.
+ """ def __init__(self, name: str, description: str) -> None: self._name = name diff --git a/python/packages/autogen-core/docs/src/user-guide/agentchat-user-guide/tutorial/agents.ipynb b/python/packages/autogen-core/docs/src/user-guide/agentchat-user-guide/tutorial/agents.ipynb index 7f5edd8e4d0..70811b062bf 100644 --- a/python/packages/autogen-core/docs/src/user-guide/agentchat-user-guide/tutorial/agents.ipynb +++ b/python/packages/autogen-core/docs/src/user-guide/agentchat-user-guide/tutorial/agents.ipynb @@ -11,7 +11,7 @@ "\n", "- {py:attr}`~autogen_agentchat.agents.BaseChatAgent.name`: The unique name of the agent.\n", "- {py:attr}`~autogen_agentchat.agents.BaseChatAgent.description`: The description of the agent in text.\n", - "- {py:meth}`~autogen_agentchat.agents.BaseChatAgent.on_messages`: Send the agent a sequence of {py:class}`~autogen_agentchat.messages.ChatMessage` get a {py:class}`~autogen_agentchat.base.Response`.\n", + "- {py:meth}`~autogen_agentchat.agents.BaseChatAgent.on_messages`: Send the agent a sequence of {py:class}`~autogen_agentchat.messages.ChatMessage` get a {py:class}`~autogen_agentchat.base.Response`. **It is important to note that agents are expected to be stateful and this method is expected to be called with new messages, not the complete history**.\n", "- {py:meth}`~autogen_agentchat.agents.BaseChatAgent.on_messages_stream`: Same as {py:meth}`~autogen_agentchat.agents.BaseChatAgent.on_messages` but returns an iterator of {py:class}`~autogen_agentchat.messages.AgentEvent` or {py:class}`~autogen_agentchat.messages.ChatMessage` followed by a {py:class}`~autogen_agentchat.base.Response` as the last item.\n", "- {py:meth}`~autogen_agentchat.agents.BaseChatAgent.on_reset`: Reset the agent to its initial state.\n", "\n", @@ -106,8 +106,13 @@ "```{note}\n", "It is important to note that {py:meth}`~autogen_agentchat.agents.AssistantAgent.on_messages`\n", "will update the internal state of the agent -- it will add the messages to the agent's\n", - "history. So you should not repeatedly call this method with the same messages if you want to\n", - "carry on a conversation with the agent.\n", + "history. So you should call this method with new messages.\n", + "**You should not repeatedly call this method with the same messages or the complete history.**\n", + "```\n", + "\n", + "```{note}\n", + "Unlike in v0.2 AgentChat, the tools are executed by the same agent directly within\n", + "the same call to {py:meth}`~autogen_agentchat.agents.AssistantAgent.on_messages`.\n", "```\n", "\n", "## User Proxy Agent\n", @@ -141,7 +146,8 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "The User Proxy agent is ideally used for on-demand human-in-the-loop interactions for scenarios such as Just In Time approvals, human feedback, alerts, etc. For slower user interactions, consider terminating the session using a termination condition and start another one from run or run_stream with another message.\n", + "The User Proxy agent is ideally used for on-demand human-in-the-loop interactions for scenarios such as Just In Time approvals, human feedback, alerts, etc. 
For slower user interactions, consider terminating a team using a termination condition and start another one from\n", + "{py:meth}`~autogen_agentchat.base.TaskRunner.run` or {py:meth}`~autogen_agentchat.base.TaskRunner.run_stream` with another message.\n", "\n", "## Streaming Messages\n", "\n", diff --git a/python/packages/autogen-ext/test.py b/python/packages/autogen-ext/test.py deleted file mode 100644 index 4bd65eca2c5..00000000000 --- a/python/packages/autogen-ext/test.py +++ /dev/null @@ -1,21 +0,0 @@ -from autogen_core.components.models import LLMMessage -from autogen_core.models import ChatCompletionClient -from autogen_core.models import UserMessage - -client = ChatCompletionClient.load_component( - { - "provider": "openai_chat_completion_client", - "config": { - "model": "gpt-4o" - } - } -) - -# async def main(): -# print(await client.create([UserMessage(source="user", content="Hello")])) - -# import asyncio - -# asyncio.run(main()) - -print(client.dump_component().model_dump_json())
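
A minimal usage sketch of the calling pattern these docstring and notebook changes describe: pass only the new messages on each call to `on_messages`, and let the agent carry the conversation state between calls. The model client, model name, and import paths below (for example, `OpenAIChatCompletionClient` from `autogen_ext.models.openai` and `CancellationToken` from `autogen_core`) are assumptions based on the v0.4-style AgentChat API and may differ between preview releases.

```python
import asyncio

from autogen_agentchat.agents import AssistantAgent
from autogen_agentchat.messages import TextMessage
from autogen_core import CancellationToken  # import path assumed; may differ in older previews
from autogen_ext.models.openai import OpenAIChatCompletionClient  # import path assumed


async def main() -> None:
    model_client = OpenAIChatCompletionClient(model="gpt-4o")
    agent = AssistantAgent(name="assistant", model_client=model_client)

    # First turn: pass only the new user message; the agent adds it to its own history.
    response = await agent.on_messages(
        [TextMessage(content="Name a landmark in Paris.", source="user")],
        cancellation_token=CancellationToken(),
    )
    print(response.chat_message.content)

    # Second turn: again pass only the new message, not the whole conversation history.
    response = await agent.on_messages(
        [TextMessage(content="Tell me more about it.", source="user")],
        cancellation_token=CancellationToken(),
    )
    print(response.chat_message.content)


asyncio.run(main())
```

Likewise, a sketch of the subclassing contract the `BaseChatAgent` docstring spells out, shown here as a deliberately stateless echo agent. The method and property names come from the docstring; the exact signatures and type annotations are assumptions and may vary between releases.

```python
from typing import Sequence

from autogen_agentchat.agents import BaseChatAgent
from autogen_agentchat.base import Response
from autogen_agentchat.messages import ChatMessage, TextMessage
from autogen_core import CancellationToken  # import path assumed


class EchoAgent(BaseChatAgent):
    """Example agent that echoes the last message it receives."""

    @property
    def produced_message_types(self) -> Sequence[type[ChatMessage]]:
        # Declare the chat message types this agent can produce.
        return (TextMessage,)

    async def on_messages(self, messages: Sequence[ChatMessage], cancellation_token: CancellationToken) -> Response:
        # Only the new messages arrive here; echo the content of the last one.
        last = messages[-1]
        text = last.content if isinstance(last.content, str) else str(last.content)
        return Response(chat_message=TextMessage(content=text, source=self.name))

    async def on_reset(self, cancellation_token: CancellationToken) -> None:
        # This example keeps no state between calls, so there is nothing to reset.
        pass


# Construct it like any other chat agent, e.g. EchoAgent("echo", "Echoes the last user message.").
```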