Use GPTAssistantAgent in group chat.
This notebook demonstrates how GPTAssistantAgent agents can converse with other agents through group chat.
AutoGen requires Python>=3.9. To run this notebook example, please install autogen:

pip install autogen
The LLMConfig.from_json method loads a list of configurations from an environment variable or a JSON file.
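For reference, OAI_CONFIG_LIST is a JSON file containing a list of model configurations. A minimal sketch is shown below; the model names and the API key placeholder are examples only, so substitute your own entries:

[
    {
        "model": "gpt-4",
        "api_key": "<your OpenAI API key here>"
    },
    {
        "model": "gpt-4-1106-preview",
        "api_key": "<your OpenAI API key here>"
    }
]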
import autogen
from autogen.agentchat import AssistantAgent
from autogen.agentchat.contrib.gpt_assistant_agent import GPTAssistantAgent
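# Load LLM configurations from the OAI_CONFIG_LIST file and keep only the GPT-4 family entries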
llm_config = autogen.LLMConfig.from_json(path="OAI_CONFIG_LIST", cache_seed=45).where(
model=["gpt-4", "gpt-4-1106-preview", "gpt-4-32k"]
)
# Define user proxy agent
user_proxy = autogen.UserProxyAgent(
name="User_proxy",
system_message="A human admin.",
code_execution_config={
"last_n_messages": 2,
"work_dir": "groupchat",
"use_docker": False,
}, # Please set use_docker=True if docker is available to run the generated code. Using docker is safer than running the generated code directly.
human_input_mode="TERMINATE",
)
# Define two GPTAssistantAgents
coder = GPTAssistantAgent(
name="Coder",
llm_config=llm_config,
instructions=AssistantAgent.DEFAULT_SYSTEM_MESSAGE,
)
analyst = GPTAssistantAgent(
name="Data_analyst",
instructions="You are a data analyst that offers insight into data.",
llm_config=llm_config,
)
# Define the group chat and its manager
groupchat = autogen.GroupChat(agents=[user_proxy, coder, analyst], messages=[], max_round=10)
manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=llm_config)
user_proxy.initiate_chat(
manager,
message="Get the number of issues and pull requests for the repository 'ag2ai/ag2' over the past three weeks and offer analysis to the data. You should print the data in csv format grouped by weeks.",
)
# type exit to terminate the chat