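"""Generation group for the multi-agent synthesis-planning pipeline.

Builds a group chat of scientist and critic agents driven by a custom
speaker-selection function, wires it to the retrieval group through a
nested chat, and exposes init_generate_group(), which returns the group
manager together with its admin agent.
"""
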
import os
from pathlib import Path

import autogen
from autogen.agentchat.contrib.capabilities.teachability import Teachability
from autogen.agentchat.contrib.capabilities.vision_capability import VisionCapability
from autogen.agentchat.contrib.multimodal_conversable_agent import MultimodalConversableAgent
from autogen.code_utils import create_virtual_env
from autogen.coding import LocalCommandLineCodeExecutor

from .constant import config_list, STREAM, SILENT, WORK_DIR, CACHE
from .retrieval_group import init_retrieval_group
from .utils import load_agent_configs


agent_configs = load_agent_configs(
    os.path.join(os.path.dirname(os.path.abspath(__file__)), "config/generate_group.yaml")
)
venv_context = create_virtual_env(WORK_DIR)
llm_config = {"config_list": config_list, "stream": STREAM, "cache_seed": CACHE}
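# In AutoGen, "cache_seed" keys the LLM response cache (None disables caching) and
# "stream" toggles token streaming; both come from .constant so they are set in one place.
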

def init_generate_group(outer_retrieval_agent, inner_retrieval_admin):
    """Assemble the generation group chat and return (inner_generate_admin, outer_generate_agent)."""

    # Admin proxy for the group: never asks for human input, executes returned code
    # locally in WORK_DIR (no Docker), and stops once a message contains "TERMINATE".
    outer_generate_agent = autogen.ConversableAgent(
        name="Outer_Generate_Admin",
        human_input_mode="NEVER",
        # human_input_mode="TERMINATE",
        code_execution_config={
            "work_dir": WORK_DIR,
            "use_docker": False,
        },
        is_termination_msg=lambda x: x.get("content", "").find("TERMINATE") >= 0,
        description="Outer_Generate_Admin",
        default_auto_reply="continue",
        max_consecutive_auto_reply=1,
    )

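    # The MultimodalConversableAgent scientists below exchange image+text messages,
    # so their "content" field is a list of parts; the termination check therefore
    # reads the text of the last content part rather than a plain string.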
    structure_scientist_name = "structure_scientist"
    structure_scientist = MultimodalConversableAgent(
        name=structure_scientist_name,
        system_message=agent_configs[structure_scientist_name]["system_message"],
        llm_config=llm_config,
        is_termination_msg=lambda x: x.get("content", "")[-1].get("text", "").find("TERMINATE") >= 0,
        human_input_mode="TERMINATE",
        description="structure_scientist",
        max_consecutive_auto_reply=1,
    )

    property_scientist_name = "property_scientist"
    property_scientist = MultimodalConversableAgent(
        name=property_scientist_name,
        system_message=agent_configs[property_scientist_name]["system_message"],
        llm_config=llm_config,
        is_termination_msg=lambda x: x.get("content", "")[-1].get("text", "").find("TERMINATE") >= 0,
        human_input_mode="TERMINATE",
        description="property_scientist",
        max_consecutive_auto_reply=1,
    )

    application_scientist_name = "application_scientist"
    application_scientist = MultimodalConversableAgent(
        name=application_scientist_name,
        system_message=agent_configs[application_scientist_name]["system_message"],
        llm_config=llm_config,
        is_termination_msg=lambda x: x.get("content", "")[-1].get("text", "").find("TERMINATE") >= 0,
        human_input_mode="TERMINATE",
        description="application_scientist",
        max_consecutive_auto_reply=1,
    )

    synthesis_scientist_name = "synthesis_scientist"
    synthesis_scientist = autogen.AssistantAgent(
        name=synthesis_scientist_name,
        system_message=agent_configs[synthesis_scientist_name]["system_message"],
        llm_config=llm_config,
        is_termination_msg=lambda x: x.get("content", "").find("TERMINATE") >= 0,
        human_input_mode="TERMINATE",
        description="synthesis_scientist",
        max_consecutive_auto_reply=3,
    )

    scheme_critic_name = "scheme_critic"
    scheme_critic = autogen.AssistantAgent(
        name=scheme_critic_name,
        system_message=agent_configs[scheme_critic_name]["system_message"],
        llm_config=llm_config,
        # is_termination_msg=lambda x: x.get("content", "").find("TERMINATE") >= 0,
        human_input_mode="TERMINATE",
        description="scheme_critic",
        max_consecutive_auto_reply=3,
    )

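    # Speaker-selection graph implemented below:
    #   Outer_Generate_Admin -> outer_retrieval_agent on the first message, otherwise auto-select;
    #   outer_retrieval_agent -> auto-selected scientist;
    #   synthesis_scientist  -> scheme_critic;
    #   scheme_critic        -> synthesis_scientist if it asks to OPTIMIZE, else back to the admin;
    #   any other scientist  -> back to the admin, appending TERMINATE if it is missing.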
    def state_transition(last_speaker, groupchat):
        messages = groupchat.messages

        if last_speaker is outer_generate_agent:
            # user -> retrieve
            if len(messages) <= 1:
                return outer_retrieval_agent
            else:
                return "auto"
        elif last_speaker is outer_retrieval_agent:
            # retrieve -> auto select scientist
            return "auto"

        # Synthesis-scheme questions: proposal / critique loop
        elif last_speaker is synthesis_scientist:
            return scheme_critic
        elif last_speaker is scheme_critic:
            groupchat.messages[-1]["content"] += "\n\nSYNTHESIS"
            if "OPTIMIZE" in messages[-1]["content"]:
                return synthesis_scientist
            elif "TERMINATE" not in messages[-1]["content"] and "OPTIMIZE" not in messages[-1]["content"]:
                groupchat.messages[-1]["content"] += "\n\nTERMINATE"
                return outer_generate_agent
            else:
                return outer_generate_agent

        else:
            if "TERMINATE" not in messages[-1]["content"] and "OPTIMIZE" not in messages[-1]["content"]:
                messages[-1]["content"] += "\n\nTERMINATE"
            return outer_generate_agent

    generate_group = autogen.GroupChat(
        agents=[
            outer_generate_agent,
            outer_retrieval_agent,
            structure_scientist,
            property_scientist,
            application_scientist,
            synthesis_scientist,
            scheme_critic,
        ],
        messages=[],
        speaker_selection_method=state_transition,  # custom speaker selection method
        max_round=10,
    )

    inner_generate_admin = autogen.GroupChatManager(
        name="Generate_Group_Admin",
        description="Generate_Group_Admin",
        groupchat=generate_group,
        # is_termination_msg=lambda x: x.get("content", "").find("TERMINATE") >= 0,
        llm_config=llm_config,
        system_message=agent_configs["admin"]["system_message"],
    )

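    # Nested chat: whenever inner_generate_admin addresses outer_retrieval_agent, the
    # query is forwarded to the retrieval group for a single turn and the last message
    # of that inner chat is returned as the reply (silenced according to SILENT).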
    outer_retrieval_agent.register_nested_chats(
        [
            {"recipient": inner_retrieval_admin, "max_turns": 1, "summary_method": "last_msg", "silent": SILENT},
        ],
        trigger=inner_generate_admin,
    )

    return inner_generate_admin, outer_generate_agent


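# Because of the relative imports above, run this file as a module
# (e.g. `python -m <package>.<this_module>`) rather than as a standalone script.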
if __name__ == "__main__":
    outer_retrieval_agent, inner_retrieval_admin = init_retrieval_group(WORK_DIR, venv_context)
    inner_generate_admin, outer_generate_agent = init_generate_group(outer_retrieval_agent, inner_retrieval_admin)

    outer_generate_agent.initiate_chat(
        inner_generate_admin,
        # message="How can CsPbBr3 nanocubes be prepared at room temperature?",
        # message="how to synthesis of CsPbBr3 Perovskite NCs at room temperature?"
        # message="how to synthesis CsPbBr3 nanocubes at room temperature?"
        # message="What is perovskite?"
        message="What are the applications of perovskite?",
        # message="Please prepare few layers graphene from graphite powder.",
        # message="Can you please prepare black phosphorusene with improved stability from black phosphorus crystals powder?",
        # message="Can you synthesize gold nanorods by seed-mediated method with absorption peaks at 820 nm?",
        # message="Please synthesize CsPbBr3 nanocubes with a fluorescence emission wavelength of 520 nm at room temperature?",
        # message="Please design a new hybridized halide perovskite composite material that is biocompatible and water-stable",
        # message="please use phospholipid membrane as shell to encapsulate hybrid perovskite"
        # message="Now I want a novel bright perovskite composite based on CH3NH3PbBr3 and a phospholipid membrane (PM) to improve stability and biocompatibility; please synthesize this material at room temperature.",
    )
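
    # Optional inspection of the finished run, a minimal sketch assuming pyautogen's
    # ConversableAgent.chat_messages mapping (counterpart agent -> list of message dicts).
    for conversation in outer_generate_agent.chat_messages.values():
        for msg in conversation:
            print(msg.get("name", msg.get("role", "")), ":", msg.get("content", ""))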