Skip to content

T2AA4P agent

This is the main agent file for the AIAgents4Pharma.

get_app(uniq_id, llm_model)

This function returns the LangGraph app.

Source code in aiagents4pharma/talk2aiagents4pharma/agents/main_agent.py
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
def get_app(uniq_id, llm_model: BaseChatModel):
    """
    Return the compiled LangGraph app for the AIAgents4Pharma supervisor.

    Builds a supervisor workflow over the Talk2BioModels and
    Talk2KnowledgeGraphs sub-agents, assembles the supervisor system
    prompt from three hydra configs, and compiles the workflow with an
    in-memory checkpointer.
    """
    # When the caller passes a gpt-4o-mini model, swap in a ChatOpenAI
    # instance configured to disable parallel tool calls.
    if getattr(llm_model, "model_name", None) == "gpt-4o-mini":
        llm_model = ChatOpenAI(
            model="gpt-4o-mini",
            temperature=0,
            model_kwargs={"parallel_tool_calls": False},
        )
    logger.log(logging.INFO, "Launching AIAgents4Pharma_Agent with thread_id %s", uniq_id)

    # Compose the three hydra configs needed for the combined system prompt:
    # the supervisor's own config plus the two sub-agent configs.
    with hydra.initialize(version_base=None, config_path="../configs"):
        composed = hydra.compose(config_name="config", overrides=["agents/main_agent=default"])
        cfg = composed.agents.main_agent
    logger.log(logging.INFO, "System_prompt of T2AA4P: %s", cfg.system_prompt)
    with hydra.initialize(version_base=None, config_path="../../talk2biomodels/configs"):
        composed_t2b = hydra.compose(config_name="config", overrides=["agents/t2b_agent=default"])
        cfg_t2b = composed_t2b.agents.t2b_agent
    with hydra.initialize(version_base=None, config_path="../../talk2knowledgegraphs/configs"):
        composed_t2kg = hydra.compose(config_name="config", overrides=["agents/t2kg_agent=default"])
        cfg_t2kg = composed_t2kg.agents.t2kg_agent

    # Concatenate the supervisor prompt with both sub-agent prompts, in order.
    system_prompt = "".join(
        [
            cfg.system_prompt,
            "\n\nHere is the system prompt of T2B agent\n",
            cfg_t2b.state_modifier,
            "\n\nHere is the system prompt of T2KG agent\n",
            cfg_t2kg.state_modifier,
        ]
    )

    # Create the supervisor workflow over the two sub-agent apps.
    workflow = create_supervisor(
        [
            get_app_t2b(uniq_id, llm_model),  # Talk2BioModels
            get_app_t2kg(uniq_id, llm_model),  # Talk2KnowledgeGraphs
        ],
        model=llm_model,
        state_schema=Talk2AIAgents4Pharma,
        # Full message history is kept so tool artifacts can be
        # extracted downstream.
        output_mode="full_history",
        add_handoff_back_messages=True,
        prompt=system_prompt,
    )

    # Compile with an in-memory checkpointer and return the runnable app.
    return workflow.compile(checkpointer=MemorySaver(), name="AIAgents4Pharma_Agent")