"""Define a custom Reasoning and Action agent.

Works with a chat model with tool calling support.
"""

from datetime import datetime, timezone
from typing import Any, Dict, List

from langchain_core.runnables import RunnableConfig
from langgraph.graph import StateGraph

from agent.configuration import Configuration
from agent.state import State
from agent.utils import load_chat_model


# Define the function that calls the model
async def call_model(state: State, config: RunnableConfig) -> Dict[str, List[Any]]:
    """Call the LLM powering our "agent".

    This function prepares the prompt, initializes the model, and processes
    the response.

    Args:
        state (State): The current state of the conversation.
        config (RunnableConfig): Configuration for the model run.

    Returns:
        dict: A dictionary containing the model's response message.
    """
    configuration = Configuration.from_runnable_config(config)
    system_prompt = configuration.system_prompt.format(
        system_time=datetime.now(tz=timezone.utc).isoformat()
    )
    model = load_chat_model(configuration.model_name)
    res = await model.ainvoke([("system", system_prompt), *state.messages])
    # Return the model's response as a list to be added to existing messages
    return {"messages": [res]}


# Define a new graph
workflow = StateGraph(State, config_schema=Configuration)

# Add the node that calls the model; a full ReAct agent would also have a
# tool-execution node to cycle with (see the sketch below)
workflow.add_node(call_model)

# Set the entrypoint as `call_model`
# This means that this node is the first one called
workflow.add_edge("__start__", "call_model")
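
# A sketch of how this could be extended into the full ReAct cycle, assuming a
# `tools` list of LangChain tools exists and `call_model` binds the model to
# them (e.g. via `model.bind_tools(tools)`); nothing below is wired up here:
#
#     from langgraph.prebuilt import ToolNode, tools_condition
#
#     workflow.add_node("tools", ToolNode(tools))
#     # Route to "tools" when the last message has tool calls, else end the run
#     workflow.add_conditional_edges("call_model", tools_condition)
#     workflow.add_edge("tools", "call_model")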

# Compile the workflow into an executable graph
# You can customize this by adding interrupt points for state updates
graph = workflow.compile(
    interrupt_before=[],  # Add node names here to update state before they're called
    interrupt_after=[],  # Add node names here to update state after they're called
)
graph.name = "My New Graph"  # This defines the custom name in LangSmith
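
# A minimal usage sketch (assumes an async runtime and that `state.messages`
# accepts LangChain message tuples); kept in comments so it never runs on import:
#
#     import asyncio
#
#     async def _demo() -> None:
#         result = await graph.ainvoke({"messages": [("user", "Hello!")]})
#         print(result["messages"][-1].content)
#
#     asyncio.run(_demo())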