graphtest/src/agent/graph.py

"""Define a simple chatbot agent.
2024-09-13 16:28:17 -07:00
This agent returns a predefined response without using an actual LLM.
2024-09-13 16:28:17 -07:00
"""
from typing import Any, Dict

from langchain_core.runnables import RunnableConfig
from langgraph.graph import StateGraph

from agent.configuration import Configuration
from agent.state import State


async def my_node(state: State, config: RunnableConfig) -> Dict[str, Any]:
"""Each node does work."""
    configuration = Configuration.from_runnable_config(config)
    # You can use runtime configuration to alter the behavior of your
    # graph.
    return {
        "changeme": "output from my_node. "
        f"Configured with {configuration.my_configurable_param}"
    }


# Define a new graph
workflow = StateGraph(State, config_schema=Configuration)

# Add the node to the graph
workflow.add_node("my_node", my_node)

# Set the entrypoint as `my_node`
workflow.add_edge("__start__", "my_node")

# Compile the workflow into an executable graph
graph = workflow.compile()
graph.name = "New Graph" # This defines the custom name in LangSmith