From 5a795ff5f226e6703fc7868709d7ca68a7866cd7 Mon Sep 17 00:00:00 2001
From: Sydney Runkle
Date: Sun, 10 Aug 2025 16:44:10 -0400
Subject: [PATCH 1/3] new api

---
 src/agent/graph.py | 15 +++++++--------
 1 file changed, 7 insertions(+), 8 deletions(-)

diff --git a/src/agent/graph.py b/src/agent/graph.py
index 2cecc0b..a89e7bf 100644
--- a/src/agent/graph.py
+++ b/src/agent/graph.py
@@ -8,12 +8,12 @@ from __future__ import annotations
 from dataclasses import dataclass
 from typing import Any, Dict, TypedDict
 
-from langchain_core.runnables import RunnableConfig
 from langgraph.graph import StateGraph
+from langgraph.runtime import Runtime
 
 
-class Configuration(TypedDict):
-    """Configurable parameters for the agent.
+class Context(TypedDict):
+    """Context parameters for the agent.
 
     Set these when creating assistants OR when invoking the graph.
     See: https://langchain-ai.github.io/langgraph/cloud/how-tos/configuration_cloud/
@@ -33,21 +33,20 @@ class State:
     changeme: str = "example"
 
 
-async def call_model(state: State, config: RunnableConfig) -> Dict[str, Any]:
+async def call_model(state: State, runtime: Runtime[Context]) -> Dict[str, Any]:
     """Process input and returns output.
 
-    Can use runtime configuration to alter behavior.
+    Can use runtime context to alter behavior.
     """
-    configuration = config["configurable"]
     return {
         "changeme": "output from call_model. "
-        f'Configured with {configuration.get("my_configurable_param")}'
+        f"Configured with {runtime.context.get('my_configurable_param')}"
     }
 
 
 # Define the graph
 graph = (
-    StateGraph(State, config_schema=Configuration)
+    StateGraph(State, context_schema=Context)
     .add_node(call_model)
     .add_edge("__start__", "call_model")
     .compile(name="New Graph")

From 14b6eefcf16a973ff585dc95815aef1b563c7717 Mon Sep 17 00:00:00 2001
From: Sydney Runkle
Date: Sun, 10 Aug 2025 16:45:37 -0400
Subject: [PATCH 2/3] update readme

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 18ebe0a..2e6b3bb 100644
--- a/README.md
+++ b/README.md
@@ -53,7 +53,7 @@ For more information on getting started with LangGraph Server, [see here](https:
 
 ## How to customize
 
-1. **Define configurable parameters**: Modify the `Configuration` class in the `graph.py` file to expose the arguments you want to configure. For example, in a chatbot application you may want to define a dynamic system prompt or LLM to use. For more information on configurations in LangGraph, [see here](https://langchain-ai.github.io/langgraph/concepts/low_level/?h=configuration#configuration).
+1. **Define runtime context parameters**: Modify the `Context` class in the `graph.py` file to expose the arguments you want to configure. For example, in a chatbot application you may want to define a dynamic system prompt or LLM to use. For more information on runtime context in LangGraph, [see here](https://langchain-ai.github.io/langgraph/agents/context/?h=context#static-runtime-context).
 2. **Extend the graph**: The core logic of the application is defined in [graph.py](./src/agent/graph.py). You can modify this file to add new nodes, edges, or change the flow of information.
 
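Editor's note (not part of the patches): a minimal sketch of how a caller might exercise the graph after this migration, assuming a LangGraph version where `invoke`/`ainvoke` accept a `context` keyword in place of `config={"configurable": ...}`. The import path `agent.graph` and the input/context values are illustrative.

```python
# Sketch only: assumes langgraph >= 0.6 (context keyword on ainvoke) and that the
# `agent` package is importable, e.g. after `pip install -e .` in this template.
import asyncio

from agent.graph import graph  # the compiled graph from src/agent/graph.py


async def main() -> None:
    # `context` replaces the old config["configurable"] dict; keys mirror the Context TypedDict.
    result = await graph.ainvoke(
        {"changeme": "hello"},
        context={"my_configurable_param": "some value"},
    )
    print(result["changeme"])


asyncio.run(main())
```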
From 73d250349a1ffe0cfda2e2397667fb4fad64b447 Mon Sep 17 00:00:00 2001
From: Sydney Runkle
Date: Sun, 10 Aug 2025 16:46:05 -0400
Subject: [PATCH 3/3] update readme

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 2e6b3bb..33fb9b5 100644
--- a/README.md
+++ b/README.md
@@ -53,7 +53,7 @@ For more information on getting started with LangGraph Server, [see here](https:
 
 ## How to customize
 
-1. **Define runtime context parameters**: Modify the `Context` class in the `graph.py` file to expose the arguments you want to configure. For example, in a chatbot application you may want to define a dynamic system prompt or LLM to use. For more information on runtime context in LangGraph, [see here](https://langchain-ai.github.io/langgraph/agents/context/?h=context#static-runtime-context).
+1. **Define runtime context**: Modify the `Context` class in the `graph.py` file to expose the arguments you want to configure per assistant. For example, in a chatbot application you may want to define a dynamic system prompt or LLM to use. For more information on runtime context in LangGraph, [see here](https://langchain-ai.github.io/langgraph/agents/context/?h=context#static-runtime-context).
 2. **Extend the graph**: The core logic of the application is defined in [graph.py](./src/agent/graph.py). You can modify this file to add new nodes, edges, or change the flow of information.
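Editor's note (not part of the patches): for the README's customization step 1, a rough sketch of what extending the `Context` schema could look like. The `system_prompt` field and its default value are hypothetical additions for illustration, not part of this template; the access pattern mirrors the `runtime.context.get(...)` usage in patch 1.

```python
# Hypothetical extension of the template's Context schema; `system_prompt` and
# its fallback are illustrative only.
from typing import Any, Dict, TypedDict

from langgraph.runtime import Runtime


class Context(TypedDict, total=False):
    my_configurable_param: str
    system_prompt: str  # hypothetical per-assistant setting


async def call_model(state: Dict[str, Any], runtime: Runtime[Context]) -> Dict[str, Any]:
    # Read the per-assistant value at runtime, with a fallback when it is unset.
    prompt = runtime.context.get("system_prompt", "You are a helpful assistant.")
    return {"changeme": f"using system prompt: {prompt}"}
```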