Merge pull request #8 from langchain-ai/sr/context-schema

chore: replace `config_schema` with `context_schema`
This commit is contained in:
Sydney Runkle
2025-08-12 08:35:50 -04:00
committed by GitHub
2 changed files with 8 additions and 9 deletions

View File

@@ -53,7 +53,7 @@ For more information on getting started with LangGraph Server, [see here](https:
 ## How to customize
-1. **Define configurable parameters**: Modify the `Configuration` class in the `graph.py` file to expose the arguments you want to configure. For example, in a chatbot application you may want to define a dynamic system prompt or LLM to use. For more information on configurations in LangGraph, [see here](https://langchain-ai.github.io/langgraph/concepts/low_level/?h=configuration#configuration).
+1. **Define runtime context**: Modify the `Context` class in the `graph.py` file to expose the arguments you want to configure per assistant. For example, in a chatbot application you may want to define a dynamic system prompt or LLM to use. For more information on runtime context in LangGraph, [see here](https://langchain-ai.github.io/langgraph/agents/context/?h=context#static-runtime-context).
 2. **Extend the graph**: The core logic of the application is defined in [graph.py](./src/agent/graph.py). You can modify this file to add new nodes, edges, or change the flow of information.

View File

@@ -8,12 +8,12 @@ from __future__ import annotations
 from dataclasses import dataclass
 from typing import Any, Dict, TypedDict
-from langchain_core.runnables import RunnableConfig
 from langgraph.graph import StateGraph
+from langgraph.runtime import Runtime

-class Configuration(TypedDict):
-    """Configurable parameters for the agent.
+class Context(TypedDict):
+    """Context parameters for the agent.

     Set these when creating assistants OR when invoking the graph.
     See: https://langchain-ai.github.io/langgraph/cloud/how-tos/configuration_cloud/
@@ -33,21 +33,20 @@ class State:
     changeme: str = "example"

-async def call_model(state: State, config: RunnableConfig) -> Dict[str, Any]:
+async def call_model(state: State, runtime: Runtime[Context]) -> Dict[str, Any]:
     """Process input and returns output.

-    Can use runtime configuration to alter behavior.
+    Can use runtime context to alter behavior.
     """
-    configuration = config["configurable"]
     return {
         "changeme": "output from call_model. "
-        f'Configured with {configuration.get("my_configurable_param")}'
+        f"Configured with {runtime.context.get('my_configurable_param')}"
     }

 # Define the graph
 graph = (
-    StateGraph(State, config_schema=Configuration)
+    StateGraph(State, context_schema=Context)
     .add_node(call_model)
     .add_edge("__start__", "call_model")
     .compile(name="New Graph")