Compare commits
10 Commits
5a795ff5f2
...
6c7574cd88
| Author | SHA1 | Date | |
|---|---|---|---|
| 6c7574cd88 | |||
| 303548d94e | |||
| cc67975d27 | |||
| 0c8bfcf67b | |||
|
|
f1b721887d | ||
|
|
a10d7eb1a9 | ||
|
|
64df02d6c2 | ||
|
|
f1696eb614 | ||
|
|
73d250349a | ||
|
|
14b6eefcf1 |
9
.env
Normal file
9
.env
Normal file
@@ -0,0 +1,9 @@
|
||||
# (可选但推荐)设置您想要发送 traces 的项目名称,例如 'my-awesome-project'
|
||||
# To separate your traces from other application
|
||||
LANGSMITH_PROJECT=new-agent
|
||||
|
||||
# Add API keys for connecting to LLM providers, data sources, and other integrations here
|
||||
# SECURITY: a real LangSmith API key is committed here — rotate this key and keep .env out of version control (add it to .gitignore; commit an .env.example with a placeholder instead)
LANGSMITH_API_KEY=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
|
||||
|
||||
# 通常还需要设置 LangChain 的跟踪端点,默认就是以下地址,通常无需修改
|
||||
LANGCHAIN_TRACING_V2=true
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -162,3 +162,5 @@ cython_debug/
|
||||
#.idea/
|
||||
uv.lock
|
||||
.langgraph_api/
|
||||
|
||||
.idea
|
||||
21
README.md
21
README.md
@@ -15,14 +15,6 @@ You can extend this graph to orchestrate more complex agentic workflows that can
|
||||
|
||||
## Getting Started
|
||||
|
||||
<!--
|
||||
Setup instruction auto-generated by `langgraph template lock`. DO NOT EDIT MANUALLY.
|
||||
-->
|
||||
|
||||
<!--
|
||||
End setup instructions
|
||||
-->
|
||||
|
||||
1. Install dependencies, along with the [LangGraph CLI](https://langchain-ai.github.io/langgraph/concepts/langgraph_cli/), which will be used to run the server.
|
||||
|
||||
```bash
|
||||
@@ -53,7 +45,7 @@ For more information on getting started with LangGraph Server, [see here](https:
|
||||
|
||||
## How to customize
|
||||
|
||||
1. **Define configurable parameters**: Modify the `Configuration` class in the `graph.py` file to expose the arguments you want to configure. For example, in a chatbot application you may want to define a dynamic system prompt or LLM to use. For more information on configurations in LangGraph, [see here](https://langchain-ai.github.io/langgraph/concepts/low_level/?h=configuration#configuration).
|
||||
1. **Define runtime context**: Modify the `Context` class in the `graph.py` file to expose the arguments you want to configure per assistant. For example, in a chatbot application you may want to define a dynamic system prompt or LLM to use. For more information on runtime context in LangGraph, [see here](https://langchain-ai.github.io/langgraph/agents/context/?h=context#static-runtime-context).
|
||||
|
||||
2. **Extend the graph**: The core logic of the application is defined in [graph.py](./src/agent/graph.py). You can modify this file to add new nodes, edges, or change the flow of information.
|
||||
|
||||
@@ -67,14 +59,3 @@ For more advanced features and examples, refer to the [LangGraph documentation](
|
||||
|
||||
LangGraph Studio also integrates with [LangSmith](https://smith.langchain.com/) for more in-depth tracing and collaboration with teammates, allowing you to analyze and optimize your chatbot's performance.
|
||||
|
||||
<!--
|
||||
Configuration auto-generated by `langgraph template lock`. DO NOT EDIT MANUALLY.
|
||||
{
|
||||
"config_schemas": {
|
||||
"agent": {
|
||||
"type": "object",
|
||||
"properties": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
-->
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
{
|
||||
"dependencies": ["."],
|
||||
"graphs": {
|
||||
"agent": "./src/agent/graph.py:graph"
|
||||
"agent": "./src/agent/graph.py:graph",
|
||||
"my_agent": "./src/agent/my_graph.py:make_graph"
|
||||
},
|
||||
"env": ".env",
|
||||
"image_distro": "wolfi"
|
||||
|
||||
1
requirements.txt
Normal file
1
requirements.txt
Normal file
@@ -0,0 +1 @@
|
||||
langchain
|
||||
67
src/agent/my_graph.py
Normal file
67
src/agent/my_graph.py
Normal file
@@ -0,0 +1,67 @@
|
||||
from typing import Annotated, Any
|
||||
|
||||
from langchain.chat_models import init_chat_model
|
||||
from langchain_core.tools import tool
|
||||
from langchain_mcp_adapters.client import MultiServerMCPClient
|
||||
from langchain_tavily import TavilySearch
|
||||
from langchain_core.messages import BaseMessage
|
||||
from langgraph.graph.state import CompiledStateGraph
|
||||
from typing_extensions import TypedDict
|
||||
|
||||
from langgraph.checkpoint.memory import InMemorySaver
|
||||
from langgraph.graph import StateGraph
|
||||
from langgraph.graph.message import add_messages
|
||||
from langgraph.prebuilt import ToolNode, tools_condition
|
||||
from langchain_community.chat_models.tongyi import ChatTongyi
|
||||
|
||||
from src.mcp.mcp_tools import get_client
|
||||
|
||||
# Module-level chat model shared by the graph below: Alibaba Tongyi (Qwen)
# via the LangChain community integration. Streaming is enabled so tokens
# can be surfaced incrementally by LangGraph.
llm = ChatTongyi(
    model="qwen-max",  # qwen-max as an example; any model from the catalog works: https://help.aliyun.com/zh/model-studio/getting-started/models
    streaming=True,
    # other params...
)
|
||||
|
||||
|
||||
@tool
def get_wheather(location: str) -> str:
    """输入城市查询天气"""
    # NOTE(review): the docstring is the tool description shown to the LLM and
    # the function name is the tool's public name, so both are preserved
    # byte-for-byte (including the "wheather" spelling) — renaming would change
    # the tool contract the model sees.
    # Stubbed lookup: always reports a fixed temperature for any city.
    report = f"The weather in {location} is 20 degrees Celsius."
    return report
|
||||
|
||||
class State(TypedDict):
    # Conversation state for the graph. The `add_messages` reducer makes
    # LangGraph append each node's new messages to this list instead of
    # overwriting it on every state update.
    messages: Annotated[list, add_messages]
|
||||
|
||||
|
||||
|
||||
async def make_graph() -> CompiledStateGraph[Any, Any, Any, Any]:
    """Build and compile the chat-agent state graph.

    Wires a single LLM node ("chatbot") to a ToolNode ("tools") that holds the
    local weather stub plus every tool exposed by the configured MCP servers.

    Returns:
        The compiled graph (no checkpointer attached).
    """
    graph_builder = StateGraph(State)

    # Collect all tools: the local stub plus whatever the MCP servers expose.
    client = get_client()
    mcp_tools = await client.get_tools()
    tools = [get_wheather]
    tools.extend(mcp_tools)
    llm_with_tools = llm.bind_tools(tools)

    def chatbot(state: State):
        # Single LLM step; the bound tools let the model emit tool calls.
        return {"messages": [llm_with_tools.invoke(state["messages"])]}

    graph_builder.add_node("chatbot", chatbot)

    # BUG FIX: the original passed `tools=[tool]`, but `tool` is undefined here
    # (the TavilySearch assignment was commented out), raising NameError at
    # build time — and it would also have dropped the MCP tools. The ToolNode
    # must hold the same tool list that was bound to the LLM so every tool call
    # the model emits can actually be executed.
    tool_node = ToolNode(tools=tools)
    graph_builder.add_node("tools", tool_node)

    # Route to "tools" when the last model message contains tool calls,
    # otherwise finish the run.
    graph_builder.add_conditional_edges(
        "chatbot",
        tools_condition,
    )
    graph_builder.add_edge("tools", "chatbot")
    graph_builder.set_entry_point("chatbot")

    # To persist conversations, compile with a checkpointer instead, e.g.
    # graph_builder.compile(checkpointer=InMemorySaver()).
    graph = graph_builder.compile()
    return graph
|
||||
0
src/agt20250902/__init__.py
Normal file
0
src/agt20250902/__init__.py
Normal file
0
src/agt20250902/graph.py
Normal file
0
src/agt20250902/graph.py
Normal file
0
src/mcp/__init__.py
Normal file
0
src/mcp/__init__.py
Normal file
9
src/mcp/mcp_servers.json
Normal file
9
src/mcp/mcp_servers.json
Normal file
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"math": {
|
||||
"command": "python",
|
||||
"args": [
|
||||
"/home/kexsh/src/knightutils/test/mcp/math_server.py"
|
||||
],
|
||||
"transport": "stdio"
|
||||
}
|
||||
}
|
||||
9
src/mcp/mcp_tools.py
Normal file
9
src/mcp/mcp_tools.py
Normal file
@@ -0,0 +1,9 @@
|
||||
from langchain_mcp_adapters.client import MultiServerMCPClient
|
||||
import json
|
||||
|
||||
def get_client() -> MultiServerMCPClient:
    """Create an MCP client from the server config shipped next to this module.

    Returns:
        A MultiServerMCPClient configured with every server entry in
        ``mcp_servers.json``.

    Raises:
        FileNotFoundError: If ``mcp_servers.json`` is missing.
        json.JSONDecodeError: If the config file is not valid JSON.
    """
    # BUG FIX: the original opened "mcp_tools.json", but the config file
    # committed alongside this module is "mcp_servers.json"; it also resolved
    # the name against the process CWD (breaking when the server is launched
    # from the repo root) and leaked the file handle via json.load(open(...)).
    from pathlib import Path

    config_path = Path(__file__).with_name("mcp_servers.json")
    with config_path.open(encoding="utf-8") as fp:
        server_config = json.load(fp)

    return MultiServerMCPClient(server_config)
|
||||
Reference in New Issue
Block a user