Prebuilt Agent(langgraph.prebuilt)
准备项目环境
# 使用 uv 创建项目目录
uv init langgraph-dev
cd langgraph-dev
uv add langgraph langchain-openai
准备两个公共模块
config.py:读取 config.json 配置文件
import json
class Config:
    """Load ``config.json`` once and expose LLM settings as read-only properties."""

    def __init__(self):
        # Parsed contents of config.json, e.g. {"llm": {"model": ..., ...}}.
        self.data = self.read_json("config.json")

    def read_json(self, filepath: str) -> dict:
        """Read a JSON file and return its contents as a dict.

        Raises:
            FileNotFoundError: if *filepath* does not exist.
            json.JSONDecodeError: if the file is not valid JSON.
        """
        # Explicit encoding: JSON is UTF-8 by convention; without it open()
        # uses the platform default and may fail on non-ASCII content.
        with open(filepath, "r", encoding="utf-8") as f:
            return json.load(f)

    @property
    def llm_model(self) -> str:
        return self.data["llm"]["model"]

    @property
    def llm_api_key(self) -> str:
        # Annotation added for consistency with the sibling properties.
        return self.data["llm"]["api_key"]

    @property
    def llm_base_url(self) -> str:
        return self.data["llm"]["base_url"]
# Module-level singleton: config.json is read once, when this module is imported.
cfg = Config()
config.json
文件示例
{
"llm": {
"model": "qwen-plus",
"base_url": "https://dashscope.aliyuncs.com/compatible-mode/v1",
"api_key": "sk-yourtoken"
}
}
log.py:一个简单的日志模块
import logging
import sys
def set_formatter():
    """Build the shared log Formatter (timestamp | logger | level | location | message)."""
    return logging.Formatter(
        fmt=(
            "%(asctime)s | %(name)s | %(levelname)s | "
            "%(filename)s:%(lineno)d | %(funcName)s | %(message)s"
        ),
        datefmt="%Y-%m-%d %H:%M:%S",
    )
def set_stream_handler():
    """Create a StreamHandler that writes log records to standard output."""
    handler = logging.StreamHandler(stream=sys.stdout)
    return handler
def get_logger(name: str = "mylogger", level=logging.INFO):
    """Return a logger wired to the shared stdout handler/formatter.

    Args:
        name: Logger name; the logging module caches loggers per name.
        level: Minimum level the logger will emit.
    """
    logger = logging.getLogger(name)
    # getLogger() returns the same cached object for a given name, so an
    # unguarded addHandler() would stack one more handler per call and
    # duplicate every log line. Only attach a handler the first time.
    if not logger.handlers:
        handler = set_stream_handler()
        handler.setFormatter(set_formatter())
        logger.addHandler(handler)
    logger.setLevel(level)
    return logger
# Shared default logger for modules that just do `from log import logger`.
logger = get_logger()
创建一个agent
from langgraph.prebuilt import create_react_agent
from langchain_openai import ChatOpenAI
from config import cfg
from langchain_core.messages import HumanMessage
# OpenAI-compatible chat client; model, endpoint, and key come from config.json.
llm = ChatOpenAI(
    model=cfg.llm_model,
    base_url=cfg.llm_base_url,
    api_key=cfg.llm_api_key,
)
def get_weather(city: str) -> str:
    """Get weather for a given city."""
    # Demo stub: a real tool implementation would call a weather service.
    reply = "It's always sunny in " + city + "!"
    return reply
# ReAct-style agent: the LLM decides when to call get_weather as a tool.
agent = create_react_agent(
    model=llm,
    tools=[get_weather],
    prompt="You are a helpful assistant"
)
# Option 1: raw dict message format
# print(agent.invoke({"messages": [{"role": "user", "content": "what is the weather in sf"}]}))
# Option 2: HumanMessage object; print only the final assistant reply
print(agent.invoke({"messages": [HumanMessage(content="what is the weather in Beijing")]})["messages"][-1].content)
执行输出
$ python demo1.py
The weather in Beijing is always sunny!
添加一个自定义的prompt
Prompts 指示 LLM 如何行动。
- 静态提示:将字符串解释为system message
# Static prompt: a plain string is used as the system message on every turn.
agent = create_react_agent(
    model=llm,
    tools=[get_weather],
    prompt="Never answer questions about the weather."
)
运行输出
$ python demo1.py
I cannot provide weather information. Please use a weather service or app to check the current weather in Beijing.
- 动态提示: 运行时根据输入或配置生成的消息列表。
from langgraph.prebuilt import create_react_agent
from langchain_openai import ChatOpenAI
from config import cfg
from langchain_core.messages import HumanMessage, AnyMessage, SystemMessage
from langgraph.prebuilt.chat_agent_executor import AgentState
from langchain_core.runnables import RunnableConfig
# OpenAI-compatible chat client; model, endpoint, and key come from config.json.
llm = ChatOpenAI(
    model=cfg.llm_model,
    base_url=cfg.llm_base_url,
    api_key=cfg.llm_api_key,
)
def prompt(state: AgentState, config: RunnableConfig) -> list[AnyMessage]:
    """Build the message list sent to the LLM on each turn.

    Prepends a personalized system message — using ``user_name`` from the
    run config — to the conversation history carried in ``state``.
    """
    # Safe lookups: config["configurable"] raises KeyError when invoke() is
    # called without a config, and a missing user_name would otherwise
    # render the literal string "None" into the system prompt.
    user_name = config.get("configurable", {}).get("user_name", "user")
    system_msg = f"You are a helpful assistant. Address the user as {user_name}."
    # Equivalent spelling: [SystemMessage(content=system_msg), *state["messages"]]
    return [SystemMessage(content=system_msg)] + state["messages"]
# Tool definition — the docstring doubles as the tool description shown to the LLM.
def get_weather(city: str) -> str:
    """Get weather for a given city."""
    template = "It's always sunny in {}!"
    return template.format(city)
# Dynamic prompt: pass the callable itself so messages are rebuilt per invocation.
agent = create_react_agent(
    model=llm,
    tools=[get_weather],
    prompt=prompt,
)
# Invoke with a HumanMessage plus a run config carrying user_name,
# which the dynamic prompt reads from config["configurable"].
resp = agent.invoke(
    {"messages": [HumanMessage(content="what is the weather in sf")]},
    config={"configurable": {"user_name": "John Smith"}}
)
print(resp)
print("\n助手回答:", resp["messages"][-1].content)
执行输出
$ python demo1.py
{'messages': [HumanMessage(content='what is the weather in sf', additional_kwargs={}, response_metadata={}, id='bd4218d2-e87e-48fd-b249-90710c665f77'), AIMessage(content='', additional_kwargs={'tool_calls': [{'id': 'call_c3513e3ac0804814851797', 'function': {'arguments': '{"city": "sf"}', 'name': 'get_weather'}, 'type': 'function', 'index': 0}], 'refusal': None}, response_metadata={'token_usage': {'completion_tokens': 19, 'prompt_tokens': 169, 'total_tokens': 188, 'completion_tokens_details': None, 'prompt_tokens_details': {'audio_tokens': None, 'cached_tokens': 64}}, 'model_name': 'qwen-plus', 'system_fingerprint': None, 'id': 'chatcmpl-4aa5bc40-79f9-9191-9209-dda0700a3c9d', 'service_tier': None, 'finish_reason': 'tool_calls', 'logprobs': None}, id='run--8aeb1798-daad-431f-a247-21db0e15a1bb-0', tool_calls=[{'name': 'get_weather', 'args': {'city': 'sf'}, 'id': 'call_c3513e3ac0804814851797', 'type': 'tool_call'}], usage_metadata={'input_tokens': 169, 'output_tokens': 19, 'total_tokens': 188, 'input_token_details': {'cache_read': 64}, 'output_token_details': {}}), ToolMessage(content="It's always sunny in sf!", name='get_weather', id='47e4dd37-e91c-4800-954c-b511612ae4b5', tool_call_id='call_c3513e3ac0804814851797'), AIMessage(content='The weather in SF is always sunny! ☀️', additional_kwargs={'refusal': None}, response_metadata={'token_usage': {'completion_tokens': 11, 'prompt_tokens': 208, 'total_tokens': 219, 'completion_tokens_details': None, 'prompt_tokens_details': {'audio_tokens': None, 'cached_tokens': 0}}, 'model_name': 'qwen-plus', 'system_fingerprint': None, 'id': 'chatcmpl-b4d25dcb-c5be-97db-b4b5-0e53a0a5f0a2', 'service_tier': None, 'finish_reason': 'stop', 'logprobs': None}, id='run--a985ea17-fc90-467f-b1d2-0f6c67fd846e-0', usage_metadata={'input_tokens': 208, 'output_tokens': 11, 'total_tokens': 219, 'input_token_details': {'cache_read': 0}, 'output_token_details': {}})]}
助手回答: The weather in SF is always sunny! ☀️
(langgraph-dev)