基于 MCP 协议的工具体系
MCP 协议 (Model Context Protocol)
MCP 是一个开放协议,它规范了应用程序向 LLM 提供上下文的方式。MCP 就像 AI 应用程序的 USB-C 端口一样。正如 USB-C 提供了一种标准化的方式将您的设备连接到各种外围设备和配件一样,MCP 也提供了一种标准化的方式将 AI 模型连接到不同的数据源和工具。

MCP 架构


MCP Python SDK
安装
# 安装
pip install "mcp[cli]"
# 编写service
# 启动
mcp dev server.py
# 直接启动
python server.py
# server.py
from mcp.server.fastmcp import FastMCP

# Create an MCP server
mcp = FastMCP("Demo")


# Add an addition tool
@mcp.tool()
def add(a: int, b: int) -> int:
    """Add two numbers."""
    return a + b


# Add a dynamic greeting resource (the {name} URI segment becomes the argument)
@mcp.resource("greeting://{name}")
def get_greeting(name: str) -> str:
    """Get a personalized greeting."""
    return f"Hello, {name}!"
MCP 分析调试工具:MCP Inspector

mcp Server
# server.py
from mcp.server.fastmcp import FastMCP

# Create an MCP server
mcp = FastMCP("Demo")


# Add an addition tool
@mcp.tool()
def add(a: int, b: int) -> int:
    """Add two numbers."""
    return a + b
工具 Tools
import httpx
from mcp.server.fastmcp import FastMCP

mcp = FastMCP("My App")


@mcp.tool()
def calculate_bmi(weight_kg: float, height_m: float) -> float:
    """Calculate BMI given weight in kg and height in meters."""
    return weight_kg / (height_m**2)


@mcp.tool()
async def fetch_weather(city: str) -> str:
    """Fetch current weather for a city."""
    async with httpx.AsyncClient() as client:
        response = await client.get(f"https://api.weather.com/{city}")
        return response.text
提示词 Prompts
from mcp.server.fastmcp import FastMCP
from mcp.server.fastmcp.prompts import base

mcp = FastMCP("My App")


@mcp.prompt()
def review_code(code: str) -> str:
    """Single-string prompt asking the model to review the given code."""
    return f"Please review this code:\n\n{code}"


@mcp.prompt()
def debug_error(error: str) -> list[base.Message]:
    """Multi-message prompt that starts a debugging conversation."""
    return [
        base.UserMessage("I'm seeing this error:"),
        base.UserMessage(error),
        base.AssistantMessage("I'll help debug that. What have you tried so far?"),
    ]
资源 Resource
from mcp.server.fastmcp import FastMCP

mcp = FastMCP("My App")


@mcp.resource("config://app")
def get_config() -> str:
    """Static configuration data."""
    return "App configuration here"


# Template resource: {user_id} in the URI is passed as the function argument.
@mcp.resource("users://{user_id}/profile")
def get_user_profile(user_id: str) -> str:
    """Dynamic user data."""
    return f"Profile data for user {user_id}"
图片 Images
from mcp.server.fastmcp import FastMCP, Image
from PIL import Image as PILImage

mcp = FastMCP("My App")


@mcp.tool()
def create_thumbnail(image_path: str) -> Image:
    """Create a 100x100 thumbnail from an image file and return it as PNG."""
    import io  # local import keeps this snippet self-contained

    img = PILImage.open(image_path)
    img.thumbnail((100, 100))
    # PIL's tobytes() returns raw (unencoded) pixel data, so labelling it
    # "png" would be wrong; encode the thumbnail as real PNG bytes instead.
    buffer = io.BytesIO()
    img.save(buffer, format="PNG")
    return Image(data=buffer.getvalue(), format="png")
上下文 Context
from mcp.server.fastmcp import FastMCP, Context

mcp = FastMCP("My App")


@mcp.tool()
async def long_task(files: list[str], ctx: Context) -> str:
    """Process multiple files with progress tracking."""
    for i, file in enumerate(files):
        # ctx.info is a coroutine in the MCP SDK, so it must be awaited
        await ctx.info(f"Processing {file}")
        await ctx.report_progress(i, len(files))
        data, mime_type = await ctx.read_resource(f"file://{file}")
    return "Processing complete"
认证 Auth OAuth 2.0
# OAuth 2.0 configuration: the server issues/validates tokens via the
# provided auth provider, with revocation and dynamic client registration.
mcp = FastMCP(
    "My App",
    auth_provider=MyOAuthServerProvider(),
    auth=AuthSettings(
        issuer_url="https://myapp.com",
        revocation_options=RevocationOptions(
            enabled=True,
        ),
        client_registration_options=ClientRegistrationOptions(
            enabled=True,
            valid_scopes=["myscope", "myotherscope"],
            default_scopes=["myscope"],
        ),
        required_scopes=["myscope"],
    ),
)
服务管理
开发模式
mcp dev server.py
# Add dependencies
mcp dev server.py --with pandas --with numpy
# Mount local code
mcp dev server.py --with-editable .
服务参数
if __name__ == "__main__":
    # Initialize and run the server
    mcp.run(transport='sse')
    # Streamable HTTP transport is replacing the SSE transport for production deployments.
\
python server.py
# or
mcp run server.py
# Signature excerpt from the FastMCP SDK for reference.
def run(
    self,
    transport: Literal["stdio", "sse", "streamable-http"] = "stdio",
    mount_path: str | None = None,
) -> None:
    """Run the FastMCP server. Note this is a synchronous function.

    Args:
        transport: Transport protocol to use ("stdio", "sse", or "streamable-http")
        mount_path: Optional mount path for SSE transport
    """
与 FastApi 集成
# echo.py
from mcp.server.fastmcp import FastMCP

# stateless_http=True lets the server be mounted as a stateless HTTP app
mcp = FastMCP(name="EchoServer", stateless_http=True)


@mcp.tool(description="A simple echo tool")
def echo(message: str) -> str:
    """Echo the given message back to the caller."""
    return f"Echo: {message}"
\
# echo.py
from mcp.server.fastmcp import FastMCP

# stateless_http=True lets the server be mounted as a stateless HTTP app
mcp = FastMCP(name="EchoServer", stateless_http=True)


@mcp.tool(description="A simple echo tool")
def echo(message: str) -> str:
    """Echo the given message back to the caller."""
    return f"Echo: {message}"
# main.py
from fastapi import FastAPI
from mcp.echo import echo
from mcp.math import math

# Use the session manager's lifespan so the MCP server starts/stops with the app.
# (The original snippet created a throwaway FastAPI() first; that dead
# assignment has been removed.)
# NOTE(review): only echo's session manager is started here — math.mcp likely
# needs its session manager running too (e.g. combine both in an
# AsyncExitStack-based lifespan); confirm against the SDK's FastAPI example.
app = FastAPI(lifespan=lambda app: echo.mcp.session_manager.run())
app.mount("/echo", echo.mcp.streamable_http_app())
app.mount("/math", math.mcp.streamable_http_app())
Client 管理 stdio sse streamable http
stdio
sse/streamable http
sse client
import asyncio

from mcp import ClientSession
from mcp.client.sse import sse_client


async def run():
    """Connect to an MCP server over SSE and exercise its primitives."""
    async with sse_client('http://127.0.0.1:8000/sse') as (read, write):
        async with ClientSession(
            read, write,
        ) as session:
            # Initialize the connection
            await session.initialize()

            # List available prompts
            prompts = await session.list_prompts()
            print(prompts)

            # List available resources
            resources = await session.list_resources()
            print(resources)

            # List available tools
            tools = await session.list_tools()
            print(tools)

            # Read a resource
            content = await session.read_resource("greeting://some")
            print(content)

            # Call a tool
            result = await session.call_tool("add", arguments={'a': 1, 'b': 2})
            print(result)


if __name__ == "__main__":
    asyncio.run(run())
streamable http client
import asyncio

from mcp import ClientSession
from mcp.client.streamable_http import streamablehttp_client


async def run():
    """Connect to an MCP server over streamable HTTP and exercise its primitives."""
    # The third tuple element (session-id callback) is unused here.
    async with streamablehttp_client('http://127.0.0.1:8000/mcp') as (read, write, _):
        async with ClientSession(
            read, write,
        ) as session:
            # Initialize the connection
            await session.initialize()

            # List available prompts
            prompts = await session.list_prompts()
            print(prompts)

            # List available resources
            resources = await session.list_resources()
            print(resources)

            # List available tools
            tools = await session.list_tools()
            print(tools)

            # Read a resource
            content = await session.read_resource("greeting://some")
            print(content)

            # Call a tool
            result = await session.call_tool("add", arguments={'a': 1, 'b': 2})
            print(result)


if __name__ == "__main__":
    asyncio.run(run())
FastMCP 2.0
FastMCP 1.0 的核心服务器概念奠定了基础,并被贡献给了官方 MCP SDK,而 FastMCP 2.0(本项目)是其积极开发的后续版本。它添加了重要的增强功能和全新功能,例如强大的客户端库、服务器代理、组合模式等等
pip install fastmcp
运行
from fastmcp import FastMCP, Client

mcp = FastMCP("My MCP Server")


@mcp.tool()
def greet(name: str) -> str:
    """Return a greeting for the given name."""
    return f"Hello, {name}!"


if __name__ == "__main__":
    mcp.run()
fastmcp dev server.py
fastmcp run my_server.py:mcp
fastmcp run server.py --transport sse --port 9000
FastMCP client
import asyncio

from fastmcp import Client, FastMCP

# Example transports (more details in Transports page)
server_instance = FastMCP(name="TestServer")  # In-memory server
sse_url = "http://localhost:8000/sse"  # SSE server URL
ws_url = "ws://localhost:9000"  # WebSocket server URL
server_script = "my_mcp_server.py"  # Path to a Python server file

# Client automatically infers the transport type from the argument it is given.
client_in_memory = Client(server_instance)
client_sse = Client(sse_url)
client_ws = Client(ws_url)
client_stdio = Client(server_script)

print(client_in_memory.transport)
print(client_sse.transport)
print(client_ws.transport)
print(client_stdio.transport)

# Expected Output (types may vary slightly based on environment):
# <FastMCP(server='TestServer')>
# <SSE(url='http://localhost:8000/sse')>
# <WebSocket(url='ws://localhost:9000')>
# <PythonStdioTransport(command='python', args=['/path/to/your/my_mcp_server.py'])>
async def demo(client):
    """Example calls against an already-connected FastMCP client.

    Run inside `async with client:` — the original fragment used bare
    top-level `await`, which is not valid module-level Python.
    """
    await client.ping()  # ping() is a coroutine; the original was missing `await`
    tools = await client.list_tools()
    result = await client.call_tool("add", {"a": 5, "b": 3})
    resources = await client.list_resources()
    return tools, result, resources
LangChain LangGraph 结合 MCP

langchain mcp client
import asyncio

from langchain_mcp_adapters.tools import load_mcp_tools
from langchain_ollama import ChatOllama
from langgraph.prebuilt import create_react_agent
from mcp import ClientSession
from mcp.client.streamable_http import streamablehttp_client

model = ChatOllama(model="qwen2.5")


async def run():
    """Run a ReAct agent whose tools come from a streamable-HTTP MCP server."""
    async with streamablehttp_client('http://127.0.0.1:8000/mcp') as (read, write, _):
        async with ClientSession(read, write) as session:
            # Initialize the connection
            await session.initialize()

            # Get tools
            tools = await load_mcp_tools(session)

            # Create and run the agent
            agent = create_react_agent(model, tools)
            agent_response = await agent.ainvoke({"messages": "北京天气如何"})
            print(agent_response)


def test_run():
    asyncio.run(run())
LangGraph 集成 MCP
from langchain_mcp_adapters.client import MultiServerMCPClient
from langgraph.prebuilt import create_react_agent


async def main():
    """Build one agent from tools served by several MCP servers (stdio + SSE).

    The original fragment used `async with` at module level, which is not
    valid Python; it must live inside a coroutine.
    NOTE(review): newer langchain-mcp-adapters versions dropped the
    context-manager API — confirm the installed version supports `async with`.
    """
    async with MultiServerMCPClient(
        {
            "math": {
                "command": "python",
                # Replace with absolute path to your math_server.py file
                "args": ["/path/to/math_server.py"],
                "transport": "stdio",
            },
            "weather": {
                # Ensure you start your weather server on port 8000
                "url": "http://localhost:8000/sse",
                "transport": "sse",
            }
        }
    ) as client:
        agent = create_react_agent(
            "ollama:qwen3",
            client.get_tools()
        )
        math_response = await agent.ainvoke(
            {"messages": [{"role": "user", "content": "what's (3 + 5) x 12?"}]}
        )
        weather_response = await agent.ainvoke(
            {"messages": [{"role": "user", "content": "what is the weather in nyc?"}]}
        )
        return math_response, weather_response