Abstract
MCP (Model Context Protocol) is an emerging standardized protocol that gives large language model applications a unified interface for context management and tool invocation. LangBot, an advanced chatbot framework, was among the early frameworks to support MCP, letting developers extend model capabilities in a standardized way. This article examines how MCP support is implemented in LangBot, covering protocol integration, tool mapping, and context management, and walks through practical examples of using MCP to extend a chatbot's capabilities.
Main Text
1. Overview of the MCP Protocol
MCP (Model Context Protocol) is an open protocol, specified at modelcontextprotocol.io, that standardizes context management and tool invocation for large language model applications. Its main features include:
- Standardized interface: a unified API surface that hides the differences between tools and services
- Context management: structured management of contextual data
- Tool invocation: a standard mechanism for calling tools
- Resource access: a standardized interface for accessing resources
- Extensibility: support for custom extensions and plugin mechanisms
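For context, tool discovery and invocation in this article's implementation travel as JSON-RPC 2.0 requests over HTTP. The sketch below is a minimal illustration of the envelopes that the MCPClient in section 4.1 builds; the tool name "search_docs" and its arguments are hypothetical and only serve to show the shape of the messages.
```python
# Illustrative only: JSON-RPC 2.0 envelopes for MCP tool discovery and invocation,
# matching what the MCPClient in section 4.1 sends over HTTP.
list_tools_request = {
    "jsonrpc": "2.0",
    "id": 1,
    "method": "tools/list",
    "params": {},
}

call_tool_request = {
    "jsonrpc": "2.0",
    "id": 2,
    "method": "tools/call",
    "params": {
        "name": "search_docs",          # hypothetical tool name
        "arguments": {"query": "MCP"},  # arguments matching the tool's input schema
    },
}
```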
2. System Architecture
At a high level, MCP support in LangBot is organized around a service manager that tracks server connections, per-server client objects that speak the protocol, a tool mapper that bridges MCP tools into the LLM tool-calling interface, and a pipeline stage that drives tool execution. The following sections walk through each component.
3. Core Components
3.1 MCP Service Manager
The MCP service manager is the core of LangBot's MCP support; it manages MCP server connections and protocol interaction:
```python
import uuid


class MCPManager:
    """MCP service manager."""

    def __init__(self, ap: app.Application):
        self.ap = ap
        self.servers: dict[str, MCPServer] = {}
        self.tools: dict[str, MCPTool] = {}
        self.resources: dict[str, MCPResource] = {}

    async def initialize(self):
        """Initialize the MCP manager."""
        # Load the configured MCP servers
        await self.load_mcp_servers()
        # Initialize each loaded server (establish connections)
        for server in self.servers.values():
            await server.initialize()

    async def load_mcp_servers(self):
        """Load MCP server configuration."""
        # Read MCP server entries from the config file or database
        mcp_config = self.ap.instance_config.data.get("mcp", {})
        servers = mcp_config.get("servers", [])
        for server_config in servers:
            server = MCPServer(
                ap=self.ap,  # pass the application handle so the server can log via ap.logger
                uuid=server_config["uuid"],
                name=server_config["name"],
                endpoint=server_config["endpoint"],
                auth=server_config.get("auth", {}),
            )
            self.servers[server.uuid] = server

    async def register_mcp_server(self, server_config: dict) -> str:
        """Register an MCP server.

        Args:
            server_config: server configuration

        Returns:
            the server UUID
        """
        server = MCPServer(
            ap=self.ap,
            uuid=server_config.get("uuid") or str(uuid.uuid4()),
            name=server_config["name"],
            endpoint=server_config["endpoint"],
            auth=server_config.get("auth", {}),
        )
        self.servers[server.uuid] = server
        await server.initialize()
        return server.uuid

    async def get_tools(self, server_uuid: str | None = None) -> list[MCPTool]:
        """Get the available MCP tools.

        Args:
            server_uuid: server UUID (optional); if omitted, tools from all servers are returned

        Returns:
            the tool list
        """
        if server_uuid:
            server = self.servers.get(server_uuid)
            if server:
                return await server.list_tools()
            return []
        else:
            # Aggregate tools from every connected server
            all_tools = []
            for server in self.servers.values():
                tools = await server.list_tools()
                all_tools.extend(tools)
            return all_tools
```
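As a rough usage sketch, assuming an `ap` application instance is available and the server behind the placeholder endpoint and token is reachable, registering a server at runtime and listing its tools would look roughly like this:
```python
# Sketch: registering a server dynamically and aggregating tools.
# `ap`, the endpoint, and the token are placeholders.
async def demo_register(ap):
    mcp_mgr = MCPManager(ap)
    await mcp_mgr.initialize()  # loads servers from the instance config

    # Register an additional server at runtime
    server_uuid = await mcp_mgr.register_mcp_server({
        "name": "Example MCP Server",
        "endpoint": "http://localhost:8000/mcp",
        "auth": {"type": "bearer", "token": "example-token"},
    })

    # Tools from one server, or from all connected servers
    tools = await mcp_mgr.get_tools(server_uuid)
    all_tools = await mcp_mgr.get_tools()
    return tools, all_tools
```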
3.2 MCP Server Implementation
```python
class MCPServer:
    """A single MCP server connection."""

    def __init__(self, ap: app.Application, uuid: str, name: str, endpoint: str, auth: dict):
        self.ap = ap
        self.uuid = uuid
        self.name = name
        self.endpoint = endpoint
        self.auth = auth
        self.client: MCPClient | None = None
        self.connected = False

    async def initialize(self):
        """Initialize the server connection."""
        try:
            # Create the MCP client
            self.client = MCPClient(
                endpoint=self.endpoint,
                auth=self.auth,
            )
            # Establish the connection
            await self.client.connect()
            self.connected = True
            self.ap.logger.info(f"Connected to MCP server {self.name}")
        except Exception as e:
            self.ap.logger.error(f"Failed to connect to MCP server {self.name}: {e}")

    async def list_tools(self) -> list[MCPTool]:
        """List the tools provided by this server.

        Returns:
            the tool list
        """
        if not self.connected or not self.client:
            return []
        try:
            tools_data = await self.client.list_tools()
            tools = [MCPTool.model_validate(tool_data) for tool_data in tools_data]
            return tools
        except Exception as e:
            self.ap.logger.error(f"Failed to list tools of MCP server {self.name}: {e}")
            return []

    async def call_tool(self, tool_name: str, arguments: dict) -> dict:
        """Call a tool.

        Args:
            tool_name: tool name
            arguments: tool arguments

        Returns:
            the call result
        """
        if not self.connected or not self.client:
            raise RuntimeError("MCP server is not connected")
        try:
            result = await self.client.call_tool(tool_name, arguments)
            return result
        except Exception as e:
            self.ap.logger.error(f"Failed to call MCP tool {tool_name}: {e}")
            raise
```
4. MCP Protocol Adaptation
LangBot talks to MCP servers through a protocol adapter layer:
4.1 MCP Client Implementation
```python
import base64

import aiohttp


class MCPClient:
    """Minimal MCP client speaking JSON-RPC 2.0 over HTTP."""

    def __init__(self, endpoint: str, auth: dict):
        self.endpoint = endpoint
        self.auth = auth
        self.session: aiohttp.ClientSession | None = None
        self.request_id = 0

    async def connect(self):
        """Establish the connection."""
        self.session = aiohttp.ClientSession()
        # If credentials are configured, run the authentication flow
        if self.auth:
            await self._authenticate()

    async def close(self):
        """Close the underlying HTTP session."""
        if self.session:
            await self.session.close()
            self.session = None

    async def _authenticate(self):
        """Attach authentication headers to the session."""
        auth_type = self.auth.get("type", "bearer")
        if auth_type == "bearer":
            # Bearer token authentication
            token = self.auth.get("token")
            if token:
                self.session.headers["Authorization"] = f"Bearer {token}"
        elif auth_type == "basic":
            # Basic authentication
            username = self.auth.get("username")
            password = self.auth.get("password")
            if username and password:
                credentials = base64.b64encode(f"{username}:{password}".encode()).decode()
                self.session.headers["Authorization"] = f"Basic {credentials}"

    async def list_tools(self) -> list[dict]:
        """List tools.

        Returns:
            the tool list
        """
        self.request_id += 1
        request = {
            "jsonrpc": "2.0",
            "id": self.request_id,
            "method": "tools/list",
            "params": {},
        }
        async with self.session.post(self.endpoint, json=request) as response:
            if response.status == 200:
                result = await response.json()
                return result.get("result", [])
            else:
                raise RuntimeError(f"HTTP {response.status}: {await response.text()}")

    async def call_tool(self, tool_name: str, arguments: dict) -> dict:
        """Call a tool.

        Args:
            tool_name: tool name
            arguments: tool arguments

        Returns:
            the call result
        """
        self.request_id += 1
        request = {
            "jsonrpc": "2.0",
            "id": self.request_id,
            "method": "tools/call",
            "params": {
                "name": tool_name,
                "arguments": arguments,
            },
        }
        async with self.session.post(self.endpoint, json=request) as response:
            if response.status == 200:
                result = await response.json()
                return result.get("result", {})
            else:
                raise RuntimeError(f"HTTP {response.status}: {await response.text()}")
```
4.2 MCP Tool Mapping
```python
class MCPToolMapper:
    """Maps MCP tools onto LLM tools and dispatches calls back to their servers."""

    def __init__(self, mcp_mgr: MCPManager):
        self.mcp_mgr = mcp_mgr

    def map_mcp_tool_to_llm_tool(self, mcp_tool: MCPTool) -> LLMTool:
        """Map an MCP tool to an LLM tool.

        Args:
            mcp_tool: the MCP tool

        Returns:
            the LLM tool
        """
        return LLMTool(
            name=f"mcp_{mcp_tool.name}",
            description=mcp_tool.description,
            parameters=mcp_tool.inputSchema,
            metadata={
                "source": "mcp",
                "mcp_server": mcp_tool.server_uuid,
                "mcp_tool_name": mcp_tool.name,
            },
        )

    async def execute_mcp_tool(self, tool_name: str, parameters: dict) -> dict:
        """Execute an MCP tool.

        Args:
            tool_name: the mapped tool name (prefixed with "mcp_")
            parameters: the tool parameters

        Returns:
            the execution result
        """
        # Mapped names carry an "mcp_" prefix; reject anything else
        if not tool_name.startswith("mcp_"):
            raise ValueError("Not a valid MCP tool name")
        # Search the connected servers for a tool whose mapped name matches
        for server in self.mcp_mgr.servers.values():
            tools = await server.list_tools()
            for tool in tools:
                if f"mcp_{tool.name}" == tool_name:
                    # Call the MCP tool on its server
                    return await server.call_tool(tool.name, parameters)
        raise ValueError(f"MCP tool {tool_name} not found")
```
5. Using MCP in LangBot
5.1 Configuring MCP Servers
```yaml
# config.yaml
mcp:
  servers:
    - uuid: "dify-server-1"
      name: "Dify MCP Server"
      endpoint: "http://localhost:8000/mcp"
      auth:
        type: "bearer"
        token: "your-mcp-token"
    - uuid: "custom-server-1"
      name: "Custom MCP Server"
      endpoint: "http://custom-server:5000/mcp"
      auth:
        type: "basic"
        username: "mcp_user"
        password: "mcp_password"
```
5.2 The MCP Tool-Call Stage
```python
import json


@stage.stage_class("mcp-tool-call")
class MCPToolCallStage(stage.PipelineStage):
    """Pipeline stage that exposes MCP tools to the model and executes tool calls."""

    async def initialize(self, pipeline_config: dict):
        """Initialize the stage."""
        self.mcp_mapper = MCPToolMapper(self.ap.mcp_mgr)

    async def process(
        self,
        query: pipeline_query.Query,
        stage_inst_name: str,
    ) -> entities.StageProcessResult:
        """Process a message."""
        # MCP servers bound to this pipeline
        bound_mcp_servers = query.variables.get('_pipeline_bound_mcp_servers', [])

        # Collect the MCP tools from the bound servers
        mcp_tools = []
        for server_uuid in bound_mcp_servers:
            server_tools = await self.ap.mcp_mgr.get_tools(server_uuid)
            mcp_tools.extend(server_tools)

        if not mcp_tools:
            return entities.StageProcessResult(
                result_type=entities.ResultType.CONTINUE,
                new_query=query,
            )

        # Map the MCP tools to LLM tools
        llm_tools = [self.mcp_mapper.map_mcp_tool_to_llm_tool(tool) for tool in mcp_tools]

        # Build the message history
        messages = [
            {"role": "user", "content": query.message_chain.get_text()},
        ]

        # Invoke a tool-capable model
        default_model = await self.ap.model_mgr.get_default_model()
        response = await default_model.requester.invoke_llm(
            query=query,
            model=default_model,
            messages=messages,
            funcs=llm_tools,
        )

        # Check whether the model requested any tool calls
        if hasattr(response, 'tool_calls') and response.tool_calls:
            # Execute the requested MCP tools
            tool_results = await self._handle_mcp_tool_calls(response.tool_calls)

            # Append the tool calls and their results to the message history
            messages.append({
                "role": "assistant",
                "tool_calls": response.tool_calls,
            })
            for result in tool_results:
                messages.append({
                    "role": "tool",
                    "tool_call_id": result["tool_call_id"],
                    "name": result["name"],
                    "content": result.get("content", result.get("error", "")),
                })

            # Generate the final reply based on the tool results
            final_response = await default_model.requester.invoke_llm(
                query=query,
                model=default_model,
                messages=messages,
            )

            # Build the reply message chain
            reply = platform_message.MessageChain([
                platform_message.Plain(text=final_response.content),
            ])

            return entities.StageProcessResult(
                result_type=entities.ResultType.CONTINUE,
                new_query=query,
                user_notice=reply,
                console_notice=f"Executed {len(tool_results)} MCP tool call(s)",
            )

        return entities.StageProcessResult(
            result_type=entities.ResultType.CONTINUE,
            new_query=query,
            user_notice=platform_message.MessageChain([
                platform_message.Plain(text=response.content),
            ]),
        )

    async def _handle_mcp_tool_calls(self, tool_calls: list) -> list[dict]:
        """Execute MCP tool calls.

        Args:
            tool_calls: the tool calls requested by the model

        Returns:
            a list of tool call results
        """
        results = []
        for tool_call in tool_calls:
            try:
                # Parse the JSON-encoded arguments
                arguments = json.loads(tool_call.function.arguments)
                # Execute the MCP tool
                result = await self.mcp_mapper.execute_mcp_tool(
                    tool_call.function.name,
                    arguments,
                )
                results.append({
                    "tool_call_id": tool_call.id,
                    "name": tool_call.function.name,
                    "content": json.dumps(result, ensure_ascii=False),
                })
            except Exception as e:
                results.append({
                    "tool_call_id": tool_call.id,
                    "name": tool_call.function.name,
                    "error": f"Tool execution failed: {str(e)}",
                })
        return results
```
6. MCP Resource Management
The MCP protocol also covers resource management, and LangBot provides corresponding support:
```python
class MCPResourceManager:
    """MCP resource manager.

    Assumes the MCP client also exposes list_resources() / read_resource(),
    implemented like the tool methods above via the resources/list and
    resources/read JSON-RPC methods.
    """

    def __init__(self, mcp_mgr: MCPManager):
        self.mcp_mgr = mcp_mgr

    async def list_resources(self, server_uuid: str) -> list[dict]:
        """List resources.

        Args:
            server_uuid: server UUID

        Returns:
            the resource list
        """
        server = self.mcp_mgr.servers.get(server_uuid)
        if not server:
            raise ValueError(f"MCP server {server_uuid} not found")
        try:
            resources = await server.client.list_resources()
            return resources
        except Exception as e:
            self.mcp_mgr.ap.logger.error(f"Failed to list resources of MCP server {server_uuid}: {e}")
            return []

    async def read_resource(self, server_uuid: str, uri: str) -> dict:
        """Read a resource.

        Args:
            server_uuid: server UUID
            uri: resource URI

        Returns:
            the resource content
        """
        server = self.mcp_mgr.servers.get(server_uuid)
        if not server:
            raise ValueError(f"MCP server {server_uuid} not found")
        try:
            content = await server.client.read_resource(uri)
            return content
        except Exception as e:
            self.mcp_mgr.ap.logger.error(f"Failed to read MCP resource {uri}: {e}")
            raise
```
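As with tools, the resource manager can be exercised directly. The sketch below uses a placeholder resource URI and assumes the client implements the resource methods noted in the class docstring above:
```python
async def demo_resources(mcp_mgr: MCPManager, server_uuid: str):
    res_mgr = MCPResourceManager(mcp_mgr)

    # Enumerate resources, then read one by URI (the URI is a placeholder)
    resources = await res_mgr.list_resources(server_uuid)
    content = await res_mgr.read_resource(server_uuid, "file:///docs/readme.md")
    return resources, content
```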
7. MCP Protocol Extensions
LangBot also allows MCP functionality to be extended through plugins:
```python
import uuid
from datetime import datetime


class MCPExtension:
    """MCP extension."""

    def __init__(self, ap: app.Application):
        self.ap = ap

    async def register_custom_methods(self, server: MCPServer):
        """Register custom methods.

        Args:
            server: the MCP server

        Assumes the client exposes a register_method_handler() hook
        for custom JSON-RPC methods.
        """
        # Register custom method handlers
        server.client.register_method_handler(
            "langbot/get_session_info",
            self._handle_get_session_info,
        )
        server.client.register_method_handler(
            "langbot/send_message",
            self._handle_send_message,
        )

    async def _handle_get_session_info(self, params: dict) -> dict:
        """Handle a get-session-info request.

        Args:
            params: request parameters

        Returns:
            the session information
        """
        session_id = params.get("session_id")
        if not session_id:
            raise ValueError("Missing session_id parameter")
        # Look up the session information
        # NOTE: the actual session lookup logic still needs to be implemented
        session_info = {
            "session_id": session_id,
            "created_at": datetime.now().isoformat(),
            "message_count": 10,  # example data
        }
        return session_info

    async def _handle_send_message(self, params: dict) -> dict:
        """Handle a send-message request.

        Args:
            params: request parameters

        Returns:
            the send result
        """
        target_type = params.get("target_type")
        target_id = params.get("target_id")
        message = params.get("message")
        if not all([target_type, target_id, message]):
            raise ValueError("Missing required parameters")
        # Send the message
        # NOTE: the actual message sending logic still needs to be implemented
        try:
            # await self.ap.platform_mgr.send_message(target_type, target_id, message)
            return {"success": True, "message_id": str(uuid.uuid4())}
        except Exception as e:
            return {"success": False, "error": str(e)}
```
8. Performance Optimization and Error Handling
8.1 Connection Pool Management
```python
import asyncio


class MCPConnectionPool:
    """MCP connection pool."""

    def __init__(self, max_connections: int = 10):
        self.max_connections = max_connections
        self.connections: dict[str, asyncio.Queue] = {}
        self.connection_counts: dict[str, int] = {}

    async def get_connection(self, server_uuid: str, endpoint: str, auth: dict) -> MCPClient:
        """Get an MCP connection.

        Args:
            server_uuid: server UUID
            endpoint: endpoint URL
            auth: authentication info

        Returns:
            an MCP client
        """
        if server_uuid not in self.connections:
            self.connections[server_uuid] = asyncio.Queue(maxsize=self.max_connections)
            self.connection_counts[server_uuid] = 0
        try:
            # Try to take an idle connection from the pool
            client = self.connections[server_uuid].get_nowait()
            return client
        except asyncio.QueueEmpty:
            # Pool is empty: create a new connection if under the limit
            if self.connection_counts[server_uuid] < self.max_connections:
                client = MCPClient(endpoint, auth)
                await client.connect()
                self.connection_counts[server_uuid] += 1
                return client
            else:
                # Otherwise wait for a connection to be released
                client = await self.connections[server_uuid].get()
                return client

    async def release_connection(self, server_uuid: str, client: MCPClient):
        """Release an MCP connection back to the pool.

        Args:
            server_uuid: server UUID
            client: the MCP client
        """
        if server_uuid in self.connections:
            try:
                self.connections[server_uuid].put_nowait(client)
            except asyncio.QueueFull:
                # Pool is full: close the surplus connection
                await client.close()
                self.connection_counts[server_uuid] -= 1
```
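Callers are expected to return connections to the pool after use. A minimal sketch, with a placeholder endpoint and tool name, pairs get_connection with release_connection in a try/finally:
```python
async def call_via_pool(pool: MCPConnectionPool, server_uuid: str):
    # Placeholder endpoint and empty auth; in practice these come from the server config
    client = await pool.get_connection(server_uuid, "http://localhost:8000/mcp", auth={})
    try:
        return await client.call_tool("search_docs", {"query": "MCP"})
    finally:
        # Always hand the connection back, even if the call fails
        await pool.release_connection(server_uuid, client)
```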
8.2 Error Handling and Retries
```python
import asyncio
from typing import Callable

import aiohttp


class MCPErrorHandler:
    """MCP error handler."""

    def __init__(self, max_retries: int = 3):
        self.max_retries = max_retries

    async def execute_with_retry(self, func: Callable, *args, **kwargs):
        """Execute an async function with retries.

        Args:
            func: the async function to execute
            *args: positional arguments
            **kwargs: keyword arguments

        Returns:
            the execution result
        """
        for attempt in range(self.max_retries + 1):
            try:
                return await func(*args, **kwargs)
            except (aiohttp.ClientError, asyncio.TimeoutError) as e:
                if attempt == self.max_retries:
                    raise e
                # Exponential backoff before the next attempt
                await asyncio.sleep(2 ** attempt)
            except Exception as e:
                # Other errors are not retried
                raise e
```
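Combining the two pieces above, a tool call can be wrapped in the retry helper. This sketch assumes an already connected client and a hypothetical tool name:
```python
async def call_with_retry(client: MCPClient):
    handler = MCPErrorHandler(max_retries=3)
    # Network-level errors are retried with exponential backoff (1s, 2s, 4s)
    return await handler.execute_with_retry(
        client.call_tool, "search_docs", {"query": "MCP"}
    )
```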
Summary
LangBot's MCP support gives chatbot applications a powerful extension mechanism. Through a standardized protocol interface, developers can integrate external tools and services with little friction, greatly enriching what a bot can do.
Key points:
- Protocol compatibility: compatible with the MCP standard, covering tool invocation and resource access
- Flexible integration: multiple MCP servers can be connected and managed at the same time
- Tool mapping: MCP tools map seamlessly onto LangBot's internal tool interface
- Performance: connection pooling and retry logic keep the integration responsive and robust
- Extensibility: custom methods and plugin integration are supported
In practice, the following best practices are recommended:
- Sensible configuration: tune MCP server connection parameters to the actual workload
- Error handling: build out error handling to keep the system stable
- Performance: use connection pooling and similar techniques to reduce connection overhead
- Authentication: configure credentials correctly to keep the communication secure
- Monitoring and logging: record detailed logs to make troubleshooting easier
Used well, LangBot's MCP support lets developers build feature-rich, highly extensible chatbots that take full advantage of external tools and services.