perf(web_search): 缓存网络工具调用以加速响应

为 `WebSurfingTool` 和 `URLParserTool` 集成 `tool_cache`,避免对相同参数的重复请求。

此更改通过在执行网络搜索或URL解析前检查缓存来优化性能。如果找到先前成功的结果,则立即返回缓存数据,从而显著减少延迟和外部API的使用。仅当缓存未命中时,工具才会继续执行其原始逻辑,并将成功的结果存入缓存以备将来使用。
This commit is contained in:
minecraft1024a
2025-08-17 21:26:35 +08:00
parent 74ae472005
commit 1752831024

View File

@@ -20,6 +20,7 @@ from src.plugin_system import (
PythonDependency
)
from src.plugin_system.apis import config_api # 添加config_api导入
from src.common.cache_manager import tool_cache
import httpx
from bs4 import BeautifulSoup
@@ -86,6 +87,12 @@ class WebSurfingTool(BaseTool):
if not query:
return {"error": "搜索查询不能为空。"}
# 检查缓存
cached_result = tool_cache.get(self.name, function_args)
if cached_result:
logger.info(f"缓存命中: {self.name} -> {function_args}")
return cached_result
# 读取搜索配置
enabled_engines = config_api.get_global_config("web_search.enabled_engines", ["ddg"])
search_strategy = config_api.get_global_config("web_search.search_strategy", "single")
@@ -94,11 +101,17 @@ class WebSurfingTool(BaseTool):
# 根据策略执行搜索
if search_strategy == "parallel":
return await self._execute_parallel_search(function_args, enabled_engines)
result = await self._execute_parallel_search(function_args, enabled_engines)
elif search_strategy == "fallback":
return await self._execute_fallback_search(function_args, enabled_engines)
result = await self._execute_fallback_search(function_args, enabled_engines)
else: # single
return await self._execute_single_search(function_args, enabled_engines)
result = await self._execute_single_search(function_args, enabled_engines)
# 保存到缓存
if "error" not in result:
tool_cache.set(self.name, function_args, result)
return result
async def _execute_parallel_search(self, function_args: Dict[str, Any], enabled_engines: List[str]) -> Dict[str, Any]:
"""并行搜索策略:同时使用所有启用的搜索引擎"""
@@ -449,6 +462,12 @@ class URLParserTool(BaseTool):
"""
执行URL内容提取和总结。优先使用Exa失败后尝试本地解析。
"""
# 检查缓存
cached_result = tool_cache.get(self.name, function_args)
if cached_result:
logger.info(f"缓存命中: {self.name} -> {function_args}")
return cached_result
urls_input = function_args.get("urls")
if not urls_input:
return {"error": "URL列表不能为空。"}
@@ -555,6 +574,10 @@ class URLParserTool(BaseTool):
"content": formatted_content,
"errors": error_messages
}
# 保存到缓存
if "error" not in result:
tool_cache.set(self.name, function_args, result)
return result