From 1752831024b6ddc56ee9b1d3004209c761219c31 Mon Sep 17 00:00:00 2001
From: minecraft1024a
Date: Sun, 17 Aug 2025 21:26:35 +0800
Subject: [PATCH] =?UTF-8?q?perf(web=5Fsearch):=20=E7=BC=93=E5=AD=98?=
 =?UTF-8?q?=E7=BD=91=E7=BB=9C=E5=B7=A5=E5=85=B7=E8=B0=83=E7=94=A8=E4=BB=A5?=
 =?UTF-8?q?=E5=8A=A0=E9=80=9F=E5=93=8D=E5=BA=94?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

为 `WebSurfingTool` 和 `URLParserTool` 集成 `tool_cache`,避免对相同参数的重复请求。

此更改通过在执行网络搜索或URL解析前检查缓存来优化性能。如果找到先前成功的结果,则立即返回缓存数据,从而显著减少延迟和外部API的使用。仅当缓存未命中时,工具才会继续执行其原始逻辑,并将成功的结果存入缓存以备将来使用。
---
 .../built_in/web_search_tool/plugin.py        | 29 ++++++++++++++++++++++++++---
 1 file changed, 26 insertions(+), 3 deletions(-)

diff --git a/src/plugins/built_in/web_search_tool/plugin.py b/src/plugins/built_in/web_search_tool/plugin.py
index 05f318cd6..b2212f836 100644
--- a/src/plugins/built_in/web_search_tool/plugin.py
+++ b/src/plugins/built_in/web_search_tool/plugin.py
@@ -20,6 +20,7 @@ from src.plugin_system import (
     PythonDependency
 )
 from src.plugin_system.apis import config_api  # 添加config_api导入
+from src.common.cache_manager import tool_cache
 
 import httpx
 from bs4 import BeautifulSoup
@@ -86,6 +87,12 @@
         if not query:
             return {"error": "搜索查询不能为空。"}
 
+        # 检查缓存
+        cached_result = tool_cache.get(self.name, function_args)
+        if cached_result:
+            logger.info(f"缓存命中: {self.name} -> {function_args}")
+            return cached_result
+
         # 读取搜索配置
         enabled_engines = config_api.get_global_config("web_search.enabled_engines", ["ddg"])
         search_strategy = config_api.get_global_config("web_search.search_strategy", "single")
@@ -94,11 +101,17 @@
 
         # 根据策略执行搜索
         if search_strategy == "parallel":
-            return await self._execute_parallel_search(function_args, enabled_engines)
+            result = await self._execute_parallel_search(function_args, enabled_engines)
         elif search_strategy == "fallback":
-            return await self._execute_fallback_search(function_args, enabled_engines)
+            result = await self._execute_fallback_search(function_args, enabled_engines)
         else:  # single
-            return await self._execute_single_search(function_args, enabled_engines)
+            result = await self._execute_single_search(function_args, enabled_engines)
+
+        # 保存到缓存
+        if "error" not in result:
+            tool_cache.set(self.name, function_args, result)
+
+        return result
 
     async def _execute_parallel_search(self, function_args: Dict[str, Any], enabled_engines: List[str]) -> Dict[str, Any]:
         """并行搜索策略:同时使用所有启用的搜索引擎"""
@@ -449,6 +462,12 @@ class URLParserTool(BaseTool):
         """
         执行URL内容提取和总结。优先使用Exa,失败后尝试本地解析。
         """
+        # 检查缓存
+        cached_result = tool_cache.get(self.name, function_args)
+        if cached_result:
+            logger.info(f"缓存命中: {self.name} -> {function_args}")
+            return cached_result
+
         urls_input = function_args.get("urls")
         if not urls_input:
             return {"error": "URL列表不能为空。"}
@@ -555,6 +574,10 @@
             "content": formatted_content,
             "errors": error_messages
         }
+
+        # 保存到缓存
+        if "error" not in result:
+            tool_cache.set(self.name, function_args, result)
         return result