feat(tool_system): implement declarative caching for tools

This commit refactors the tool caching system to be more robust, more configurable, and easier to use. The caching logic is centralized within the `wrap_tool_executor`, removing the need for boilerplate code within individual tool implementations.

Key changes:
- Adds `enable_cache`, `cache_ttl`, and `semantic_cache_query_key` attributes to `BaseTool` for declarative cache configuration.
- Moves caching logic out of the individual tools (previously a simple history-based lookup duplicated per tool) into a unified handling process in `wrap_tool_executor`.
- The new system leverages the central `tool_cache` manager for both exact and semantic caching based on tool configuration.
- Refactors `WebSurfingTool` and `URLParserTool` to utilize the new declarative caching mechanism, simplifying their code.
This commit is contained in:
minecraft1024a
2025-08-27 18:45:59 +08:00
committed by Windpicker-owo
parent 12bcde800e
commit 6b53560a7e
5 changed files with 208 additions and 70 deletions

View File

@@ -30,6 +30,12 @@ class URLParserTool(BaseTool):
parameters = [
("urls", ToolParamType.STRING, "要理解的网站", True, None),
]
# --- 新的缓存配置 ---
enable_cache: bool = True
cache_ttl: int = 86400 # 缓存24小时
semantic_cache_query_key: str = "urls"
# --------------------
def __init__(self, plugin_config=None):
super().__init__(plugin_config)
@@ -42,10 +48,11 @@ class URLParserTool(BaseTool):
if exa_api_keys is None:
# 从插件配置文件读取
exa_api_keys = self.get_config("exa.api_keys", [])
# 创建API密钥管理器
from typing import cast, List
self.api_manager = create_api_key_manager_from_config(
exa_api_keys,
cast(List[str], exa_api_keys),
lambda key: Exa(api_key=key),
"Exa URL Parser"
)
@@ -135,16 +142,6 @@ class URLParserTool(BaseTool):
"""
执行URL内容提取和总结。优先使用Exa失败后尝试本地解析。
"""
# 获取当前文件路径用于缓存键
import os
current_file_path = os.path.abspath(__file__)
# 检查缓存
cached_result = await tool_cache.get(self.name, function_args, current_file_path)
if cached_result:
logger.info(f"缓存命中: {self.name} -> {function_args}")
return cached_result
urls_input = function_args.get("urls")
if not urls_input:
return {"error": "URL列表不能为空。"}
@@ -235,8 +232,4 @@ class URLParserTool(BaseTool):
"errors": error_messages
}
# 保存到缓存
if "error" not in result:
await tool_cache.set(self.name, function_args, current_file_path, result)
return result

View File

@@ -31,6 +31,12 @@ class WebSurfingTool(BaseTool):
("time_range", ToolParamType.STRING, "指定搜索的时间范围,可以是 'any', 'week', 'month'。默认为 'any'", False, ["any", "week", "month"])
] # type: ignore
# --- 新的缓存配置 ---
enable_cache: bool = True
cache_ttl: int = 7200 # 缓存2小时
semantic_cache_query_key: str = "query"
# --------------------
def __init__(self, plugin_config=None):
super().__init__(plugin_config)
# 初始化搜索引擎
@@ -46,16 +52,6 @@ class WebSurfingTool(BaseTool):
if not query:
return {"error": "搜索查询不能为空。"}
# 获取当前文件路径用于缓存键
import os
current_file_path = os.path.abspath(__file__)
# 检查缓存
cached_result = await tool_cache.get(self.name, function_args, current_file_path, semantic_query=query)
if cached_result:
logger.info(f"缓存命中: {self.name} -> {function_args}")
return cached_result
# 读取搜索配置
enabled_engines = config_api.get_global_config("web_search.enabled_engines", ["ddg"])
search_strategy = config_api.get_global_config("web_search.search_strategy", "single")
@@ -69,10 +65,6 @@ class WebSurfingTool(BaseTool):
result = await self._execute_fallback_search(function_args, enabled_engines)
else: # single
result = await self._execute_single_search(function_args, enabled_engines)
# 保存到缓存
if "error" not in result:
await tool_cache.set(self.name, function_args, current_file_path, result, semantic_query=query)
return result