Author: 雅诺狐
Date: 2025-08-18 17:29:32 +08:00
18 changed files with 1128 additions and 566 deletions

View File

@@ -9,8 +9,6 @@ from src.common.logger import get_logger
 from src.plugin_system import (
     BasePlugin,
     ComponentInfo,
-    BaseAction,
-    BaseCommand,
     register_plugin
 )
 from src.plugin_system.base.config_types import ConfigField
@@ -68,6 +66,8 @@ class MaiZoneRefactoredPlugin(BasePlugin):
         },
         "schedule": {
             "enable_schedule": ConfigField(type=bool, default=False, description="是否启用定时发送"),
+            "random_interval_min_minutes": ConfigField(type=int, default=5, description="随机间隔分钟数下限"),
+            "random_interval_max_minutes": ConfigField(type=int, default=15, description="随机间隔分钟数上限"),
         },
         "cookie": {
             "http_fallback_host": ConfigField(type=str, default="127.0.0.1", description="备用Cookie获取服务的主机地址"),

View File

@@ -5,7 +5,6 @@ QQ空间服务模块
 """
 import asyncio
 import base64
-import json
 import os
 import random
@@ -15,7 +14,6 @@ from typing import Callable, Optional, Dict, Any, List, Tuple
 import aiohttp
 import bs4
 import json5
-from src.chat.utils.utils_image import get_image_manager
 from src.common.logger import get_logger
 from src.plugin_system.apis import config_api, person_api

View File

@@ -5,6 +5,7 @@
 """
 import asyncio
 import datetime
+import random
 import traceback
 from typing import Callable
@@ -91,8 +92,12 @@ class SchedulerService:
                     result.get("message", "")
                 )
-                # 6. 等待5分钟后进行下一次检查
-                await asyncio.sleep(300)
+                # 6. 计算并等待一个随机的时间间隔
+                min_minutes = self.get_config("schedule.random_interval_min_minutes", 5)
+                max_minutes = self.get_config("schedule.random_interval_max_minutes", 15)
+                wait_seconds = random.randint(min_minutes * 60, max_minutes * 60)
+                logger.info(f"下一次检查将在 {wait_seconds / 60:.2f} 分钟后进行。")
+                await asyncio.sleep(wait_seconds)
             except asyncio.CancelledError:
                 logger.info("定时任务循环被取消。")
@@ -113,7 +118,7 @@ class SchedulerService:
with get_db_session() as session:
record = session.query(MaiZoneScheduleStatus).filter(
MaiZoneScheduleStatus.datetime_hour == hour_str,
MaiZoneScheduleStatus.is_processed == True
MaiZoneScheduleStatus.is_processed == True # noqa: E712
).first()
return record is not None
except Exception as e:
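Note: the `# noqa: E712` is needed because flake8 flags any `== True` comparison, but on a SQLAlchemy column `==` is an overloaded operator that builds a SQL expression; rewriting it as `is True` would silently break the filter. A lint-clean equivalent, if suppression is undesirable:

    MaiZoneScheduleStatus.is_processed.is_(True)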
@@ -138,10 +143,10 @@ class SchedulerService:
             if record:
                 # 如果存在,则更新状态
-                record.is_processed = True
-                record.processed_at = datetime.datetime.now()
-                record.send_success = success
-                record.story_content = content
+                record.is_processed = True  # type: ignore
+                record.processed_at = datetime.datetime.now()  # type: ignore
+                record.send_success = success  # type: ignore
+                record.story_content = content  # type: ignore
             else:
                 # 如果不存在,则创建新记录
                 new_record = MaiZoneScheduleStatus(

View File

@@ -20,6 +20,7 @@ from src.plugin_system import (
     PythonDependency
 )
 from src.plugin_system.apis import config_api  # 添加config_api导入
+from src.common.cache_manager import tool_cache
 import httpx
 from bs4 import BeautifulSoup
@@ -86,6 +87,13 @@ class WebSurfingTool(BaseTool):
         if not query:
             return {"error": "搜索查询不能为空。"}
+        # 检查缓存
+        query = function_args.get("query")
+        cached_result = await tool_cache.get(self.name, function_args, tool_class=self.__class__, semantic_query=query)
+        if cached_result:
+            logger.info(f"缓存命中: {self.name} -> {function_args}")
+            return cached_result
         # 读取搜索配置
         enabled_engines = config_api.get_global_config("web_search.enabled_engines", ["ddg"])
         search_strategy = config_api.get_global_config("web_search.search_strategy", "single")
@@ -94,11 +102,18 @@ class WebSurfingTool(BaseTool):
         # 根据策略执行搜索
         if search_strategy == "parallel":
-            return await self._execute_parallel_search(function_args, enabled_engines)
+            result = await self._execute_parallel_search(function_args, enabled_engines)
         elif search_strategy == "fallback":
-            return await self._execute_fallback_search(function_args, enabled_engines)
+            result = await self._execute_fallback_search(function_args, enabled_engines)
         else:  # single
-            return await self._execute_single_search(function_args, enabled_engines)
+            result = await self._execute_single_search(function_args, enabled_engines)
+
+        # 保存到缓存
+        if "error" not in result:
+            query = function_args.get("query")
+            await tool_cache.set(self.name, function_args, self.__class__, result, semantic_query=query)
+
+        return result

     async def _execute_parallel_search(self, function_args: Dict[str, Any], enabled_engines: List[str]) -> Dict[str, Any]:
         """并行搜索策略:同时使用所有启用的搜索引擎"""
@@ -449,6 +464,12 @@ class URLParserTool(BaseTool):
         """
         执行URL内容提取和总结。优先使用Exa,失败后尝试本地解析。
         """
+        # 检查缓存
+        cached_result = await tool_cache.get(self.name, function_args, tool_class=self.__class__)
+        if cached_result:
+            logger.info(f"缓存命中: {self.name} -> {function_args}")
+            return cached_result
         urls_input = function_args.get("urls")
         if not urls_input:
             return {"error": "URL列表不能为空。"}
@@ -555,6 +576,10 @@ class URLParserTool(BaseTool):
             "content": formatted_content,
             "errors": error_messages
         }
+
+        # 保存到缓存
+        if "error" not in result:
+            await tool_cache.set(self.name, function_args, self.__class__, result)
+
         return result
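Note the asymmetry between the two tools: `WebSurfingTool` passes `semantic_query=query`, presumably so near-identical searches can share an entry, while `URLParserTool` omits it, since URL extraction should only match on the exact argument set. A minimal in-memory stand-in for the get/set contract used above (the real `src.common.cache_manager.tool_cache` may key, match, and expire entries differently; this is an assumption):

    import hashlib
    import json

    class InMemoryToolCache:
        def __init__(self) -> None:
            self._store = {}

        def _key(self, tool_name, function_args) -> str:
            # Deterministic key over the tool name plus its arguments.
            payload = json.dumps({"tool": tool_name, "args": function_args}, sort_keys=True)
            return hashlib.sha256(payload.encode("utf-8")).hexdigest()

        async def get(self, tool_name, function_args, tool_class=None, semantic_query=None):
            return self._store.get(self._key(tool_name, function_args))

        async def set(self, tool_name, function_args, tool_class, result, semantic_query=None):
            self._store[self._key(tool_name, function_args)] = result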