Commit by Windpicker-owo, 2025-10-05 01:29:48 +08:00
41 changed files with 740 additions and 1182 deletions

View File

@@ -402,6 +402,12 @@ class EmojiManager:
logger.info("启动表情包管理器")
def shutdown(self) -> None:
"""关闭EmojiManager取消正在运行的任务"""
if self._scan_task and not self._scan_task.done():
self._scan_task.cancel()
logger.info("表情包扫描任务已取消")
def initialize(self) -> None:
"""初始化数据库连接和表情目录"""

View File

@@ -32,7 +32,10 @@ import time
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from typing import Any
from typing import Any, Type, TypeVar
E = TypeVar("E", bound=Enum)
import orjson
@@ -49,6 +52,21 @@ from src.llm_models.utils_model import LLMRequest
logger = get_logger(__name__)
CHINESE_TO_MEMORY_TYPE: dict[str, MemoryType] = {
    "个人事实": MemoryType.PERSONAL_FACT,
    "事件": MemoryType.EVENT,
    "偏好": MemoryType.PREFERENCE,
    "观点": MemoryType.OPINION,
    "关系": MemoryType.RELATIONSHIP,
    "情感": MemoryType.EMOTION,
    "知识": MemoryType.KNOWLEDGE,
    "技能": MemoryType.SKILL,
    "目标": MemoryType.GOAL,
    "经验": MemoryType.EXPERIENCE,
    "上下文": MemoryType.CONTEXTUAL,
}

class ExtractionStrategy(Enum):
    """Extraction strategy."""
@@ -428,7 +446,7 @@ class MemoryBuilder:
                subject=normalized_subject,
                predicate=predicate_value,
                obj=object_value,
                memory_type=MemoryType(mem_data.get("type", "contextual")),
                memory_type=self._resolve_memory_type(mem_data.get("type")),
                chat_id=context.get("chat_id"),
                source_context=mem_data.get("reasoning", ""),
                importance=importance_level,
@@ -459,7 +477,33 @@ class MemoryBuilder:
        return memories

    def _parse_enum_value(self, enum_cls: type[Enum], raw_value: Any, default: Enum, field_name: str) -> Enum:

    def _resolve_memory_type(self, type_str: Any) -> MemoryType:
        """Robustly resolve the memory type, accepting both Chinese and English inputs."""
        if not isinstance(type_str, str) or not type_str.strip():
            return MemoryType.CONTEXTUAL

        cleaned_type = type_str.strip()

        # Try the Chinese-label mapping first
        if cleaned_type in CHINESE_TO_MEMORY_TYPE:
            return CHINESE_TO_MEMORY_TYPE[cleaned_type]

        # Try to interpret the string directly as an enum value
        try:
            return MemoryType(cleaned_type.lower().replace(" ", "_"))
        except ValueError:
            pass

        # Try to interpret the string as an enum member name
        try:
            return MemoryType[cleaned_type.upper()]
        except KeyError:
            pass

        logger.warning(f"Cannot resolve unknown memory type '{type_str}', falling back to CONTEXTUAL")
        return MemoryType.CONTEXTUAL

    def _parse_enum_value(self, enum_cls: Type[E], raw_value: Any, default: E, field_name: str) -> E:
        """Parse an enum value, accepting numeric or string representations."""
        if isinstance(raw_value, enum_cls):
            return raw_value
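The two helpers above implement a four-step fallback: explicit Chinese alias, enum value, enum member name, then the CONTEXTUAL default, while the TypeVar-based signature keeps the return type precise for any Enum subclass. A self-contained sketch of the same resolution chain against a toy enum (not the project's MemoryType) follows:

from enum import Enum
from typing import Any, TypeVar

E = TypeVar("E", bound=Enum)

class ToyMemoryType(Enum):
    PERSONAL_FACT = "personal_fact"
    CONTEXTUAL = "contextual"

TOY_CHINESE_MAP = {"个人事实": ToyMemoryType.PERSONAL_FACT}

def resolve_enum(enum_cls: type[E], raw: Any, default: E, aliases: dict[str, E]) -> E:
    """Resolve raw input to an enum member: alias -> value -> name -> default."""
    if isinstance(raw, enum_cls):
        return raw
    if not isinstance(raw, str) or not raw.strip():
        return default
    cleaned = raw.strip()
    if cleaned in aliases:                                     # 1) alias table (e.g. Chinese labels)
        return aliases[cleaned]
    try:
        return enum_cls(cleaned.lower().replace(" ", "_"))     # 2) enum value
    except ValueError:
        pass
    try:
        return enum_cls[cleaned.upper()]                       # 3) enum member name
    except KeyError:
        return default                                         # 4) fall back

print(resolve_enum(ToyMemoryType, "个人事实", ToyMemoryType.CONTEXTUAL, TOY_CHINESE_MAP))       # PERSONAL_FACT
print(resolve_enum(ToyMemoryType, "Personal Fact", ToyMemoryType.CONTEXTUAL, TOY_CHINESE_MAP))  # PERSONAL_FACT
print(resolve_enum(ToyMemoryType, "???", ToyMemoryType.CONTEXTUAL, TOY_CHINESE_MAP))            # CONTEXTUAL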

View File

@@ -162,7 +162,6 @@ class MemorySystem:
    async def initialize(self):
        """Asynchronously initialize the memory system."""
        try:
            logger.info("Initializing memory system...")
            # Initialize the LLM model
            fallback_task = getattr(self.llm_model, "model_for_task", None) if self.llm_model else None
@@ -268,11 +267,8 @@ class MemorySystem:
logger.warning(f"海马体采样器初始化失败: {e}")
self.hippocampus_sampler = None
# 统一存储已经自动加载数据,无需额外加载
logger.info("✅ 简化版记忆系统初始化完成")
self.status = MemorySystemStatus.READY
logger.info("✅ 记忆系统初始化完成")
except Exception as e:
self.status = MemorySystemStatus.ERROR
@@ -1425,16 +1421,6 @@ class MemorySystem:
    def _fingerprint_key(user_id: str, fingerprint: str) -> str:
        return f"{user_id!s}:{fingerprint}"

    def get_system_stats(self) -> dict[str, Any]:
        """Get system statistics."""
        return {
            "status": self.status.value,
            "total_memories": self.total_memories,
            "last_build_time": self.last_build_time,
            "last_retrieval_time": self.last_retrieval_time,
            "config": asdict(self.config),
        }

    def _compute_memory_score(self, query_text: str, memory: MemoryChunk, context: dict[str, Any]) -> float:
        """Compute a match score for a memory based on the query and context."""
        tokens_query = self._tokenize_text(query_text)
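The hunk above drops get_system_stats and leaves _compute_memory_score, which tokenizes the query before scoring. The repository's actual scoring formula is not shown here; purely as an illustration of a token-overlap scorer, a minimal Jaccard-style sketch might look like this:

import re

def tokenize(text: str) -> set[str]:
    # Crude tokenizer: lowercase word characters; a real system would
    # presumably also handle Chinese segmentation properly.
    return set(re.findall(r"\w+", text.lower()))

def jaccard_score(query_text: str, memory_text: str) -> float:
    """Token-overlap score in [0, 1]; illustrative only, not the project's formula."""
    q, m = tokenize(query_text), tokenize(memory_text)
    if not q or not m:
        return 0.0
    return len(q & m) / len(q | m)

print(jaccard_score("favorite color of the user", "the user's favorite color is blue"))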
@@ -1542,7 +1528,7 @@ class MemorySystem:
        # Persist unified storage data
        if self.unified_storage:
            await self.unified_storage.cleanup()
            self.unified_storage.cleanup()
        logger.info("✅ Simplified memory system shut down")

View File

@@ -964,6 +964,11 @@ class VectorMemoryStorage:
logger.info("Vector记忆存储系统已停止")
def cleanup(self):
"""清理资源,兼容旧接口"""
logger.info("正在清理VectorMemoryStorage资源...")
self.stop()
# 全局实例(可选)
_global_vector_storage = None

View File

@@ -110,7 +110,6 @@ class AdaptiveStreamManager:
        self.is_running = True
        self.monitor_task = asyncio.create_task(self._system_monitor_loop(), name="system_monitor")
        self.adjustment_task = asyncio.create_task(self._adjustment_loop(), name="limit_adjustment")
        logger.info("Adaptive stream manager started")

    async def stop(self):
        """Stop the adaptive manager."""

View File

@@ -72,7 +72,6 @@ class BatchDatabaseWriter:
        self.is_running = True
        self.writer_task = asyncio.create_task(self._batch_writer_loop(), name="batch_database_writer")
        logger.info("Batch database writer started")

    async def stop(self):
        """Stop the batch writer."""

View File

@@ -59,7 +59,6 @@ class StreamLoopManager:
            return
        self.is_running = True
        logger.info("Stream loop manager started")

    async def stop(self) -> None:
        """Stop the stream loop manager."""

View File

@@ -60,7 +60,6 @@ class MessageManager:
        try:
            from src.chat.message_manager.batch_database_writer import init_batch_writer
            await init_batch_writer()
            logger.info("📦 Batch database writer started")
        except Exception as e:
            logger.error(f"Failed to start batch database writer: {e}")
@@ -68,7 +67,6 @@ class MessageManager:
        try:
            from src.chat.message_manager.stream_cache_manager import init_stream_cache_manager
            await init_stream_cache_manager()
            logger.info("🗄️ Stream cache manager started")
        except Exception as e:
            logger.error(f"Failed to start stream cache manager: {e}")

View File

@@ -72,7 +72,6 @@ class TieredStreamCache:
        self.is_running = True
        self.cleanup_task = asyncio.create_task(self._cleanup_loop(), name="stream_cache_cleanup")
        logger.info("Tiered stream cache manager started")

    async def stop(self):
        """Stop the cache manager."""