Windpicker-owo
2025-10-03 14:16:31 +08:00
10 changed files with 29 additions and 18 deletions

View File

@@ -1,3 +1,4 @@
import asyncio
import time
from typing import Any
@@ -15,6 +16,7 @@ class ChatterManager:
self.action_manager = action_manager
self.chatter_classes: dict[ChatType, list[type]] = {}
self.instances: dict[str, BaseChatter] = {}
self._processing_tasks: dict[str, asyncio.Task] = {}
# Manager statistics
self.stats = {
@@ -155,3 +157,11 @@ class ChatterManager:
"successful_executions": 0,
"failed_executions": 0,
}
def set_processing_task(self, stream_id: str, task: asyncio.Task):
"""设置流的处理任务"""
self._processing_tasks[stream_id] = task
def get_processing_task(self, stream_id: str) -> asyncio.Task | None:
"""获取流的处理任务"""
return self._processing_tasks.get(stream_id)
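
These two helpers give other components a handle on the per-stream chatter task so it can be inspected or cancelled from outside. A minimal usage sketch, assuming an existing ChatterManager instance; the run_stream wrapper and do_work coroutine are hypothetical stand-ins for the real per-stream processing:

import asyncio

async def run_stream(chatter_manager, stream_id: str) -> None:
    # Hypothetical worker standing in for the real chatter processing.
    async def do_work() -> None:
        await asyncio.sleep(10)

    task = asyncio.create_task(do_work())
    chatter_manager.set_processing_task(stream_id, task)

    # Later, e.g. when the stream is torn down, the registered task can be
    # looked up and cancelled if it is still running.
    tracked = chatter_manager.get_processing_task(stream_id)
    if tracked and not tracked.done():
        tracked.cancel()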

View File

@@ -365,7 +365,7 @@ class EnergyManager:
# Compute the position relative to the thresholds
if energy >= high_threshold:
# High-energy region: exponential boost
adjusted = 0.7 + (energy - 0.7) ** 0.8
adjusted = 0.7 + max(0, energy - 0.7) ** 0.8
elif energy >= reply_threshold:
# Mid-energy region: pass through linearly
adjusted = energy
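
The added max(0, ...) guard matters because energy - 0.7 can be negative if high_threshold is configured below 0.7, and in Python a negative float raised to a fractional power such as 0.8 yields a complex number, which would break the float arithmetic downstream. A minimal illustration, with a hypothetical energy value:

energy = 0.65  # hypothetical value that still reaches the high-energy branch

unguarded = (energy - 0.7) ** 0.8            # complex, not a float
guarded = 0.7 + max(0, energy - 0.7) ** 0.8  # 0.7, stays a float

print(type(unguarded))  # <class 'complex'>
print(type(guarded))    # <class 'float'>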

View File

@@ -8,9 +8,8 @@ from dataclasses import dataclass
from typing import Any
from src.chat.memory_system.integration_layer import IntegrationConfig, IntegrationMode, MemoryIntegrationLayer
from src.chat.memory_system.memory_formatter import FormatterConfig, format_memories_for_llm
from src.chat.memory_system.memory_chunk import MemoryChunk, MemoryType
from src.chat.memory_system.memory_formatter import FormatterConfig, format_memories_for_llm
from src.common.logger import get_logger
from src.llm_models.utils_model import LLMRequest

View File

@@ -7,7 +7,6 @@ from datetime import datetime
from typing import Any
from src.chat.memory_system.enhanced_memory_manager import enhanced_memory_manager
from src.common.logger import get_logger
from src.config.config import global_config

View File

@@ -6,7 +6,6 @@
from typing import Any
from src.chat.memory_system.enhanced_memory_hooks import enhanced_memory_hooks
from src.common.logger import get_logger
logger = get_logger(__name__)

View File

@@ -10,7 +10,6 @@ from enum import Enum
from typing import Any
from src.chat.memory_system.enhanced_memory_core import EnhancedMemorySystem
from src.chat.memory_system.memory_chunk import MemoryChunk
from src.common.logger import get_logger
from src.llm_models.utils_model import LLMRequest

View File

@@ -12,7 +12,6 @@ from src.chat.memory_system.enhanced_memory_adapter import (
process_conversation_with_enhanced_memory,
retrieve_memories_with_enhanced_system,
)
from src.common.logger import get_logger
logger = get_logger(__name__)

View File

@@ -9,8 +9,8 @@ from enum import Enum
from typing import Any
import orjson
from src.chat.memory_system.enhanced_reranker import EnhancedReRanker, ReRankingConfig
from src.chat.memory_system.memory_chunk import MemoryChunk, MemoryType
from src.common.logger import get_logger

View File

@@ -245,6 +245,11 @@ class StreamLoopManager:
except asyncio.CancelledError:
logger.info(f"流循环被取消: {stream_id}")
if self.chatter_manager:
task = self.chatter_manager.get_processing_task(stream_id)
if task and not task.done():
task.cancel()
logger.debug(f"已取消 chatter 处理任务: {stream_id}")
break
except Exception as e:
logger.error(f"流循环出错 {stream_id}: {e}", exc_info=True)
@@ -319,8 +324,9 @@ class StreamLoopManager:
start_time = time.time()
# Call chatter_manager directly to process the stream context
context.processing_task = asyncio.create_task(self.chatter_manager.process_stream_context(stream_id, context))
results = await context.processing_task
task = asyncio.create_task(self.chatter_manager.process_stream_context(stream_id, context))
self.chatter_manager.set_processing_task(stream_id, task)
results = await task
success = results.get("success", False)
if success:
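
Taken together, the two hunks above close a gap in cancellation handling: the loop now registers the chatter task through set_processing_task before awaiting it, so that when the stream loop itself receives asyncio.CancelledError, the still-running chatter task can be looked up and cancelled instead of being orphaned. A simplified sketch of the pattern, with manager standing in for chatter_manager and the surrounding class machinery omitted:

import asyncio

async def stream_loop(manager, stream_id: str, context) -> dict:
    try:
        # Register the task before awaiting so other code paths can reach it.
        task = asyncio.create_task(manager.process_stream_context(stream_id, context))
        manager.set_processing_task(stream_id, task)
        return await task
    except asyncio.CancelledError:
        # Propagate cancellation to the in-flight chatter task so it does not
        # keep running after the stream loop is gone.
        tracked = manager.get_processing_task(stream_id)
        if tracked and not tracked.done():
            tracked.cancel()
        raise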