Refactor message handling and replace MessageRecv with DatabaseMessages

- Update PlusCommand to use DatabaseMessages instead of MessageRecv.
- Refactor the message-handling logic into a new module, message_processor.py, which processes message segments and builds DatabaseMessages from message dicts.
- Remove the deprecated MessageRecv class and its related logic.
- Adjust the various plugins to the new DatabaseMessages structure.
- Improve error handling and logging in the message-processing code.
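The second bullet above describes the core of the change: message_processor.py walks the raw message segments and turns a message dict into a DatabaseMessages record. The sketch below illustrates that idea only and is not the code from this commit; the helper names (build_database_message, flatten_segments), the segment shapes, and the stand-in DatabaseMessageDraft dataclass are all assumptions, and the real DatabaseMessages model in this repository will differ in its fields.

# Hypothetical sketch of the message_processor.py idea described above.
# DatabaseMessageDraft is a stand-in; the real DatabaseMessages model may differ.
import time
from dataclasses import dataclass, field
from typing import Any


@dataclass
class DatabaseMessageDraft:
    message_id: str
    chat_id: str
    user_id: str
    processed_plain_text: str
    time: float = field(default_factory=time.time)
    is_read: bool = False


def flatten_segments(segments: list[dict[str, Any]]) -> str:
    """Collapse message segments (text, at, image, ...) into plain text."""
    parts: list[str] = []
    for seg in segments:
        seg_type = seg.get("type")
        data = seg.get("data", {})
        if seg_type == "text":
            parts.append(data.get("text", ""))
        elif seg_type == "at":
            parts.append(f"@{data.get('qq', '')}")
        else:
            parts.append(f"[{seg_type}]")  # placeholder for non-text segments
    return "".join(parts)


def build_database_message(message_dict: dict[str, Any]) -> DatabaseMessageDraft:
    """Build a database-message record from a raw message dict (illustrative only)."""
    return DatabaseMessageDraft(
        message_id=str(message_dict.get("message_id", "")),
        chat_id=str(message_dict.get("chat_id", "")),
        user_id=str(message_dict.get("user_info", {}).get("user_id", "")),
        processed_plain_text=flatten_segments(message_dict.get("message_segment", [])),
    )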
@@ -29,7 +29,6 @@ class SingleStreamContextManager:
# 配置参数
self.max_context_size = max_context_size or getattr(global_config.chat, "max_context_size", 100)
self.context_ttl = getattr(global_config.chat, "context_ttl", 24 * 3600)  # 24小时

# 元数据
self.created_time = time.time()
@@ -93,27 +92,24 @@ class SingleStreamContextManager:
return True
else:
logger.warning(f"消息缓存系统添加失败,回退到直接添加: {self.stream_id}")

except ImportError:
logger.debug("MessageManager不可用,使用直接添加模式")
except Exception as e:
logger.warning(f"消息缓存系统异常,回退到直接添加: {self.stream_id}, error={e}")

# 回退方案:直接添加到未读消息
message.is_read = False
self.context.unread_messages.append(message)
# 回退方案:直接添加到未读消息
message.is_read = False
self.context.unread_messages.append(message)

# 自动检测和更新chat type
self._detect_chat_type(message)
# 自动检测和更新chat type
self._detect_chat_type(message)

# 在上下文管理器中计算兴趣值
await self._calculate_message_interest(message)
self.total_messages += 1
self.last_access_time = time.time()
# 启动流的循环任务(如果还未启动)
asyncio.create_task(stream_loop_manager.start_stream_loop(self.stream_id))
logger.debug(f"添加消息{message.processed_plain_text}到单流上下文: {self.stream_id}")
return True
# 在上下文管理器中计算兴趣值
await self._calculate_message_interest(message)
self.total_messages += 1
self.last_access_time = time.time()
# 启动流的循环任务(如果还未启动)
asyncio.create_task(stream_loop_manager.start_stream_loop(self.stream_id))
logger.debug(f"添加消息{message.processed_plain_text}到单流上下文: {self.stream_id}")
return True
except Exception as e:
logger.error(f"添加消息到单流上下文失败 {self.stream_id}: {e}", exc_info=True)
return False
@@ -71,14 +71,6 @@ class MessageManager:
except Exception as e:
logger.error(f"启动批量数据库写入器失败: {e}")

# 启动流缓存管理器
try:
from src.chat.message_manager.stream_cache_manager import init_stream_cache_manager

await init_stream_cache_manager()
except Exception as e:
logger.error(f"启动流缓存管理器失败: {e}")

# 启动消息缓存系统(内置)
logger.info("📦 消息缓存系统已启动")
@@ -116,15 +108,6 @@ class MessageManager:
except Exception as e:
logger.error(f"停止批量数据库写入器失败: {e}")

# 停止流缓存管理器
try:
from src.chat.message_manager.stream_cache_manager import shutdown_stream_cache_manager

await shutdown_stream_cache_manager()
logger.info("🗄️ 流缓存管理器已停止")
except Exception as e:
logger.error(f"停止流缓存管理器失败: {e}")

# 停止消息缓存系统(内置)
self.message_caches.clear()
self.stream_processing_status.clear()
@@ -152,7 +135,7 @@ class MessageManager:
# 检查是否为notice消息
if self._is_notice_message(message):
# Notice消息处理 - 添加到全局管理器
logger.info(f"📢 检测到notice消息: message_id={message.message_id}, is_notify={message.is_notify}, notice_type={getattr(message, 'notice_type', None)}")
logger.info(f"📢 检测到notice消息: notice_type={getattr(message, 'notice_type', None)}")
await self._handle_notice_message(stream_id, message)

# 根据配置决定是否继续处理(触发聊天流程)
@@ -206,39 +189,6 @@ class MessageManager:
except Exception as e:
logger.error(f"更新消息 {message_id} 时发生错误: {e}")

async def bulk_update_messages(self, stream_id: str, updates: list[dict[str, Any]]) -> int:
"""批量更新消息信息,降低更新频率"""
if not updates:
return 0

try:
chat_manager = get_chat_manager()
chat_stream = await chat_manager.get_stream(stream_id)
if not chat_stream:
logger.warning(f"MessageManager.bulk_update_messages: 聊天流 {stream_id} 不存在")
return 0

updated_count = 0
for item in updates:
message_id = item.get("message_id")
if not message_id:
continue

payload = {key: value for key, value in item.items() if key != "message_id" and value is not None}

if not payload:
continue

success = await chat_stream.context_manager.update_message(message_id, payload)
if success:
updated_count += 1

if updated_count:
logger.debug(f"批量更新消息 {updated_count} 条 (stream={stream_id})")
return updated_count
except Exception as e:
logger.error(f"批量更新聊天流 {stream_id} 消息失败: {e}")
return 0

async def add_action(self, stream_id: str, message_id: str, action: str):
"""添加动作到消息"""
@@ -266,7 +216,7 @@ class MessageManager:
logger.warning(f"停用流失败: 聊天流 {stream_id} 不存在")
return

context = chat_stream.stream_context
context = chat_stream.context_manager.context
context.is_active = False

# 取消处理任务
@@ -288,7 +238,7 @@ class MessageManager:
logger.warning(f"激活流失败: 聊天流 {stream_id} 不存在")
return

context = chat_stream.stream_context
context = chat_stream.context_manager.context
context.is_active = True
logger.info(f"激活聊天流: {stream_id}")
@@ -304,7 +254,7 @@ class MessageManager:
if not chat_stream:
return None

context = chat_stream.stream_context
context = chat_stream.context_manager.context
unread_count = len(chat_stream.context_manager.get_unread_messages())

return StreamStats(
@@ -447,7 +397,7 @@ class MessageManager:
await asyncio.sleep(0.1)

# 获取当前的stream context
context = chat_stream.stream_context
context = chat_stream.context_manager.context

# 确保有未读消息需要处理
unread_messages = context.get_unread_messages()
@@ -1,377 +0,0 @@
"""
流缓存管理器 - 使用优化版聊天流和智能缓存策略
提供分层缓存和自动清理功能
"""

import asyncio
import time
from collections import OrderedDict
from dataclasses import dataclass

from maim_message import GroupInfo, UserInfo

from src.chat.message_receive.optimized_chat_stream import OptimizedChatStream, create_optimized_chat_stream
from src.common.logger import get_logger

logger = get_logger("stream_cache_manager")


@dataclass
class StreamCacheStats:
    """缓存统计信息"""

    hot_cache_size: int = 0
    warm_storage_size: int = 0
    cold_storage_size: int = 0
    total_memory_usage: int = 0  # 估算的内存使用(字节)
    cache_hits: int = 0
    cache_misses: int = 0
    evictions: int = 0
    last_cleanup_time: float = 0


class TieredStreamCache:
    """分层流缓存管理器"""

    def __init__(
        self,
        max_hot_size: int = 100,
        max_warm_size: int = 500,
        max_cold_size: int = 2000,
        cleanup_interval: float = 300.0,  # 5分钟清理一次
        hot_timeout: float = 1800.0,  # 30分钟未访问降级到warm
        warm_timeout: float = 7200.0,  # 2小时未访问降级到cold
        cold_timeout: float = 86400.0,  # 24小时未访问删除
    ):
        self.max_hot_size = max_hot_size
        self.max_warm_size = max_warm_size
        self.max_cold_size = max_cold_size
        self.cleanup_interval = cleanup_interval
        self.hot_timeout = hot_timeout
        self.warm_timeout = warm_timeout
        self.cold_timeout = cold_timeout

        # 三层缓存存储
        self.hot_cache: OrderedDict[str, OptimizedChatStream] = OrderedDict()  # 热数据(LRU)
        self.warm_storage: dict[str, tuple[OptimizedChatStream, float]] = {}  # 温数据(最后访问时间)
        self.cold_storage: dict[str, tuple[OptimizedChatStream, float]] = {}  # 冷数据(最后访问时间)

        # 统计信息
        self.stats = StreamCacheStats()

        # 清理任务
        self.cleanup_task: asyncio.Task | None = None
        self.is_running = False

        logger.info(f"分层流缓存管理器初始化完成 (hot:{max_hot_size}, warm:{max_warm_size}, cold:{max_cold_size})")

    async def start(self):
        """启动缓存管理器"""
        if self.is_running:
            logger.warning("缓存管理器已经在运行")
            return

        self.is_running = True
        self.cleanup_task = asyncio.create_task(self._cleanup_loop(), name="stream_cache_cleanup")

    async def stop(self):
        """停止缓存管理器"""
        if not self.is_running:
            return

        self.is_running = False

        if self.cleanup_task and not self.cleanup_task.done():
            self.cleanup_task.cancel()
            try:
                await asyncio.wait_for(self.cleanup_task, timeout=10.0)
            except asyncio.TimeoutError:
                logger.warning("缓存清理任务停止超时")
            except Exception as e:
                logger.error(f"停止缓存清理任务时出错: {e}")

        logger.info("分层流缓存管理器已停止")

    async def get_or_create_stream(
        self,
        stream_id: str,
        platform: str,
        user_info: UserInfo,
        group_info: GroupInfo | None = None,
        data: dict | None = None,
    ) -> OptimizedChatStream:
        """获取或创建流 - 优化版本"""
        current_time = time.time()

        # 1. 检查热缓存
        if stream_id in self.hot_cache:
            stream = self.hot_cache[stream_id]
            # 移动到末尾(LRU更新)
            self.hot_cache.move_to_end(stream_id)
            self.stats.cache_hits += 1
            logger.debug(f"热缓存命中: {stream_id}")
            return stream.create_snapshot()

        # 2. 检查温存储
        if stream_id in self.warm_storage:
            stream, last_access = self.warm_storage[stream_id]
            self.warm_storage[stream_id] = (stream, current_time)
            self.stats.cache_hits += 1
            logger.debug(f"温缓存命中: {stream_id}")
            # 提升到热缓存
            await self._promote_to_hot(stream_id, stream)
            return stream.create_snapshot()

        # 3. 检查冷存储
        if stream_id in self.cold_storage:
            stream, last_access = self.cold_storage[stream_id]
            self.cold_storage[stream_id] = (stream, current_time)
            self.stats.cache_hits += 1
            logger.debug(f"冷缓存命中: {stream_id}")
            # 提升到温缓存
            await self._promote_to_warm(stream_id, stream)
            return stream.create_snapshot()

        # 4. 缓存未命中,创建新流
        self.stats.cache_misses += 1
        stream = create_optimized_chat_stream(
            stream_id=stream_id, platform=platform, user_info=user_info, group_info=group_info, data=data
        )
        logger.debug(f"缓存未命中,创建新流: {stream_id}")

        # 添加到热缓存
        await self._add_to_hot(stream_id, stream)

        return stream

    async def _add_to_hot(self, stream_id: str, stream: OptimizedChatStream):
        """添加到热缓存"""
        # 检查是否需要驱逐
        if len(self.hot_cache) >= self.max_hot_size:
            await self._evict_from_hot()

        self.hot_cache[stream_id] = stream
        self.stats.hot_cache_size = len(self.hot_cache)

    async def _promote_to_hot(self, stream_id: str, stream: OptimizedChatStream):
        """提升到热缓存"""
        # 从温存储中移除
        if stream_id in self.warm_storage:
            del self.warm_storage[stream_id]
            self.stats.warm_storage_size = len(self.warm_storage)

        # 添加到热缓存
        await self._add_to_hot(stream_id, stream)
        logger.debug(f"流 {stream_id} 提升到热缓存")

    async def _promote_to_warm(self, stream_id: str, stream: OptimizedChatStream):
        """提升到温缓存"""
        # 从冷存储中移除
        if stream_id in self.cold_storage:
            del self.cold_storage[stream_id]
            self.stats.cold_storage_size = len(self.cold_storage)

        # 添加到温存储
        if len(self.warm_storage) >= self.max_warm_size:
            await self._evict_from_warm()

        current_time = time.time()
        self.warm_storage[stream_id] = (stream, current_time)
        self.stats.warm_storage_size = len(self.warm_storage)
        logger.debug(f"流 {stream_id} 提升到温缓存")

    async def _evict_from_hot(self):
        """从热缓存驱逐最久未使用的流"""
        if not self.hot_cache:
            return

        # LRU驱逐
        stream_id, stream = self.hot_cache.popitem(last=False)
        self.stats.evictions += 1
        logger.debug(f"从热缓存驱逐: {stream_id}")

        # 移动到温存储
        if len(self.warm_storage) < self.max_warm_size:
            current_time = time.time()
            self.warm_storage[stream_id] = (stream, current_time)
            self.stats.warm_storage_size = len(self.warm_storage)
        else:
            # 温存储也满了,直接删除
            logger.debug(f"温存储已满,删除流: {stream_id}")

        self.stats.hot_cache_size = len(self.hot_cache)

    async def _evict_from_warm(self):
        """从温存储驱逐最久未使用的流"""
        if not self.warm_storage:
            return

        # 找到最久未访问的流
        oldest_stream_id = min(self.warm_storage.keys(), key=lambda k: self.warm_storage[k][1])
        stream, last_access = self.warm_storage.pop(oldest_stream_id)
        self.stats.evictions += 1
        logger.debug(f"从温存储驱逐: {oldest_stream_id}")

        # 移动到冷存储
        if len(self.cold_storage) < self.max_cold_size:
            current_time = time.time()
            self.cold_storage[oldest_stream_id] = (stream, current_time)
            self.stats.cold_storage_size = len(self.cold_storage)
        else:
            # 冷存储也满了,直接删除
            logger.debug(f"冷存储已满,删除流: {oldest_stream_id}")

        self.stats.warm_storage_size = len(self.warm_storage)

    async def _cleanup_loop(self):
        """清理循环"""
        logger.info("流缓存清理循环启动")

        while self.is_running:
            try:
                await asyncio.sleep(self.cleanup_interval)
                await self._perform_cleanup()
            except asyncio.CancelledError:
                logger.info("流缓存清理循环被取消")
                break
            except Exception as e:
                logger.error(f"流缓存清理出错: {e}")

        logger.info("流缓存清理循环结束")

    async def _perform_cleanup(self):
        """执行清理操作"""
        current_time = time.time()
        cleanup_stats = {
            "hot_to_warm": 0,
            "warm_to_cold": 0,
            "cold_removed": 0,
        }

        # 1. 检查热缓存超时
        hot_to_demote = []
        for stream_id, stream in self.hot_cache.items():
            # 获取最后访问时间(简化:使用创建时间作为近似)
            last_access = getattr(stream, "last_active_time", stream.create_time)
            if current_time - last_access > self.hot_timeout:
                hot_to_demote.append(stream_id)

        for stream_id in hot_to_demote:
            stream = self.hot_cache.pop(stream_id)
            current_time_local = time.time()
            self.warm_storage[stream_id] = (stream, current_time_local)
            cleanup_stats["hot_to_warm"] += 1

        # 2. 检查温存储超时
        warm_to_demote = []
        for stream_id, (stream, last_access) in self.warm_storage.items():
            if current_time - last_access > self.warm_timeout:
                warm_to_demote.append(stream_id)

        for stream_id in warm_to_demote:
            stream, last_access = self.warm_storage.pop(stream_id)
            self.cold_storage[stream_id] = (stream, last_access)
            cleanup_stats["warm_to_cold"] += 1

        # 3. 检查冷存储超时
        cold_to_remove = []
        for stream_id, (stream, last_access) in self.cold_storage.items():
            if current_time - last_access > self.cold_timeout:
                cold_to_remove.append(stream_id)

        for stream_id in cold_to_remove:
            self.cold_storage.pop(stream_id)
            cleanup_stats["cold_removed"] += 1

        # 更新统计信息
        self.stats.hot_cache_size = len(self.hot_cache)
        self.stats.warm_storage_size = len(self.warm_storage)
        self.stats.cold_storage_size = len(self.cold_storage)
        self.stats.last_cleanup_time = current_time

        # 估算内存使用(粗略估计)
        self.stats.total_memory_usage = (
            len(self.hot_cache) * 1024  # 每个热流约1KB
            + len(self.warm_storage) * 512  # 每个温流约512B
            + len(self.cold_storage) * 256  # 每个冷流约256B
        )

        if sum(cleanup_stats.values()) > 0:
            logger.info(
                f"缓存清理完成: {cleanup_stats['hot_to_warm']}热→温, "
                f"{cleanup_stats['warm_to_cold']}温→冷, "
                f"{cleanup_stats['cold_removed']}冷删除"
            )

    def get_stats(self) -> StreamCacheStats:
        """获取缓存统计信息"""
        # 计算命中率
        total_requests = self.stats.cache_hits + self.stats.cache_misses
        hit_rate = self.stats.cache_hits / total_requests if total_requests > 0 else 0

        stats_copy = StreamCacheStats(
            hot_cache_size=self.stats.hot_cache_size,
            warm_storage_size=self.stats.warm_storage_size,
            cold_storage_size=self.stats.cold_storage_size,
            total_memory_usage=self.stats.total_memory_usage,
            cache_hits=self.stats.cache_hits,
            cache_misses=self.stats.cache_misses,
            evictions=self.stats.evictions,
            last_cleanup_time=self.stats.last_cleanup_time,
        )

        # 添加命中率信息
        stats_copy.hit_rate = hit_rate

        return stats_copy

    def clear_cache(self):
        """清空所有缓存"""
        self.hot_cache.clear()
        self.warm_storage.clear()
        self.cold_storage.clear()

        self.stats.hot_cache_size = 0
        self.stats.warm_storage_size = 0
        self.stats.cold_storage_size = 0
        self.stats.total_memory_usage = 0

        logger.info("所有缓存已清空")

    async def get_stream_snapshot(self, stream_id: str) -> OptimizedChatStream | None:
        """获取流的快照(不修改缓存状态)"""
        if stream_id in self.hot_cache:
            return self.hot_cache[stream_id].create_snapshot()
        elif stream_id in self.warm_storage:
            return self.warm_storage[stream_id][0].create_snapshot()
        elif stream_id in self.cold_storage:
            return self.cold_storage[stream_id][0].create_snapshot()
        return None

    def get_cached_stream_ids(self) -> set[str]:
        """获取所有缓存的流ID"""
        return set(self.hot_cache.keys()) | set(self.warm_storage.keys()) | set(self.cold_storage.keys())


# 全局缓存管理器实例
_cache_manager: TieredStreamCache | None = None


def get_stream_cache_manager() -> TieredStreamCache:
    """获取流缓存管理器实例"""
    global _cache_manager
    if _cache_manager is None:
        _cache_manager = TieredStreamCache()
    return _cache_manager


async def init_stream_cache_manager():
    """初始化流缓存管理器"""
    manager = get_stream_cache_manager()
    await manager.start()


async def shutdown_stream_cache_manager():
    """关闭流缓存管理器"""
    manager = get_stream_cache_manager()
    await manager.stop()
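The file above (src.chat.message_manager.stream_cache_manager, deleted in this commit) exposed module-level helpers that MessageManager previously called during startup and shutdown, as seen in the hunks removing init_stream_cache_manager and shutdown_stream_cache_manager earlier in the diff. A minimal sketch of how that old wiring could be exercised follows; it is illustrative only, would run only against the pre-refactor tree, and the UserInfo field names are assumed.

# Illustrative only: exercising the removed TieredStreamCache via its module-level helpers.
# This module no longer exists after this commit.
import asyncio

from maim_message import UserInfo

from src.chat.message_manager.stream_cache_manager import (
    get_stream_cache_manager,
    init_stream_cache_manager,
    shutdown_stream_cache_manager,
)


async def demo():
    await init_stream_cache_manager()  # starts the background cleanup loop
    cache = get_stream_cache_manager()

    user = UserInfo(platform="qq", user_id="12345")  # field names assumed
    stream = await cache.get_or_create_stream(
        stream_id="demo_stream", platform="qq", user_info=user
    )
    print(stream, cache.get_stats())

    await shutdown_stream_cache_manager()  # cancels the cleanup task


if __name__ == "__main__":
    asyncio.run(demo())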