refactor(db): fix SQLAlchemy async call patterns

Remove the unnecessary `await` on `session.add()` and correct the async database access patterns. Main changes:

- Change `await session.add()` to `session.add()` throughout
- Switch several call sites to the async versions of the functions they use (e.g. the message query helpers)
- Rework SQLAlchemyTransaction into a fully async implementation
- Rewrite the napcat_adapter_plugin database layer to follow the async conventions
- Add aiomysql and aiosqlite as dependencies
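Background for the change: in SQLAlchemy's asyncio extension, AsyncSession.add() is a plain synchronous method that only registers the object with the unit of work, while commit(), flush(), refresh() and execute() are coroutines and must be awaited. A minimal sketch of the corrected write pattern (model and session factory names taken from the diff below):

    from src.common.database.sqlalchemy_models import get_db_session, PersonInfo

    async def create_person(p_data: dict) -> bool:
        async with get_db_session() as session:
            new_person = PersonInfo(**p_data)
            session.add(new_person)            # synchronous: just stages the object
            await session.commit()             # actual I/O, must be awaited
            await session.refresh(new_person)  # reload server-generated fields
            return True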
@@ -0,0 +1,25 @@
+[inner]
+version = "0.2.0" # 版本号
+# 请勿修改版本号,除非你知道自己在做什么
+
+[nickname] # 现在没用
+nickname = ""
+
+[napcat_server] # Napcat连接的ws服务设置
+mode = "reverse" # 连接模式:reverse=反向连接(作为服务器), forward=正向连接(作为客户端)
+host = "localhost" # 主机地址
+port = 8095 # 端口号
+url = "" # 正向连接时的完整WebSocket URL,如 ws://localhost:8080/ws (仅在forward模式下使用)
+access_token = "" # WebSocket 连接的访问令牌,用于身份验证(可选)
+heartbeat_interval = 30 # 心跳间隔时间(按秒计)
+
+[maibot_server] # 连接麦麦的ws服务设置
+host = "localhost" # 麦麦在.env文件中设置的主机地址,即HOST字段
+port = 8000 # 麦麦在.env文件中设置的端口,即PORT字段
+
+[voice] # 发送语音设置
+use_tts = false # 是否使用tts语音(请确保你配置了tts并有对应的adapter)
+
+[debug]
+level = "INFO" # 日志等级(DEBUG, INFO, WARNING, ERROR, CRITICAL)
+
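The hunk above adds the adapter's new config template. As a rough illustration of how the [napcat_server] fields fit together (not code from the commit; the file path and the fallback URL are assumptions), a loader might look like:

    import tomllib  # Python 3.11+ stdlib; older interpreters would use the tomli package

    with open("config.toml", "rb") as f:  # hypothetical path to the template above
        cfg = tomllib.load(f)

    napcat = cfg["napcat_server"]
    if napcat["mode"] == "forward":
        # forward mode: the adapter dials out to Napcat's WebSocket server
        ws_url = napcat["url"] or f"ws://{napcat['host']}:{napcat['port']}"
    else:
        # reverse mode: the adapter listens on host:port and Napcat connects in
        listen_addr = (napcat["host"], napcat["port"])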
@@ -32,7 +32,7 @@ class AntiInjectionStatistics:
         stats = session.query(AntiInjectionStats).order_by(AntiInjectionStats.id.desc()).first()
         if not stats:
             stats = AntiInjectionStats()
-            await session.add(stats)
+            session.add(stats)
         await session.commit()
         await session.refresh(stats)
         return stats
@@ -48,7 +48,7 @@ class AntiInjectionStatistics:
         stats = session.query(AntiInjectionStats).order_by(AntiInjectionStats.id.desc()).first()
         if not stats:
             stats = AntiInjectionStats()
-            await session.add(stats)
+            session.add(stats)

         # 更新统计字段
         for key, value in kwargs.items():
@@ -85,7 +85,7 @@ class UserBanManager:
             reason=f"提示词注入攻击 (置信度: {detection_result.confidence:.2f})",
             created_at=datetime.datetime.now(),
         )
-        await session.add(ban_record)
+        session.add(ban_record)

         await session.commit()

@@ -166,7 +166,7 @@ class MaiEmoji:
                 usage_count=self.usage_count,
                 last_used_time=self.last_used_time,
             )
-            await session.add(emoji)
+            session.add(emoji)
             await session.commit()

             logger.info(f"[注册] 表情包信息保存到数据库: {self.filename} ({self.emotion})")
@@ -381,7 +381,7 @@ class ExpressionLearner:
                     type=type,
                     create_date=current_time,  # 手动设置创建日期
                 )
-                await session.add(new_expression)
+                session.add(new_expression)

                 # 限制最大数量
                 exprs_result = await session.execute(
@@ -608,7 +608,7 @@ class ExpressionLearnerManager:
                     type=type_str,
                     create_date=last_active_time,  # 迁移时使用last_active_time作为创建时间
                 )
-                await session.add(new_expression)
+                session.add(new_expression)

                 migrated_count += 1
             logger.info(f"已迁移 {expr_file} 到数据库,包含 {len(expressions)} 个表达方式")
@@ -117,7 +117,7 @@ class InstantMemory:
                 create_time=memory_item.create_time,
                 last_view_time=memory_item.last_view_time,
             )
-            await session.add(memory)
+            session.add(memory)
             await session.commit()

     async def get_memory(self, target: str):
@@ -122,7 +122,7 @@ class MessageStorage:
                 is_picid=is_picid,
             )
             async with get_db_session() as session:
-                await session.add(new_message)
+                session.add(new_message)
                 await session.commit()

         except Exception:
@@ -128,7 +128,7 @@ class ImageManager:
                     description=description,
                     timestamp=current_timestamp,
                 )
-                await session.add(new_desc)
+                session.add(new_desc)
                 await session.commit()
                 # 会在上下文管理器中自动调用
             except Exception as e:
@@ -278,7 +278,7 @@ class ImageManager:
                     description=detailed_description,  # 保存详细描述
                     timestamp=current_timestamp,
                 )
-                await session.add(new_img)
+                session.add(new_img)
                 await session.commit()
         except Exception as e:
             logger.error(f"保存到Images表失败: {str(e)}")
@@ -370,7 +370,7 @@ class ImageManager:
                         vlm_processed=True,
                         count=1,
                     )
-                    await session.add(new_img)
+                    session.add(new_img)
                     logger.debug(f"[数据库] 创建新图片记录: {image_hash[:8]}...")

                 await session.commit()
@@ -590,7 +590,7 @@ class ImageManager:
                         vlm_processed=True,
                         count=1,
                     )
-                    await session.add(new_img)
+                    session.add(new_img)
                     await session.commit()

             return image_id, f"[picid:{image_id}]"
@@ -22,6 +22,7 @@ from src.llm_models.utils_model import LLMRequest
 from src.config.config import global_config, model_config
 from src.common.logger import get_logger
 from src.common.database.sqlalchemy_models import get_db_session, Videos
+from sqlalchemy import select

 logger = get_logger("utils_video")

@@ -205,34 +206,29 @@ class VideoAnalyzer:
         return hash_obj.hexdigest()

     @staticmethod
-    def _check_video_exists(video_hash: str) -> Optional[Videos]:
-        """检查视频是否已经分析过"""
+    async def _check_video_exists(video_hash: str) -> Optional[Videos]:
+        """检查视频是否已经分析过 (异步)"""
         try:
-            with get_db_session() as session:
-                # 明确刷新会话以确保看到其他事务的最新提交
-                session.expire_all()
-                return session.query(Videos).filter(Videos.video_hash == video_hash).first()
+            async with get_db_session() as session:
+                result = await session.execute(select(Videos).where(Videos.video_hash == video_hash))
+                return result.scalar_one_or_none()
         except Exception as e:
             logger.warning(f"检查视频是否存在时出错: {e}")
             return None

     @staticmethod
-    def _store_video_result(
+    async def _store_video_result(
         video_hash: str, description: str, metadata: Optional[Dict] = None
     ) -> Optional[Videos]:
-        """存储视频分析结果到数据库"""
-        # 检查描述是否为错误信息,如果是则不保存
+        """存储视频分析结果到数据库 (异步)"""
         if description.startswith("❌"):
            logger.warning(f"⚠️ 检测到错误信息,不保存到数据库: {description[:50]}...")
            return None

        try:
-            with get_db_session() as session:
-                # 只根据video_hash查找
-                existing_video = session.query(Videos).filter(Videos.video_hash == video_hash).first()
+            async with get_db_session() as session:
+                result = await session.execute(select(Videos).where(Videos.video_hash == video_hash))
+                existing_video = result.scalar_one_or_none()

                 if existing_video:
-                    # 如果已存在,更新描述和计数
                     existing_video.description = description
                     existing_video.count += 1
                     existing_video.timestamp = time.time()
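The removed session.query(...).filter(...).first() calls are the legacy 1.x query API, which AsyncSession does not provide; the replacement is a 2.0-style select() executed through the session. A condensed sketch of the pattern used above:

    from sqlalchemy import select

    async def get_video_by_hash(video_hash: str):
        async with get_db_session() as session:
            result = await session.execute(
                select(Videos).where(Videos.video_hash == video_hash)
            )
            return result.scalar_one_or_none()  # one row or None; raises if several match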
@@ -243,12 +239,17 @@
                         existing_video.resolution = metadata.get("resolution")
                         existing_video.file_size = metadata.get("file_size")
                     await session.commit()
-                    session.refresh(existing_video)
-                    logger.info(f"✅ 更新已存在的视频记录,hash: {video_hash[:16]}..., count: {existing_video.count}")
+                    await session.refresh(existing_video)
+                    logger.info(
+                        f"✅ 更新已存在的视频记录,hash: {video_hash[:16]}..., count: {existing_video.count}"
+                    )
                     return existing_video
                 else:
                     video_record = Videos(
-                        video_hash=video_hash, description=description, timestamp=time.time(), count=1
+                        video_hash=video_hash,
+                        description=description,
+                        timestamp=time.time(),
+                        count=1,
                     )
                     if metadata:
                         video_record.duration = metadata.get("duration")
@@ -256,11 +257,12 @@
                         video_record.fps = metadata.get("fps")
                         video_record.resolution = metadata.get("resolution")
                         video_record.file_size = metadata.get("file_size")
-                    await session.add(video_record)
+                    session.add(video_record)
                     await session.commit()
-                    session.refresh(video_record)
-                    logger.info(f"✅ 新视频分析结果已保存到数据库,hash: {video_hash[:16]}...")
+                    await session.refresh(video_record)
+                    logger.info(
+                        f"✅ 新视频分析结果已保存到数据库,hash: {video_hash[:16]}..."
+                    )
                     return video_record
         except Exception as e:
             logger.error(f"❌ 存储视频分析结果时出错: {e}")
@@ -708,7 +710,7 @@
                 logger.info("✅ 等待结束,检查是否有处理结果")

                 # 检查是否有结果了
-                existing_video = self._check_video_exists(video_hash)
+                existing_video = await self._check_video_exists(video_hash)
                 if existing_video:
                     logger.info(f"✅ 找到了处理结果,直接返回 (id: {existing_video.id})")
                     return {"summary": existing_video.description}
@@ -722,7 +724,7 @@
             logger.info(f"🔒 获得视频处理锁,开始处理 (hash: {video_hash[:16]}...)")

             # 再次检查数据库(可能在等待期间已经有结果了)
-            existing_video = self._check_video_exists(video_hash)
+            existing_video = await self._check_video_exists(video_hash)
             if existing_video:
                 logger.info(f"✅ 获得锁后发现已有结果,直接返回 (id: {existing_video.id})")
                 video_event.set()  # 通知其他等待者
@@ -753,7 +755,7 @@
             # 保存分析结果到数据库(仅保存成功的结果)
             if success:
                 metadata = {"filename": filename, "file_size": len(video_bytes), "analysis_timestamp": time.time()}
-                self._store_video_result(video_hash=video_hash, description=result, metadata=metadata)
+                await self._store_video_result(video_hash=video_hash, description=result, metadata=metadata)
                 logger.info("✅ 分析结果已保存到数据库")
             else:
                 logger.warning("⚠️ 分析失败,不保存到数据库以便后续重试")
@@ -32,21 +32,32 @@ class DatabaseProxy:


 class SQLAlchemyTransaction:
-    """SQLAlchemy事务上下文管理器"""
+    """SQLAlchemy 异步事务上下文管理器 (兼容旧代码示例,推荐直接使用 get_db_session)。"""

     def __init__(self):
+        self._ctx = None
         self.session = None

-    def __enter__(self):
-        self.session = get_db_session()
+    async def __aenter__(self):
+        # get_db_session 是一个 async contextmanager
+        self._ctx = get_db_session()
+        self.session = await self._ctx.__aenter__()
         return self.session

-    def __exit__(self, exc_type, exc_val, exc_tb):
-        if exc_type is None:
-            self.await session.commit()
-        else:
-            self.session.rollback()
-        self.session.close()
+    async def __aexit__(self, exc_type, exc_val, exc_tb):
+        try:
+            if self.session:
+                if exc_type is None:
+                    try:
+                        await self.session.commit()
+                    except Exception:
+                        await self.session.rollback()
+                        raise
+                else:
+                    await self.session.rollback()
+        finally:
+            if self._ctx:
+                await self._ctx.__aexit__(exc_type, exc_val, exc_tb)


 # 创建全局数据库代理实例
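Usage sketch for the rewritten wrapper (names as in the hunk above; the import path is the project's database proxy module, omitted here):

    async def record_stats_example():
        async with SQLAlchemyTransaction() as session:
            session.add(AntiInjectionStats())
            # __aexit__ commits on success and rolls back if the block raised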
@@ -168,7 +168,7 @@ async def db_query(

         # 创建新记录
         new_record = model_class(**data)
-        await session.add(new_record)
+        session.add(new_record)
         await session.flush()  # 获取自动生成的ID

         # 转换为字典格式返回
@@ -295,7 +295,7 @@ async def db_save(

         # 创建新记录
         new_record = model_class(**data)
-        await session.add(new_record)
+        session.add(new_record)
         await session.flush()

         # 转换为字典格式返回
@@ -201,5 +201,5 @@ async def count_messages(message_filter: dict[str, Any]) -> int:


 # 你可以在这里添加更多与 messages 集合相关的数据库操作函数,例如 find_one_message, insert_message 等。
-# 注意:对于 SQLAlchemy,插入操作通常是使用 await session.add() 和 await session.commit()。
+# 注意:对于 SQLAlchemy,插入操作通常是使用 session.add() 和 await session.commit()。
 # 查找单个消息可以使用 session.execute(select(Messages).where(...)).scalar_one_or_none()。
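Following that note, a hypothetical find_one_message helper could look like this (Messages is assumed to be exported from the same models module as get_db_session):

    from typing import Any, Optional
    from sqlalchemy import select
    from src.common.database.sqlalchemy_models import get_db_session, Messages

    async def find_one_message(message_filter: dict[str, Any]) -> Optional[Messages]:
        async with get_db_session() as session:
            stmt = select(Messages)
            for field, value in message_filter.items():
                # simple equality filters; a real helper may need richer operators
                stmt = stmt.where(getattr(Messages, field) == value)
            result = await session.execute(stmt)
            return result.scalar_one_or_none()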
@@ -178,7 +178,7 @@ class LLMUsageRecorder:
                 timestamp=datetime.now(),  # SQLAlchemy 会处理 DateTime 字段
             )

-            await session.add(usage_record)
+            session.add(usage_record)
             await session.commit()

             logger.debug(
@@ -160,7 +160,7 @@ class ChatMood:
         self.regression_count = 0

         message_time: float = message.message_info.time  # type: ignore
-        message_list_before_now = get_raw_msg_by_timestamp_with_chat_inclusive(
+        message_list_before_now = await get_raw_msg_by_timestamp_with_chat_inclusive(
             chat_id=self.chat_id,
             timestamp_start=self.last_change_time,
             timestamp_end=message_time,
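The message query helpers such as get_raw_msg_by_timestamp_with_chat_inclusive are now coroutines, so every call site has to await them; without the await the caller receives a coroutine object instead of a list. A small sketch (import path omitted; the keyword arguments mirror the call above):

    async def count_recent(chat_id: str, start: float, end: float) -> int:
        messages = await get_raw_msg_by_timestamp_with_chat_inclusive(
            chat_id=chat_id, timestamp_start=start, timestamp_end=end
        )
        return len(messages)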
@@ -239,7 +239,7 @@ class ChatMood:

     async def regress_mood(self):
         message_time = time.time()
-        message_list_before_now = get_raw_msg_by_timestamp_with_chat_inclusive(
+        message_list_before_now = await get_raw_msg_by_timestamp_with_chat_inclusive(
             chat_id=self.chat_id,
             timestamp_start=self.last_change_time,
             timestamp_end=message_time,
@@ -98,7 +98,7 @@ class ChatMood:
         )

         message_time: float = message.message_info.time  # type: ignore
-        message_list_before_now = get_raw_msg_by_timestamp_with_chat_inclusive(
+        message_list_before_now = await get_raw_msg_by_timestamp_with_chat_inclusive(
             chat_id=self.chat_id,
             timestamp_start=self.last_change_time,
             timestamp_end=message_time,
@@ -147,7 +147,7 @@ class ChatMood:

     async def regress_mood(self):
         message_time = time.time()
-        message_list_before_now = get_raw_msg_by_timestamp_with_chat_inclusive(
+        message_list_before_now = await get_raw_msg_by_timestamp_with_chat_inclusive(
             chat_id=self.chat_id,
             timestamp_start=self.last_change_time,
             timestamp_end=message_time,
@@ -180,7 +180,7 @@ class PersonInfoManager:
         async with get_db_session() as session:
             try:
                 new_person = PersonInfo(**p_data)
-                await session.add(new_person)
+                session.add(new_person)
                 await session.commit()
                 return True
             except Exception as e:
@@ -245,7 +245,7 @@ class PersonInfoManager:

                 # 尝试创建
                 new_person = PersonInfo(**p_data)
-                await session.add(new_person)
+                session.add(new_person)
                 await session.commit()
                 return True
             except Exception as e:
@@ -607,7 +607,7 @@ class PersonInfoManager:
             # 记录不存在,尝试创建
             try:
                 new_person = PersonInfo(**init_data)
-                await session.add(new_person)
+                session.add(new_person)
                 await session.commit()
                 await session.refresh(new_person)
                 return new_person, True  # 创建成功
@@ -3,7 +3,7 @@ import traceback
 import os
 import pickle
 import random
-from typing import List, Dict, Any, Coroutine
+from typing import List, Dict, Any
 from src.config.config import global_config
 from src.common.logger import get_logger
 from src.person_info.relationship_manager import get_relationship_manager
@@ -113,7 +113,7 @@ class RelationshipBuilder:
     # 负责跟踪用户消息活动、管理消息段、清理过期数据
     # ================================

-    def _update_message_segments(self, person_id: str, message_time: float):
+    async def _update_message_segments(self, person_id: str, message_time: float):
         """更新用户的消息段

         Args:
@@ -126,11 +126,8 @@ class RelationshipBuilder:
         segments = self.person_engaged_cache[person_id]

         # 获取该消息前5条消息的时间作为潜在的开始时间
-        before_messages = get_raw_msg_before_timestamp_with_chat(self.chat_id, message_time, limit=5)
-        if before_messages:
-            potential_start_time = before_messages[0]["time"]
-        else:
-            potential_start_time = message_time
+        before_messages = await get_raw_msg_before_timestamp_with_chat(self.chat_id, message_time, limit=5)
+        potential_start_time = before_messages[0]["time"] if before_messages else message_time

         # 如果没有现有消息段,创建新的
         if not segments:
@@ -138,10 +135,9 @@ class RelationshipBuilder:
                 "start_time": potential_start_time,
                 "end_time": message_time,
                 "last_msg_time": message_time,
-                "message_count": self._count_messages_in_timerange(potential_start_time, message_time),
+                "message_count": await self._count_messages_in_timerange(potential_start_time, message_time),
             }
             segments.append(new_segment)

             person_name = get_person_info_manager().get_value_sync(person_id, "person_name") or person_id
             logger.debug(
                 f"{self.log_prefix} 眼熟用户 {person_name} 在 {time.strftime('%H:%M:%S', time.localtime(potential_start_time))} - {time.strftime('%H:%M:%S', time.localtime(message_time))} 之间有 {new_segment['message_count']} 条消息"
@@ -153,39 +149,32 @@ class RelationshipBuilder:
             last_segment = segments[-1]

             # 计算从最后一条消息到当前消息之间的消息数量(不包含边界)
-            messages_between = self._count_messages_between(last_segment["last_msg_time"], message_time)
+            messages_between = await self._count_messages_between(last_segment["last_msg_time"], message_time)

             if messages_between <= 10:
-                # 在10条消息内,延伸当前消息段
                 last_segment["end_time"] = message_time
                 last_segment["last_msg_time"] = message_time
-                # 重新计算整个消息段的消息数量
-                last_segment["message_count"] = self._count_messages_in_timerange(
+                last_segment["message_count"] = await self._count_messages_in_timerange(
                     last_segment["start_time"], last_segment["end_time"]
                 )
                 logger.debug(f"{self.log_prefix} 延伸用户 {person_id} 的消息段: {last_segment}")
             else:
-                # 超过10条消息,结束当前消息段并创建新的
-                # 结束当前消息段:延伸到原消息段最后一条消息后5条消息的时间
                 current_time = time.time()
-                after_messages = get_raw_msg_by_timestamp_with_chat(
+                after_messages = await get_raw_msg_by_timestamp_with_chat(
                     self.chat_id, last_segment["last_msg_time"], current_time, limit=5, limit_mode="earliest"
                 )
                 if after_messages and len(after_messages) >= 5:
-                    # 如果有足够的后续消息,使用第5条消息的时间作为结束时间
                     last_segment["end_time"] = after_messages[4]["time"]

-                # 重新计算当前消息段的消息数量
-                last_segment["message_count"] = self._count_messages_in_timerange(
+                last_segment["message_count"] = await self._count_messages_in_timerange(
                     last_segment["start_time"], last_segment["end_time"]
                 )

-                # 创建新的消息段
                 new_segment = {
                     "start_time": potential_start_time,
                     "end_time": message_time,
                     "last_msg_time": message_time,
-                    "message_count": self._count_messages_in_timerange(potential_start_time, message_time),
+                    "message_count": await self._count_messages_in_timerange(potential_start_time, message_time),
                 }
                 segments.append(new_segment)
                 person_info_manager = get_person_info_manager()
@@ -196,14 +185,14 @@ class RelationshipBuilder:

         self._save_cache()

-    def _count_messages_in_timerange(self, start_time: float, end_time: float) -> int:
+    async def _count_messages_in_timerange(self, start_time: float, end_time: float) -> int:
         """计算指定时间范围内的消息数量(包含边界)"""
-        messages = get_raw_msg_by_timestamp_with_chat_inclusive(self.chat_id, start_time, end_time)
+        messages = await get_raw_msg_by_timestamp_with_chat_inclusive(self.chat_id, start_time, end_time)
         return len(messages)

-    def _count_messages_between(self, start_time: float, end_time: float) -> Coroutine[Any, Any, int]:
+    async def _count_messages_between(self, start_time: float, end_time: float) -> int:
         """计算两个时间点之间的消息数量(不包含边界),用于间隔检查"""
-        return num_new_messages_since(self.chat_id, start_time, end_time)
+        return await num_new_messages_since(self.chat_id, start_time, end_time)

     def _get_total_message_count(self, person_id: str) -> int:
         """获取用户所有消息段的总消息数量"""
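The old annotation -> Coroutine[Any, Any, int] documented the problem rather than fixing it: the sync wrapper handed back an unawaited coroutine. With the helper declared async and awaited internally, call sites in _update_message_segments now get plain integers (illustration only):

    # before (bug): messages_between was a coroutine object, so the comparison raised
    #   TypeError: '<=' not supported between instances of 'coroutine' and 'int'
    messages_between = await self._count_messages_between(t0, t1)  # now an int
    if messages_between <= 10:
        ...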
@@ -350,7 +339,7 @@ class RelationshipBuilder:
         self._cleanup_old_segments()
         current_time = time.time()

-        if latest_messages := get_raw_msg_by_timestamp_with_chat(
+        if latest_messages := await get_raw_msg_by_timestamp_with_chat(
             self.chat_id,
             self.last_processed_message_time,
             current_time,
@@ -369,7 +358,7 @@ class RelationshipBuilder:
                 and msg_time > self.last_processed_message_time
             ):
                 person_id = PersonInfoManager.get_person_id(platform, user_id)
-                self._update_message_segments(person_id, msg_time)
+                await self._update_message_segments(person_id, msg_time)
                 logger.debug(
                     f"{self.log_prefix} 更新用户 {person_id} 的消息段,消息时间:{time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(msg_time))}"
                 )
@@ -439,7 +428,7 @@ class RelationshipBuilder:
             start_date = time.strftime("%Y-%m-%d %H:%M", time.localtime(start_time))

             # 获取该段的消息(包含边界)
-            segment_messages = get_raw_msg_by_timestamp_with_chat_inclusive(self.chat_id, start_time, end_time)
+            segment_messages = await get_raw_msg_by_timestamp_with_chat_inclusive(self.chat_id, start_time, end_time)
             logger.debug(
                 f"消息段: {start_date} - {time.strftime('%Y-%m-%d %H:%M', time.localtime(end_time))}, 消息数: {len(segment_messages)}"
             )
@@ -149,7 +149,7 @@ class PermissionManager(IPermissionManager):
                     default_granted=node.default_granted,
                     created_at=datetime.utcnow(),
                 )
-                await session.add(new_node)
+                session.add(new_node)
                 await session.commit()
                 logger.info(f"注册新权限节点: {node.node_name} (插件: {node.plugin_name})")
                 return True
@@ -204,7 +204,7 @@ class PermissionManager(IPermissionManager):
                         granted=True,
                         granted_at=datetime.utcnow(),
                     )
-                    await session.add(new_perm)
+                    session.add(new_perm)

                 await session.commit()
                 logger.info(f"已授权用户 {user.platform}:{user.user_id} 权限节点 {permission_node}")
@@ -257,7 +257,7 @@ class PermissionManager(IPermissionManager):
                         granted=False,
                         granted_at=datetime.utcnow(),
                     )
-                    await session.add(new_perm)
+                    session.add(new_perm)

                 await session.commit()
                 logger.info(f"已撤销用户 {user.platform}:{user.user_id} 权限节点 {permission_node}")
@@ -186,7 +186,7 @@ class SchedulerService:
                     story_content=content,
                     send_success=success,
                 )
-                await session.add(new_record)
+                session.add(new_record)
                 await session.commit()
                 logger.info(f"已更新日程处理状态: {hour_str} - {activity} - 成功: {success}")
         except Exception as e:
@@ -1,162 +1,156 @@
|
|||||||
import os
|
"""Napcat Adapter 插件数据库层 (基于主程序异步SQLAlchemy API)
|
||||||
from typing import Optional, List
|
|
||||||
from dataclasses import dataclass
|
|
||||||
from sqlmodel import Field, Session, SQLModel, create_engine, select
|
|
||||||
|
|
||||||
|
本模块替换原先的 sqlmodel + 同步Session 实现:
|
||||||
|
1. 复用主项目的异步数据库连接与迁移体系
|
||||||
|
2. 提供与旧接口名兼容的方法(update_ban_record/create_ban_record/delete_ban_record)
|
||||||
|
3. 新增首选异步方法: update_ban_records / create_or_update / delete_record / get_ban_records
|
||||||
|
|
||||||
|
数据语义:
|
||||||
|
user_id == 0 表示群全体禁言
|
||||||
|
|
||||||
|
注意: 所有方法均为异步, 需要在 async 上下文中调用。
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Optional, List, Sequence
|
||||||
|
|
||||||
|
from sqlalchemy import Column, Integer, BigInteger, UniqueConstraint, select, Index
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
|
||||||
|
from src.common.database.sqlalchemy_models import Base, get_db_session
|
||||||
from src.common.logger import get_logger
|
from src.common.logger import get_logger
|
||||||
|
|
||||||
logger = get_logger("napcat_adapter")
|
logger = get_logger("napcat_adapter")
|
||||||
|
|
||||||
"""
|
|
||||||
表记录的方式:
|
|
||||||
| group_id | user_id | lift_time |
|
|
||||||
|----------|---------|-----------|
|
|
||||||
|
|
||||||
其中使用 user_id == 0 表示群全体禁言
|
class NapcatBanRecord(Base):
|
||||||
"""
|
__tablename__ = "napcat_ban_records"
|
||||||
|
|
||||||
|
id = Column(Integer, primary_key=True, autoincrement=True)
|
||||||
|
group_id = Column(BigInteger, nullable=False, index=True)
|
||||||
|
user_id = Column(BigInteger, nullable=False, index=True) # 0 == 全体禁言
|
||||||
|
lift_time = Column(BigInteger, nullable=True) # -1 / None 表示未知/永久
|
||||||
|
|
||||||
|
__table_args__ = (
|
||||||
|
UniqueConstraint("group_id", "user_id", name="uq_napcat_group_user"),
|
||||||
|
Index("idx_napcat_ban_group", "group_id"),
|
||||||
|
Index("idx_napcat_ban_user", "user_id"),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class BanUser:
|
class BanUser:
|
||||||
"""
|
|
||||||
程序处理使用的实例
|
|
||||||
"""
|
|
||||||
|
|
||||||
user_id: int
|
user_id: int
|
||||||
group_id: int
|
group_id: int
|
||||||
lift_time: Optional[int] = Field(default=-1)
|
lift_time: Optional[int] = -1
|
||||||
|
|
||||||
|
def identity(self) -> tuple[int, int]:
|
||||||
|
return self.group_id, self.user_id
|
||||||
|
|
||||||
|
|
||||||
class DB_BanUser(SQLModel, table=True):
|
class NapcatDatabase:
|
||||||
"""
|
async def _fetch_all(self, session: AsyncSession) -> Sequence[NapcatBanRecord]:
|
||||||
表示数据库中的用户禁言记录。
|
result = await session.execute(select(NapcatBanRecord))
|
||||||
使用双重主键
|
return result.scalars().all()
|
||||||
"""
|
|
||||||
|
|
||||||
user_id: int = Field(index=True, primary_key=True) # 被禁言用户的用户 ID
|
async def get_ban_records(self) -> List[BanUser]:
|
||||||
group_id: int = Field(index=True, primary_key=True) # 用户被禁言的群组 ID
|
async with get_db_session() as session:
|
||||||
lift_time: Optional[int] # 禁言解除的时间(时间戳)
|
rows = await self._fetch_all(session)
|
||||||
|
return [BanUser(group_id=r.group_id, user_id=r.user_id, lift_time=r.lift_time) for r in rows]
|
||||||
|
|
||||||
|
async def update_ban_records(self, ban_list: List[BanUser]) -> None:
|
||||||
|
target_map = {b.identity(): b for b in ban_list}
|
||||||
|
async with get_db_session() as session:
|
||||||
|
rows = await self._fetch_all(session)
|
||||||
|
existing_map = {(r.group_id, r.user_id): r for r in rows}
|
||||||
|
|
||||||
def is_identical(obj1: BanUser, obj2: BanUser) -> bool:
|
changed = 0
|
||||||
"""
|
for ident, ban in target_map.items():
|
||||||
检查两个 BanUser 对象是否相同。
|
if ident in existing_map:
|
||||||
"""
|
row = existing_map[ident]
|
||||||
return obj1.user_id == obj2.user_id and obj1.group_id == obj2.group_id
|
if row.lift_time != ban.lift_time:
|
||||||
|
row.lift_time = ban.lift_time
|
||||||
|
changed += 1
|
||||||
class DatabaseManager:
|
|
||||||
"""
|
|
||||||
数据库管理类,负责与数据库交互。
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
os.makedirs(os.path.join(os.path.dirname(__file__), "..", "data"), exist_ok=True) # 确保数据目录存在
|
|
||||||
DATABASE_FILE = os.path.join(os.path.dirname(__file__), "..", "data", "NapcatAdapter.db")
|
|
||||||
self.sqlite_url = f"sqlite:///{DATABASE_FILE}" # SQLite 数据库 URL
|
|
||||||
self.engine = create_engine(self.sqlite_url, echo=False) # 创建数据库引擎
|
|
||||||
self._ensure_database() # 确保数据库和表已创建
|
|
||||||
|
|
||||||
def _ensure_database(self) -> None:
|
|
||||||
"""
|
|
||||||
确保数据库和表已创建。
|
|
||||||
"""
|
|
||||||
logger.info("确保数据库文件和表已创建...")
|
|
||||||
SQLModel.metadata.create_all(self.engine)
|
|
||||||
logger.info("数据库和表已创建或已存在")
|
|
||||||
|
|
||||||
def update_ban_record(self, ban_list: List[BanUser]) -> None:
|
|
||||||
# sourcery skip: class-extract-method
|
|
||||||
"""
|
|
||||||
更新禁言列表到数据库。
|
|
||||||
支持在不存在时创建新记录,对于多余的项目自动删除。
|
|
||||||
"""
|
|
||||||
with Session(self.engine) as session:
|
|
||||||
all_records = session.exec(select(DB_BanUser)).all()
|
|
||||||
for ban_user in ban_list:
|
|
||||||
statement = select(DB_BanUser).where(
|
|
||||||
DB_BanUser.user_id == ban_user.user_id, DB_BanUser.group_id == ban_user.group_id
|
|
||||||
)
|
|
||||||
if existing_record := session.exec(statement).first():
|
|
||||||
if existing_record.lift_time == ban_user.lift_time:
|
|
||||||
logger.debug(f"禁言记录未变更: {existing_record}")
|
|
||||||
continue
|
|
||||||
# 更新现有记录的 lift_time
|
|
||||||
existing_record.lift_time = ban_user.lift_time
|
|
||||||
await session.add(existing_record)
|
|
||||||
logger.debug(f"更新禁言记录: {existing_record}")
|
|
||||||
else:
|
else:
|
||||||
# 创建新记录
|
session.add(
|
||||||
db_record = DB_BanUser(
|
NapcatBanRecord(group_id=ban.group_id, user_id=ban.user_id, lift_time=ban.lift_time)
|
||||||
user_id=ban_user.user_id, group_id=ban_user.group_id, lift_time=ban_user.lift_time
|
|
||||||
)
|
)
|
||||||
await session.add(db_record)
|
changed += 1
|
||||||
logger.debug(f"创建新禁言记录: {ban_user}")
|
|
||||||
# 删除不在 ban_list 中的记录
|
|
||||||
for db_record in all_records:
|
|
||||||
record = BanUser(user_id=db_record.user_id, group_id=db_record.group_id, lift_time=db_record.lift_time)
|
|
||||||
if not any(is_identical(record, ban_user) for ban_user in ban_list):
|
|
||||||
statement = select(DB_BanUser).where(
|
|
||||||
DB_BanUser.user_id == record.user_id, DB_BanUser.group_id == record.group_id
|
|
||||||
)
|
|
||||||
if ban_record := session.exec(statement).first():
|
|
||||||
session.delete(ban_record)
|
|
||||||
|
|
||||||
logger.debug(f"删除禁言记录: {ban_record}")
|
removed = 0
|
||||||
|
for ident, row in existing_map.items():
|
||||||
|
if ident not in target_map:
|
||||||
|
await session.delete(row)
|
||||||
|
removed += 1
|
||||||
|
|
||||||
|
logger.debug(
|
||||||
|
f"Napcat ban list sync => total_incoming={len(ban_list)} created_or_updated={changed} removed={removed}"
|
||||||
|
)
|
||||||
|
|
||||||
|
async def create_or_update(self, ban_record: BanUser) -> None:
|
||||||
|
async with get_db_session() as session:
|
||||||
|
stmt = select(NapcatBanRecord).where(
|
||||||
|
NapcatBanRecord.group_id == ban_record.group_id,
|
||||||
|
NapcatBanRecord.user_id == ban_record.user_id,
|
||||||
|
)
|
||||||
|
result = await session.execute(stmt)
|
||||||
|
row = result.scalars().first()
|
||||||
|
if row:
|
||||||
|
if row.lift_time != ban_record.lift_time:
|
||||||
|
row.lift_time = ban_record.lift_time
|
||||||
|
logger.debug(
|
||||||
|
f"更新禁言记录 group={ban_record.group_id} user={ban_record.user_id} lift={ban_record.lift_time}"
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
logger.info(f"未找到禁言记录: {ban_record}")
|
session.add(
|
||||||
|
NapcatBanRecord(
|
||||||
logger.info("禁言记录已更新")
|
group_id=ban_record.group_id, user_id=ban_record.user_id, lift_time=ban_record.lift_time
|
||||||
|
|
||||||
def get_ban_records(self) -> List[BanUser]:
|
|
||||||
"""
|
|
||||||
读取所有禁言记录。
|
|
||||||
"""
|
|
||||||
with Session(self.engine) as session:
|
|
||||||
statement = select(DB_BanUser)
|
|
||||||
records = session.exec(statement).all()
|
|
||||||
return [BanUser(user_id=item.user_id, group_id=item.group_id, lift_time=item.lift_time) for item in records]
|
|
||||||
|
|
||||||
def create_ban_record(self, ban_record: BanUser) -> None:
|
|
||||||
"""
|
|
||||||
为特定群组中的用户创建禁言记录。
|
|
||||||
一个简化版本的添加方式,防止 update_ban_record 方法的复杂性。
|
|
||||||
其同时还是简化版的更新方式。
|
|
||||||
"""
|
|
||||||
with Session(self.engine) as session:
|
|
||||||
# 检查记录是否已存在
|
|
||||||
statement = select(DB_BanUser).where(
|
|
||||||
DB_BanUser.user_id == ban_record.user_id, DB_BanUser.group_id == ban_record.group_id
|
|
||||||
)
|
)
|
||||||
existing_record = session.exec(statement).first()
|
|
||||||
if existing_record:
|
|
||||||
# 如果记录已存在,更新 lift_time
|
|
||||||
existing_record.lift_time = ban_record.lift_time
|
|
||||||
await session.add(existing_record)
|
|
||||||
logger.debug(f"更新禁言记录: {ban_record}")
|
|
||||||
else:
|
|
||||||
# 如果记录不存在,创建新记录
|
|
||||||
db_record = DB_BanUser(
|
|
||||||
user_id=ban_record.user_id, group_id=ban_record.group_id, lift_time=ban_record.lift_time
|
|
||||||
)
|
)
|
||||||
await session.add(db_record)
|
logger.debug(
|
||||||
logger.debug(f"创建新禁言记录: {ban_record}")
|
f"创建禁言记录 group={ban_record.group_id} user={ban_record.user_id} lift={ban_record.lift_time}"
|
||||||
|
)
|
||||||
|
|
||||||
def delete_ban_record(self, ban_record: BanUser):
|
async def delete_record(self, ban_record: BanUser) -> None:
|
||||||
"""
|
async with get_db_session() as session:
|
||||||
删除特定用户在特定群组中的禁言记录。
|
stmt = select(NapcatBanRecord).where(
|
||||||
一个简化版本的删除方式,防止 update_ban_record 方法的复杂性。
|
NapcatBanRecord.group_id == ban_record.group_id,
|
||||||
"""
|
NapcatBanRecord.user_id == ban_record.user_id,
|
||||||
user_id = ban_record.user_id
|
)
|
||||||
group_id = ban_record.group_id
|
result = await session.execute(stmt)
|
||||||
with Session(self.engine) as session:
|
row = result.scalars().first()
|
||||||
statement = select(DB_BanUser).where(DB_BanUser.user_id == user_id, DB_BanUser.group_id == group_id)
|
if row:
|
||||||
if ban_record := session.exec(statement).first():
|
await session.delete(row)
|
||||||
session.delete(ban_record)
|
logger.debug(
|
||||||
|
f"删除禁言记录 group={ban_record.group_id} user={ban_record.user_id} lift={row.lift_time}"
|
||||||
logger.debug(f"删除禁言记录: {ban_record}")
|
)
|
||||||
else:
|
else:
|
||||||
logger.info(f"未找到禁言记录: user_id: {user_id}, group_id: {group_id}")
|
logger.info(
|
||||||
|
f"未找到禁言记录 group={ban_record.group_id} user={ban_record.user_id}"
|
||||||
|
)
|
||||||
|
|
||||||
|
# 兼容旧命名
|
||||||
|
async def update_ban_record(self, ban_list: List[BanUser]) -> None: # old name
|
||||||
|
await self.update_ban_records(ban_list)
|
||||||
|
|
||||||
|
async def create_ban_record(self, ban_record: BanUser) -> None: # old name
|
||||||
|
await self.create_or_update(ban_record)
|
||||||
|
|
||||||
|
async def delete_ban_record(self, ban_record: BanUser) -> None: # old name
|
||||||
|
await self.delete_record(ban_record)
|
||||||
|
|
||||||
|
|
||||||
db_manager = DatabaseManager()
|
napcat_db = NapcatDatabase()
|
||||||
|
|
||||||
|
|
||||||
|
def is_identical(a: BanUser, b: BanUser) -> bool:
|
||||||
|
return a.group_id == b.group_id and a.user_id == b.user_id
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"BanUser",
|
||||||
|
"NapcatBanRecord",
|
||||||
|
"napcat_db",
|
||||||
|
"is_identical",
|
||||||
|
]
|
||||||
|
|||||||
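Call-site sketch for the rewritten adapter database layer (method names as defined in the NapcatDatabase hunk above; the IDs are made up):

    from ..database import BanUser, napcat_db  # relative import inside the plugin package

    async def ban_sync_example():
        # user_id == 0 denotes a whole-group mute
        records = await napcat_db.get_ban_records()

        # upsert one record, then mirror a full list back to the table
        await napcat_db.create_or_update(BanUser(user_id=123456, group_id=654321, lift_time=-1))
        await napcat_db.update_ban_records(records)

        # remove a single record (the old method names remain as async aliases)
        await napcat_db.delete_record(BanUser(user_id=123456, group_id=654321))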
@@ -9,7 +9,7 @@ from src.common.logger import get_logger
 logger = get_logger("napcat_adapter")

 from src.plugin_system.apis import config_api
-from ..database import BanUser, db_manager, is_identical
+from ..database import BanUser, napcat_db, is_identical
 from . import NoticeType, ACCEPT_FORMAT
 from .message_sending import message_send_instance
 from .message_handler import message_handler
@@ -62,7 +62,7 @@ class NoticeHandler:
             return self.server_connection
         return websocket_manager.get_connection()

-    def _ban_operation(self, group_id: int, user_id: Optional[int] = None, lift_time: Optional[int] = None) -> None:
+    async def _ban_operation(self, group_id: int, user_id: Optional[int] = None, lift_time: Optional[int] = None) -> None:
         """
         将用户禁言记录添加到self.banned_list中
         如果是全体禁言,则user_id为0
@@ -71,16 +71,16 @@ class NoticeHandler:
             user_id = 0  # 使用0表示全体禁言
             lift_time = -1
         ban_record = BanUser(user_id=user_id, group_id=group_id, lift_time=lift_time)
-        for record in self.banned_list:
+        for record in list(self.banned_list):
             if is_identical(record, ban_record):
                 self.banned_list.remove(record)
                 self.banned_list.append(ban_record)
-                db_manager.create_ban_record(ban_record)  # 作为更新
+                await napcat_db.create_ban_record(ban_record)  # 更新
                 return
         self.banned_list.append(ban_record)
-        db_manager.create_ban_record(ban_record)  # 添加到数据库
+        await napcat_db.create_ban_record(ban_record)  # 新建

-    def _lift_operation(self, group_id: int, user_id: Optional[int] = None) -> None:
+    async def _lift_operation(self, group_id: int, user_id: Optional[int] = None) -> None:
         """
         从self.lifted_group_list中移除已经解除全体禁言的群
         """
@@ -88,7 +88,12 @@ class NoticeHandler:
             user_id = 0  # 使用0表示全体禁言
         ban_record = BanUser(user_id=user_id, group_id=group_id, lift_time=-1)
         self.lifted_list.append(ban_record)
-        db_manager.delete_ban_record(ban_record)  # 删除数据库中的记录
+        # 从被禁言列表里移除对应记录
+        for record in list(self.banned_list):
+            if is_identical(record, ban_record):
+                self.banned_list.remove(record)
+                break
+        await napcat_db.delete_ban_record(ban_record)

     async def handle_notice(self, raw_message: dict) -> None:
         notice_type = raw_message.get("notice_type")
@@ -376,7 +381,7 @@ class NoticeHandler:

         if user_id == 0:  # 为全体禁言
             sub_type: str = "whole_ban"
-            self._ban_operation(group_id)
+            await self._ban_operation(group_id)
         else:  # 为单人禁言
             # 获取被禁言人的信息
             sub_type: str = "ban"
@@ -390,7 +395,7 @@ class NoticeHandler:
                 user_nickname=user_nickname,
                 user_cardname=user_cardname,
             )
-            self._ban_operation(group_id, user_id, int(time.time() + duration))
+            await self._ban_operation(group_id, user_id, int(time.time() + duration))

         seg_data: Seg = Seg(
             type="notify",
@@ -439,7 +444,7 @@ class NoticeHandler:
         user_id = raw_message.get("user_id")
         if user_id == 0:  # 全体禁言解除
             sub_type = "whole_lift_ban"
-            self._lift_operation(group_id)
+            await self._lift_operation(group_id)
         else:  # 单人禁言解除
             sub_type = "lift_ban"
             # 获取被解除禁言人的信息
@@ -455,7 +460,7 @@ class NoticeHandler:
                 user_nickname=user_nickname,
                 user_cardname=user_cardname,
             )
-            self._lift_operation(group_id, user_id)
+            await self._lift_operation(group_id, user_id)

         seg_data: Seg = Seg(
             type="notify",
@@ -483,7 +488,7 @@ class NoticeHandler:
             group_id = lift_record.group_id
             user_id = lift_record.user_id

-            db_manager.delete_ban_record(lift_record)  # 从数据库中删除禁言记录
+            asyncio.create_task(napcat_db.delete_ban_record(lift_record))  # 从数据库中删除禁言记录

             seg_message: Seg = await self.natural_lift(group_id, user_id)

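asyncio.create_task schedules the delete on the running event loop without awaiting it here, so the handler does not wait for the database write. One caveat worth noting (illustration only, not code from the commit): tasks that nothing references can be garbage-collected before they finish, so a long-lived holder is the usual safeguard:

    import asyncio

    _pending_db_tasks: set[asyncio.Task] = set()

    task = asyncio.create_task(napcat_db.delete_ban_record(lift_record))
    _pending_db_tasks.add(task)
    task.add_done_callback(_pending_db_tasks.discard)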
@@ -6,7 +6,7 @@ import urllib3
 import ssl
 import io

-from .database import BanUser, db_manager
+from .database import BanUser, napcat_db
 from src.common.logger import get_logger

 logger = get_logger("napcat_adapter")
@@ -270,10 +270,11 @@ async def read_ban_list(
     ]
     """
     try:
-        ban_list = db_manager.get_ban_records()
+        ban_list = await napcat_db.get_ban_records()
         lifted_list: List[BanUser] = []
         logger.info("已经读取禁言列表")
-        for ban_record in ban_list:
+        # 复制列表以避免迭代中修改原列表问题
+        for ban_record in list(ban_list):
             if ban_record.user_id == 0:
                 fetched_group_info = await get_group_info(websocket, ban_record.group_id)
                 if fetched_group_info is None:
@@ -301,12 +302,12 @@ async def read_ban_list(
                 ban_list.remove(ban_record)
             else:
                 ban_record.lift_time = lift_ban_time
-        db_manager.update_ban_record(ban_list)
+        await napcat_db.update_ban_record(ban_list)
         return ban_list, lifted_list
     except Exception as e:
         logger.error(f"读取禁言列表失败: {e}")
         return [], []


-def save_ban_record(list: List[BanUser]):
-    return db_manager.update_ban_record(list)
+async def save_ban_record(list: List[BanUser]):
+    return await napcat_db.update_ban_record(list)
@@ -42,7 +42,7 @@ async def add_new_plans(plans: List[str], month: str):
         new_plan_objects = [
             MonthlyPlan(plan_text=plan, target_month=month, status="active") for plan in plans_to_add
         ]
-        await session.add_all(new_plan_objects)
+        session.add_all(new_plan_objects)
         await session.commit()

         logger.info(f"成功向数据库添加了 {len(new_plan_objects)} 条 {month} 的月度计划。")
@@ -128,7 +128,7 @@ class ScheduleManager:
                 existing_schedule.updated_at = datetime.now()
             else:
                 new_schedule = Schedule(date=date_str, schedule_data=schedule_json)
-                await session.add(new_schedule)
+                session.add(new_schedule)
             await session.commit()

     @staticmethod
uv.lock (generated)
@@ -154,6 +154,18 @@ wheels = [
     { url = "https://pypi.tuna.tsinghua.edu.cn/packages/98/3b/40a68de458904bcc143622015fff2352b6461cd92fd66d3527bf1c6f5716/aiohttp_cors-0.8.1-py3-none-any.whl", hash = "sha256:3180cf304c5c712d626b9162b195b1db7ddf976a2a25172b35bb2448b890a80d", size = 25231, upload-time = "2025-03-31T14:16:18.478Z" },
 ]

+[[package]]
+name = "aiomysql"
+version = "0.2.0"
+source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
+dependencies = [
+    { name = "pymysql" },
+]
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/67/76/2c5b55e4406a1957ffdfd933a94c2517455291c97d2b81cec6813754791a/aiomysql-0.2.0.tar.gz", hash = "sha256:558b9c26d580d08b8c5fd1be23c5231ce3aeff2dadad989540fee740253deb67", size = 114706, upload-time = "2023-06-11T19:57:53.608Z" }
+wheels = [
+    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/42/87/c982ee8b333c85b8ae16306387d703a1fcdfc81a2f3f15a24820ab1a512d/aiomysql-0.2.0-py3-none-any.whl", hash = "sha256:b7c26da0daf23a5ec5e0b133c03d20657276e4eae9b73e040b72787f6f6ade0a", size = 44215, upload-time = "2023-06-11T19:57:51.09Z" },
+]
+
 [[package]]
 name = "aiosignal"
 version = "1.4.0"
@@ -167,6 +179,18 @@ wheels = [
     { url = "https://pypi.tuna.tsinghua.edu.cn/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" },
 ]

+[[package]]
+name = "aiosqlite"
+version = "0.21.0"
+source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }
+dependencies = [
+    { name = "typing-extensions" },
+]
+sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/13/7d/8bca2bf9a247c2c5dfeec1d7a5f40db6518f88d314b8bca9da29670d2671/aiosqlite-0.21.0.tar.gz", hash = "sha256:131bb8056daa3bc875608c631c678cda73922a2d4ba8aec373b19f18c17e7aa3", size = 13454, upload-time = "2025-02-03T07:30:16.235Z" }
+wheels = [
+    { url = "https://pypi.tuna.tsinghua.edu.cn/packages/f5/10/6c25ed6de94c49f88a91fa5018cb4c0f3625f31d5be9f771ebe5cc7cd506/aiosqlite-0.21.0-py3-none-any.whl", hash = "sha256:2549cf4057f95f53dcba16f2b64e8e2791d7e1adedb13197dd8ed77bb226d7d0", size = 15792, upload-time = "2025-02-03T07:30:13.6Z" },
+]
+
 [[package]]
 name = "annotated-types"
 version = "0.7.0"
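aiomysql and aiosqlite are the async drivers that SQLAlchemy's asyncio dialects rely on, matching the async session usage elsewhere in this commit. Connection URL sketch (paths and credentials are placeholders, not taken from the project config):

    from sqlalchemy.ext.asyncio import create_async_engine

    # SQLite through aiosqlite
    sqlite_engine = create_async_engine("sqlite+aiosqlite:///data/MaiBot.db")

    # MySQL through aiomysql
    mysql_engine = create_async_engine("mysql+aiomysql://user:password@localhost:3306/maibot")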
@@ -774,7 +798,6 @@ dependencies = [
     { name = "numpy", version = "2.3.1", source = { registry = "https://pypi.tuna.tsinghua.edu.cn/simple" }, marker = "python_full_version >= '3.11'" },
     { name = "packaging" },
 ]
-sdist = { url = "https://pypi.tuna.tsinghua.edu.cn/packages/e7/9a/e33fc563f007924dd4ec3c5101fe5320298d6c13c158a24a9ed849058569/faiss_cpu-1.11.0.tar.gz", hash = "sha256:44877b896a2b30a61e35ea4970d008e8822545cb340eca4eff223ac7f40a1db9", size = 70218, upload-time = "2025-04-28T07:48:30.459Z" }
 wheels = [
     { url = "https://pypi.tuna.tsinghua.edu.cn/packages/ed/e5/7490368ec421e44efd60a21aa88d244653c674d8d6ee6bc455d8ee3d02ed/faiss_cpu-1.11.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:1995119152928c68096b0c1e5816e3ee5b1eebcf615b80370874523be009d0f6", size = 3307996, upload-time = "2025-04-28T07:47:29.126Z" },
     { url = "https://pypi.tuna.tsinghua.edu.cn/packages/dd/ac/a94fbbbf4f38c2ad11862af92c071ff346630ebf33f3d36fe75c3817c2f0/faiss_cpu-1.11.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:788d7bf24293fdecc1b93f1414ca5cc62ebd5f2fecfcbb1d77f0e0530621c95d", size = 7886309, upload-time = "2025-04-28T07:47:31.668Z" },
@@ -1693,6 +1716,8 @@ source = { virtual = "." }
 dependencies = [
     { name = "aiohttp" },
     { name = "aiohttp-cors" },
+    { name = "aiomysql" },
+    { name = "aiosqlite" },
     { name = "apscheduler" },
     { name = "asyncddgs" },
     { name = "asyncio" },
@@ -1773,6 +1798,8 @@ lint = [
 requires-dist = [
     { name = "aiohttp", specifier = ">=3.12.14" },
     { name = "aiohttp-cors", specifier = ">=0.8.1" },
+    { name = "aiomysql", specifier = ">=0.2.0" },
+    { name = "aiosqlite", specifier = ">=0.21.0" },
     { name = "apscheduler", specifier = ">=3.11.0" },
     { name = "asyncddgs", specifier = ">=0.1.0a1" },
     { name = "asyncio", specifier = ">=4.0.0" },