ruff
@@ -220,7 +220,9 @@ class DatabaseMessages(BaseDataModel):
             "chat_info_user_cardname": self.chat_info.user_info.user_cardname,
         }
 
-    def update_message_info(self, interest_value: float | None = None, actions: list | None = None, should_reply: bool | None = None):
+    def update_message_info(
+        self, interest_value: float | None = None, actions: list | None = None, should_reply: bool | None = None
+    ):
         """
         Update message info
 
@@ -53,8 +53,6 @@ class StreamContext(BaseDataModel):
     priority_mode: str | None = None
     priority_info: dict | None = None
 
-
-
     def add_action_to_message(self, message_id: str, action: str):
         """
         Add an executed action to the specified message
@@ -75,9 +73,6 @@ class StreamContext(BaseDataModel):
                 message.add_action(action)
                 break
 
-
-
-
     def mark_message_as_read(self, message_id: str):
         """Mark a message as read"""
        for msg in self.unread_messages:
@@ -78,7 +78,7 @@ class ConnectionPoolManager:
             "total_expired": 0,
             "active_connections": 0,
             "pool_hits": 0,
-            "pool_misses": 0
+            "pool_misses": 0,
         }
 
         # Background cleanup task
@@ -156,7 +156,9 @@ class ConnectionPoolManager:
         if connection_info:
             connection_info.mark_released()
 
-    async def _get_reusable_connection(self, session_factory: async_sessionmaker[AsyncSession]) -> ConnectionInfo | None:
+    async def _get_reusable_connection(
+        self, session_factory: async_sessionmaker[AsyncSession]
+    ) -> ConnectionInfo | None:
         """Get a reusable connection"""
         async with self._lock:
             # Clean up expired connections
@@ -164,9 +166,7 @@ class ConnectionPoolManager:
 
             # Look for a reusable connection
             for connection_info in list(self._connections):
-                if (not connection_info.in_use and
-                    not connection_info.is_expired(self.max_lifetime, self.max_idle)):
-
+                if not connection_info.in_use and not connection_info.is_expired(self.max_lifetime, self.max_idle):
                     # Verify the connection is still valid
                     try:
                         # Run a simple query to validate the connection
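The validation query itself is outside this hunk; the comment only says a simple query is run. A minimal sketch of what such a liveness probe typically looks like with SQLAlchemy's async API (the `SELECT 1` probe and function name are assumptions, not taken from this commit):

    from sqlalchemy import text
    from sqlalchemy.ext.asyncio import AsyncSession

    async def is_connection_alive(session: AsyncSession) -> bool:
        """Probe a pooled session with a trivial query; treat any error as dead."""
        try:
            await session.execute(text("SELECT 1"))  # cheap round-trip to the DB
            return True
        except Exception:
            return False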
@@ -191,8 +191,7 @@ class ConnectionPoolManager:
             expired_connections = []
 
             for connection_info in list(self._connections):
-                if (connection_info.is_expired(self.max_lifetime, self.max_idle) and
-                    not connection_info.in_use):
+                if connection_info.is_expired(self.max_lifetime, self.max_idle) and not connection_info.in_use:
                     expired_connections.append(connection_info)
 
             for connection_info in expired_connections:
@@ -238,7 +237,8 @@ class ConnectionPoolManager:
             "max_pool_size": self.max_pool_size,
             "pool_efficiency": (
                 self._stats["pool_hits"] / max(1, self._stats["pool_hits"] + self._stats["pool_misses"])
-            ) * 100
+            )
+            * 100,
         }
 
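The `pool_efficiency` expression reformatted above is a hit rate expressed as a percentage, with `max(1, ...)` guarding the division before any request has been seen. A standalone illustration (the function name is hypothetical):

    def pool_efficiency(hits: int, misses: int) -> float:
        """Percentage of acquisitions served from the pool; 0.0 when untouched."""
        return hits / max(1, hits + misses) * 100

    assert pool_efficiency(0, 0) == 0.0    # guard avoids ZeroDivisionError
    assert pool_efficiency(75, 25) == 75.0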
@@ -24,6 +24,7 @@ T = TypeVar("T")
 @dataclass
 class BatchOperation:
     """Base class for a batch operation"""
+
     operation_type: str  # 'select', 'insert', 'update', 'delete'
     model_class: Any
     conditions: dict[str, Any]
@@ -40,6 +41,7 @@ class BatchOperation:
 @dataclass
 class BatchResult:
     """Result of a batch operation"""
+
     success: bool
     data: Any = None
     error: str | None = None
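Taken together, the two dataclasses above describe a queued request and its outcome. A hedged construction sketch, mirroring the keyword arguments used later in this diff (the `User` model class is a placeholder, and remaining `BatchOperation` fields are assumed to have defaults):

    op = BatchOperation(operation_type="select", model_class=User, conditions={"user_id": 42})
    result = BatchResult(success=True, data=[])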
@@ -48,10 +50,12 @@ class BatchResult:
 class DatabaseBatchScheduler:
     """Database batch scheduler"""
 
-    def __init__(self,
-                 batch_size: int = 50,
-                 max_wait_time: float = 0.1,  # 100ms
-                 max_queue_size: int = 1000):
+    def __init__(
+        self,
+        batch_size: int = 50,
+        max_wait_time: float = 0.1,  # 100ms
+        max_queue_size: int = 1000,
+    ):
         self.batch_size = batch_size
         self.max_wait_time = max_wait_time
         self.max_queue_size = max_queue_size
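The constructor reflow above follows ruff's magic trailing comma convention: the comma after `max_queue_size: int = 1000,` keeps one parameter per line on future reformats. Call sites are unchanged; for example:

    scheduler = DatabaseBatchScheduler(batch_size=100, max_wait_time=0.05, max_queue_size=5000)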
@@ -65,12 +69,7 @@ class DatabaseBatchScheduler:
         self._lock = asyncio.Lock()
 
         # Statistics
-        self.stats = {
-            "total_operations": 0,
-            "batched_operations": 0,
-            "cache_hits": 0,
-            "execution_time": 0.0
-        }
+        self.stats = {"total_operations": 0, "batched_operations": 0, "cache_hits": 0, "execution_time": 0.0}
 
         # Simple result cache (for frequent queries)
         self._result_cache: dict[str, tuple[Any, float]] = {}
@@ -105,11 +104,7 @@ class DatabaseBatchScheduler:
     def _generate_cache_key(self, operation_type: str, model_class: Any, conditions: dict[str, Any]) -> str:
         """Generate a cache key"""
         # Simple cache-key generation; can be optimized as needed
-        key_parts = [
-            operation_type,
-            model_class.__name__,
-            str(sorted(conditions.items()))
-        ]
+        key_parts = [operation_type, model_class.__name__, str(sorted(conditions.items()))]
         return "|".join(key_parts)
 
     def _get_from_cache(self, cache_key: str) -> Any | None:
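`_generate_cache_key` joins the operation type, the model name, and the sorted condition items, so two queries whose conditions arrive in different order map to the same key. A self-contained illustration of the same logic (the `User` class is a stand-in):

    def generate_cache_key(operation_type, model_class, conditions):
        key_parts = [operation_type, model_class.__name__, str(sorted(conditions.items()))]
        return "|".join(key_parts)

    class User:  # placeholder model class
        pass

    k1 = generate_cache_key("select", User, {"name": "a", "id": 1})
    k2 = generate_cache_key("select", User, {"id": 1, "name": "a"})
    assert k1 == k2 == "select|User|[('id', 1), ('name', 'a')]"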
@@ -132,11 +127,7 @@ class DatabaseBatchScheduler:
         """Add an operation to the queue"""
         # Check whether a cached result can be returned immediately
         if operation.operation_type == "select":
-            cache_key = self._generate_cache_key(
-                operation.operation_type,
-                operation.model_class,
-                operation.conditions
-            )
+            cache_key = self._generate_cache_key(operation.operation_type, operation.model_class, operation.conditions)
             cached_result = self._get_from_cache(cache_key)
             if cached_result is not None:
                 if operation.callback:
@@ -180,10 +171,7 @@ class DatabaseBatchScheduler:
                 return
 
             # Copy the queue contents to avoid holding the lock for long
-            queues_copy = {
-                key: deque(operations)
-                for key, operations in self.operation_queues.items()
-            }
+            queues_copy = {key: deque(operations) for key, operations in self.operation_queues.items()}
             # Clear the original queues
             for queue in self.operation_queues.values():
                 queue.clear()
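The dict comprehension collapsed above is a copy-then-clear snapshot: queue contents are duplicated into fresh deques while the lock is held, the originals are emptied, and the slow execution phase then runs on the copy without blocking producers. A minimal sketch of the same pattern (names are illustrative):

    import asyncio
    from collections import deque

    queues = {"select:User": deque([1, 2, 3])}
    lock = asyncio.Lock()

    async def snapshot():
        async with lock:
            copy = {key: deque(ops) for key, ops in queues.items()}
            for q in queues.values():
                q.clear()
        return copy  # processed outside the lock

    print(asyncio.run(snapshot()))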
@@ -240,9 +228,7 @@ class DatabaseBatchScheduler:
                 # Cache query results
                 if operation.operation_type == "select":
                     cache_key = self._generate_cache_key(
-                        operation.operation_type,
-                        operation.model_class,
-                        operation.conditions
+                        operation.operation_type, operation.model_class, operation.conditions
                     )
                     self._set_cache(cache_key, result)
 
@@ -287,12 +273,9 @@ class DatabaseBatchScheduler:
                     else:
                         # Results need to be filtered by this operation's conditions
                         op_result = [
-                            item for item in data
-                            if all(
-                                getattr(item, k) == v
-                                for k, v in op.conditions.items()
-                                if hasattr(item, k)
-                            )
+                            item
+                            for item in data
+                            if all(getattr(item, k) == v for k, v in op.conditions.items() if hasattr(item, k))
                         ]
                         results.append(op_result)
 
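The comprehension reflowed above distributes one batched select's rows back to each caller by re-checking that caller's conditions against every returned object, skipping any attribute the object lacks. Equivalent standalone logic (the `Row` class and field names are illustrative):

    class Row:
        def __init__(self, user_id, platform):
            self.user_id = user_id
            self.platform = platform

    data = [Row(1, "qq"), Row(2, "qq"), Row(1, "web")]
    conditions = {"user_id": 1, "missing_attr": "ignored"}  # absent attrs are skipped

    op_result = [
        item
        for item in data
        if all(getattr(item, k) == v for k, v in conditions.items() if hasattr(item, k))
    ]
    assert [(r.user_id, r.platform) for r in op_result] == [(1, "qq"), (1, "web")]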
@@ -429,7 +412,7 @@ class DatabaseBatchScheduler:
             **self.stats,
             "cache_size": len(self._result_cache),
             "queue_sizes": {k: len(v) for k, v in self.operation_queues.items()},
-            "is_running": self._is_running
+            "is_running": self._is_running,
         }
 
@@ -452,43 +435,25 @@ async def get_batch_session():
 # Convenience functions
 async def batch_select(model_class: Any, conditions: dict[str, Any]) -> Any:
     """Batch select"""
-    operation = BatchOperation(
-        operation_type="select",
-        model_class=model_class,
-        conditions=conditions
-    )
+    operation = BatchOperation(operation_type="select", model_class=model_class, conditions=conditions)
     return await db_batch_scheduler.add_operation(operation)
 
 
 async def batch_insert(model_class: Any, data: dict[str, Any]) -> int:
     """Batch insert"""
-    operation = BatchOperation(
-        operation_type="insert",
-        model_class=model_class,
-        conditions={},
-        data=data
-    )
+    operation = BatchOperation(operation_type="insert", model_class=model_class, conditions={}, data=data)
     return await db_batch_scheduler.add_operation(operation)
 
 
 async def batch_update(model_class: Any, conditions: dict[str, Any], data: dict[str, Any]) -> int:
     """Batch update"""
-    operation = BatchOperation(
-        operation_type="update",
-        model_class=model_class,
-        conditions=conditions,
-        data=data
-    )
+    operation = BatchOperation(operation_type="update", model_class=model_class, conditions=conditions, data=data)
     return await db_batch_scheduler.add_operation(operation)
 
 
 async def batch_delete(model_class: Any, conditions: dict[str, Any]) -> int:
     """Batch delete"""
-    operation = BatchOperation(
-        operation_type="delete",
-        model_class=model_class,
-        conditions=conditions
-    )
+    operation = BatchOperation(operation_type="delete", model_class=model_class, conditions=conditions)
     return await db_batch_scheduler.add_operation(operation)
 
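The four wrappers above reduce each call site to a single awaited line. A hedged usage sketch (assumes these functions are imported from the module in this diff and that the scheduler is running; the `Users` model and field names are placeholders):

    async def demo():
        await batch_insert(Users, {"user_id": "10001", "nickname": "alice"})
        rows = await batch_select(Users, {"user_id": "10001"})
        await batch_update(Users, {"user_id": "10001"}, {"nickname": "bob"})
        await batch_delete(Users, {"user_id": "10001"})
        return rows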
@@ -304,8 +304,7 @@ def load_log_config():  # sourcery skip: use-contextlib-suppress
         "library_log_levels": {"aiohttp": "WARNING"},
     }
 
-    # The mistakenly added immediate thread start has been removed; the real thread is scheduled at midnight by start_log_cleanup_task
     # The mistakenly added immediate thread start has been removed; the real thread is scheduled at midnight by start_log_cleanup_task
 
     try:
         if config_path.exists():