Optimize logging
@@ -291,7 +291,7 @@ class CoreSinkManager:
             # 使用 MessageRuntime 处理消息
             await self._runtime.handle_message(envelope)
         except Exception as e:
-            logger.error(f"MessageRuntime 处理消息时出错: {e}", exc_info=True)
+            logger.error(f"MessageRuntime 处理消息时出错: {e}")


 # 全局单例
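Every hunk in this commit follows the same pattern: an `exc_info=True` argument is dropped from a `logger.error(...)` call. A minimal sketch of the behavioral difference, using the standard `logging` module and assuming the project's `logger` wrapper follows standard-logging semantics for `exc_info`:

```python
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("demo")

try:
    1 / 0
except Exception as e:
    # Before: the full traceback of the active exception is appended
    # to the log record after the message line.
    logger.error(f"MessageRuntime 处理消息时出错: {e}", exc_info=True)
    # After: only the one-line interpolated message is emitted, which
    # keeps logs compact but drops the stack trace used for debugging.
    logger.error(f"MessageRuntime 处理消息时出错: {e}")
```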
@@ -164,11 +164,11 @@ class StreamContext(BaseDataModel):
                 await unified_manager.add_message(message_dict)
                 logger.debug(f"消息已添加到统一存储系统: {message.message_id}")
             except Exception as e:
-                logger.error(f"添加消息到统一存储系统失败: {e}", exc_info=True)
+                logger.error(f"添加消息到统一存储系统失败: {e}")

             return True
         except Exception as e:
-            logger.error(f"添加上下文消息失败 {self.stream_id}: {e}", exc_info=True)
+            logger.error(f"添加上下文消息失败 {self.stream_id}: {e}")
             return False

     async def update_message(self, message_id: str, updates: dict[str, Any]) -> bool:
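Note the control flow this hunk preserves: the inner `try` around `unified_manager.add_message` only logs a storage failure, so execution still reaches `return True`; only an exception in the outer block yields `False`. A reduced, self-contained sketch of that fail-soft shape (storage object hypothetical):

```python
import asyncio
import logging

logger = logging.getLogger("demo")

async def add_message(message_dict: dict, storage) -> bool:
    """Fail-soft add: a storage error is logged but not treated as fatal."""
    try:
        try:
            await storage.add_message(message_dict)
        except Exception as e:
            # Inner failure is swallowed after logging; the caller still
            # sees success for the in-memory part of the add.
            logger.error(f"添加消息到统一存储系统失败: {e}")
        return True
    except Exception as e:
        logger.error(f"添加上下文消息失败: {e}")
        return False

class _FailingStorage:
    async def add_message(self, message_dict: dict) -> None:
        raise RuntimeError("db down")

# Storage is down, yet the add still reports success.
assert asyncio.run(add_message({}, _FailingStorage())) is True
```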
@@ -197,7 +197,7 @@ class StreamContext(BaseDataModel):
             logger.debug(f"更新上下文消息: {self.stream_id}/{message_id}")
             return True
         except Exception as e:
-            logger.error(f"更新上下文消息失败 {self.stream_id}/{message_id}: {e}", exc_info=True)
+            logger.error(f"更新上下文消息失败 {self.stream_id}/{message_id}: {e}")
             return False

     def add_action_to_message(self, message_id: str, action: str):
@@ -277,7 +277,7 @@ class StreamContext(BaseDataModel):
             self._update_access_stats()
             return messages
         except Exception as e:
-            logger.error(f"获取上下文消息失败 {self.stream_id}: {e}", exc_info=True)
+            logger.error(f"获取上下文消息失败 {self.stream_id}: {e}")
             return []

     def mark_messages_as_read(self, message_ids: list[str]) -> bool:
@@ -292,7 +292,7 @@ class StreamContext(BaseDataModel):
                 logger.warning(f"标记消息已读失败 {message_id}: {e}")
             return marked_count > 0
         except Exception as e:
-            logger.error(f"批量标记消息已读失败 {self.stream_id}: {e}", exc_info=True)
+            logger.error(f"批量标记消息已读失败 {self.stream_id}: {e}")
             return False

     def calculate_interruption_probability(self, max_limit: int, min_probability: float = 0.1, probability_factor: float | None = None) -> float:
@@ -349,7 +349,7 @@ class StreamContext(BaseDataModel):
             logger.debug(f"清空上下文成功: {self.stream_id}")
             return True
         except Exception as e:
-            logger.error(f"清空上下文失败 {self.stream_id}: {e}", exc_info=True)
+            logger.error(f"清空上下文失败 {self.stream_id}: {e}")
             return False

     def get_statistics(self) -> dict[str, Any]:
@@ -378,7 +378,7 @@ class StreamContext(BaseDataModel):
             stats["cache_stats"] = self.get_cache_stats()
             return stats
         except Exception as e:
-            logger.error(f"获取上下文统计失败 {self.stream_id}: {e}", exc_info=True)
+            logger.error(f"获取上下文统计失败 {self.stream_id}: {e}")
             return {}

     def validate_integrity(self) -> bool:
@@ -547,7 +547,7 @@ class StreamContext(BaseDataModel):
                 logger.debug(f"无历史消息需要加载: {self.stream_id}")

         except Exception as e:
-            logger.error(f"从数据库加载历史消息失败: {self.stream_id}, {e}", exc_info=True)
+            logger.error(f"从数据库加载历史消息失败: {self.stream_id}, {e}")
             self._history_initialized = False

     def _detect_chat_type(self, message: "DatabaseMessages"):
@@ -588,7 +588,7 @@ class StreamContext(BaseDataModel):
                 return 0.5

         except Exception as e:
-            logger.error(f"计算消息兴趣时出现异常: {e}", exc_info=True)
+            logger.error(f"计算消息兴趣时出现异常: {e}")
             if hasattr(message, "interest_calculated"):
                 message.interest_calculated = False
             return 0.5
@@ -104,7 +104,7 @@ async def store_action_info(
             return None

     except Exception as e:
-        logger.error(f"存储动作信息时发生错误: {e}", exc_info=True)
+        logger.error(f"存储动作信息时发生错误: {e}")
         return None

@@ -246,7 +246,7 @@ async def update_person_affinity(
        return True

    except Exception as e:
-        logger.error(f"更新好感度失败: {e}", exc_info=True)
+        logger.error(f"更新好感度失败: {e}")
        return False

@@ -480,5 +480,5 @@ async def update_relationship_affinity(
        return True

    except Exception as e:
-        logger.error(f"更新关系好感度失败: {e}", exc_info=True)
+        logger.error(f"更新关系好感度失败: {e}")
        return False
@@ -272,7 +272,7 @@ async def db_query(
            return {"count": count}

    except Exception as e:
-        logger.error(f"数据库操作失败: {e}", exc_info=True)
+        logger.error(f"数据库操作失败: {e}")
        return None if single_result or query_type != "get" else []

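The fallback in this hunk keeps the error-path return type aligned with the success path: `get` queries that may return multiple rows degrade to `[]`, while single-row and non-`get` operations degrade to `None`. A sketch of that contract with the signature simplified (the helper name here is illustrative):

```python
def _error_fallback(query_type: str, single_result: bool) -> list | None:
    # Mirrors `return None if single_result or query_type != "get" else []`:
    # multi-row reads fall back to an empty list, everything else to None,
    # so callers can keep iterating or None-checking without type surprises.
    return None if single_result or query_type != "get" else []

assert _error_fallback("get", single_result=False) == []
assert _error_fallback("get", single_result=True) is None
assert _error_fallback("count", single_result=False) is None
```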
@@ -308,7 +308,7 @@ async def db_save(
            return _model_to_dict(instance)

    except Exception as e:
-        logger.error(f"保存数据库记录出错: {e}", exc_info=True)
+        logger.error(f"保存数据库记录出错: {e}")
        return None

@@ -130,7 +130,7 @@ async def get_engine() -> AsyncEngine:
        return _engine

    except Exception as e:
-        logger.error(f"❌ 数据库引擎初始化失败: {e}", exc_info=True)
+        logger.error(f"❌ 数据库引擎初始化失败: {e}")
        raise DatabaseInitializationError(f"引擎初始化失败: {e}") from e

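This is the one spot where losing `exc_info=True` costs little: the handler re-raises via `raise ... from e`, so the original traceback still travels with the new exception. A small demonstration of that chaining:

```python
class DatabaseInitializationError(Exception):
    pass

def init_engine():
    try:
        raise ConnectionError("connection refused")
    except Exception as e:
        # `from e` sets __cause__, so the original traceback is printed
        # as "The above exception was the direct cause of ..." wherever
        # this new exception is finally logged or reaches the top level.
        raise DatabaseInitializationError(f"引擎初始化失败: {e}") from e

try:
    init_engine()
except DatabaseInitializationError as err:
    assert isinstance(err.__cause__, ConnectionError)
```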
@@ -69,7 +69,7 @@ async def check_and_migrate_database(existing_engine=None):
                # 提交表创建事务
                await connection.commit()
            except Exception as e:
-                logger.error(f"创建表时失败: {e}", exc_info=True)
+                logger.error(f"创建表时失败: {e}")
                await connection.rollback()

        # 2. 然后处理现有表的列和索引的添加
@@ -182,7 +182,7 @@ async def check_and_migrate_database(existing_engine=None):
                logger.debug(f"表 '{table_name}' 的索引一致。")

            except Exception as e:
-                logger.error(f"在处理表 '{table_name}' 时发生意外错误: {e}", exc_info=True)
+                logger.error(f"在处理表 '{table_name}' 时发生意外错误: {e}")
                await connection.rollback()
                continue

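The migration loop isolates failures per table: a bad table triggers a rollback of that table's work and a `continue`, rather than aborting the whole migration. A reduced sketch of that loop shape, with the per-table migration step passed in as a callable (both parameters are assumptions, not the project's actual API):

```python
import logging

logger = logging.getLogger("demo")

async def migrate_tables(connection, table_names: list[str], migrate_one) -> None:
    for table_name in table_names:
        try:
            await migrate_one(connection, table_name)
            await connection.commit()
        except Exception as e:
            logger.error(f"在处理表 '{table_name}' 时发生意外错误: {e}")
            # Undo this table's partial changes, then move on so one
            # broken table cannot block migration of the others.
            await connection.rollback()
            continue
```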
@@ -218,7 +218,7 @@ class AdaptiveBatchScheduler:
            except asyncio.CancelledError:
                break
            except Exception as e:
-                logger.error(f"调度器循环异常: {e}", exc_info=True)
+                logger.error(f"调度器循环异常: {e}")

    async def _flush_all_queues(self) -> None:
        """刷新所有队列"""
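The scheduler loop here follows the usual asyncio worker pattern: `CancelledError` breaks the loop cleanly on shutdown, while any other exception is logged and the loop keeps running. A self-contained sketch (interval and flush callable are illustrative):

```python
import asyncio
import logging

logger = logging.getLogger("demo")

async def scheduler_loop(flush, interval: float = 0.2) -> None:
    while True:
        try:
            await asyncio.sleep(interval)
            await flush()
        except asyncio.CancelledError:
            # Task.cancel() lands here; break so the task ends quietly.
            break
        except Exception as e:
            # Any other failure is logged and the loop survives it.
            logger.error(f"调度器循环异常: {e}")

async def main():
    task = asyncio.create_task(scheduler_loop(lambda: asyncio.sleep(0)))
    await asyncio.sleep(0.5)
    task.cancel()
    await asyncio.gather(task, return_exceptions=True)

asyncio.run(main())
```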
@@ -292,7 +292,7 @@ class AdaptiveBatchScheduler:
            )

        except Exception as e:
-            logger.error(f"批量操作执行失败: {e}", exc_info=True)
+            logger.error(f"批量操作执行失败: {e}")
            self.stats.error_count += 1

            # 设置所有future的异常
@@ -320,7 +320,7 @@ class AdaptiveBatchScheduler:
                raise ValueError(f"未知操作类型: {op_type}")

        except Exception as e:
-            logger.error(f"执行{op_type}操作组失败: {e}", exc_info=True)
+            logger.error(f"执行{op_type}操作组失败: {e}")
            for op in operations:
                if op.future and not op.future.done():
                    op.future.set_exception(e)
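When a whole operation group fails, every caller waiting on a per-operation future must be released; setting the same exception on each pending future does that. A minimal sketch of the pattern:

```python
import asyncio

def fail_group(futures: list[asyncio.Future], error: Exception) -> None:
    for fut in futures:
        # Guard with done(): a future may already be resolved or
        # cancelled, and set_exception() on a done future raises.
        if fut and not fut.done():
            fut.set_exception(error)

async def main():
    loop = asyncio.get_running_loop()
    futs = [loop.create_future() for _ in range(3)]
    fail_group(futs, ValueError("批量操作执行失败"))
    # Every waiter now observes the same failure instead of hanging.
    results = await asyncio.gather(*futs, return_exceptions=True)
    assert all(isinstance(r, ValueError) for r in results)

asyncio.run(main())
```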
@@ -362,7 +362,7 @@ class AdaptiveBatchScheduler:
                logger.warning(f"回调执行失败: {e}")

        except Exception as e:
-            logger.error(f"查询失败: {e}", exc_info=True)
+            logger.error(f"查询失败: {e}")
            if op.future and not op.future.done():
                op.future.set_exception(e)

@@ -395,7 +395,7 @@ class AdaptiveBatchScheduler:
                logger.warning(f"回调执行失败: {e}")

        except Exception as e:
-            logger.error(f"批量插入失败: {e}", exc_info=True)
+            logger.error(f"批量插入失败: {e}")
            await session.rollback()
            for op in operations:
                if op.future and not op.future.done():
@@ -439,7 +439,7 @@ class AdaptiveBatchScheduler:
                logger.warning(f"回调执行失败: {e}")

        except Exception as e:
-            logger.error(f"批量更新失败: {e}", exc_info=True)
+            logger.error(f"批量更新失败: {e}")
            await session.rollback()
            # 所有操作都失败
            for op in operations:
@@ -481,7 +481,7 @@ class AdaptiveBatchScheduler:
                logger.warning(f"回调执行失败: {e}")

        except Exception as e:
-            logger.error(f"批量删除失败: {e}", exc_info=True)
+            logger.error(f"批量删除失败: {e}")
            await session.rollback()
            # 所有操作都失败
            for op in operations:

@@ -598,7 +598,7 @@ class MultiLevelCache:
        except asyncio.TimeoutError:
            logger.warning("内存限制检查超时,跳过本次检查")
        except Exception as e:
-            logger.error(f"内存限制检查失败: {e}", exc_info=True)
+            logger.error(f"内存限制检查失败: {e}")

    async def start_cleanup_task(self, interval: float = 60) -> None:
        """启动定期清理任务
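The memory-limit check distinguishes a timeout (logged as a warning and skipped) from a real failure (logged as an error), which keeps a slow check from stalling the cache. A sketch using `asyncio.wait_for`, with the check body and timeout as stand-ins:

```python
import asyncio
import logging

logger = logging.getLogger("demo")

async def check_memory_limit_once(check, timeout: float = 1.0) -> None:
    try:
        await asyncio.wait_for(check(), timeout=timeout)
    except asyncio.TimeoutError:
        # A slow check is skipped rather than treated as an error.
        logger.warning("内存限制检查超时,跳过本次检查")
    except Exception as e:
        logger.error(f"内存限制检查失败: {e}")

# The 5-second "check" exceeds the 0.1s budget and is skipped.
asyncio.run(check_memory_limit_once(lambda: asyncio.sleep(5), timeout=0.1))
```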
@@ -641,7 +641,7 @@ class MultiLevelCache:
            except asyncio.CancelledError:
                break
            except Exception as e:
-                logger.error(f"清理任务异常: {e}", exc_info=True)
+                logger.error(f"清理任务异常: {e}")

        self._cleanup_task = asyncio.create_task(cleanup_loop())
        logger.info(f"缓存清理任务已启动,间隔{interval}秒")
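`start_cleanup_task` stores the created task so it can be cancelled later; pairing it with a stop method is the standard lifecycle. A compact sketch of that start/stop shape around the loop shown above (class and eviction body are placeholders, not the project's `MultiLevelCache`):

```python
import asyncio
import logging

logger = logging.getLogger("demo")

class CacheCleanup:
    def __init__(self) -> None:
        self._cleanup_task: asyncio.Task | None = None

    def start(self, interval: float = 60) -> None:
        async def cleanup_loop() -> None:
            while True:
                try:
                    await asyncio.sleep(interval)
                    # ... evict expired entries here ...
                except asyncio.CancelledError:
                    break
                except Exception as e:
                    logger.error(f"清理任务异常: {e}")

        # Keep the handle so stop() can cancel the loop on shutdown.
        self._cleanup_task = asyncio.create_task(cleanup_loop())
        logger.info(f"缓存清理任务已启动,间隔{interval}秒")

    async def stop(self) -> None:
        if self._cleanup_task:
            self._cleanup_task.cancel()
            await asyncio.gather(self._cleanup_task, return_exceptions=True)
```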
@@ -706,7 +706,7 @@ class MultiLevelCache:
            logger.debug(f"缓存清理任务 {'L1' if i == 0 else 'L2'} 完成")

        except Exception as e:
-            logger.error(f"清理过期条目失败: {e}", exc_info=True)
+            logger.error(f"清理过期条目失败: {e}")

    async def _clean_cache_layer_expired(self, cache_layer, current_time: float, layer_name: str) -> int:
        """清理单个缓存层的过期条目(避免锁嵌套)"""
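The docstring on `_clean_cache_layer_expired` names the design choice: each layer is cleaned under its own lock, one layer at a time, so the cleaner never holds two layer locks at once. A sketch of that shape, assuming each layer is a dict of `(value, expires_at)` pairs guarded by an `asyncio.Lock` (the real layer structure may differ):

```python
import asyncio
import time

async def clean_layer_expired(entries: dict, lock: asyncio.Lock,
                              current_time: float) -> int:
    """Remove expired entries from one layer under that layer's lock only."""
    async with lock:
        expired = [key for key, (_, expires_at) in entries.items()
                   if expires_at <= current_time]
        for key in expired:
            del entries[key]
    return len(expired)

async def clean_all_layers(layers: list[tuple[dict, asyncio.Lock]]) -> int:
    now = time.time()
    removed = 0
    # Layers are cleaned sequentially, so at most one lock is held at
    # any moment and lock-ordering deadlocks cannot occur.
    for entries, lock in layers:
        removed += await clean_layer_expired(entries, lock, now)
    return removed
```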
@@ -219,7 +219,7 @@ class DataPreloader:
                logger.debug(f"发现关联数据: {related_key}")

        except Exception as e:
-            logger.error(f"预加载数据失败 {key}: {e}", exc_info=True)
+            logger.error(f"预加载数据失败 {key}: {e}")

    async def start_preload_batch(
        self,