feat(database): 完成API层、Utils层和兼容层重构 (Stage 4-6)
Stage 4: API层重构
=================
新增文件:
- api/crud.py (430行): CRUDBase泛型类,提供12个CRUD方法
* get, get_by, get_multi, create, update, delete
* count, exists, get_or_create, bulk_create, bulk_update
* 集成缓存: 自动缓存读操作,写操作清除缓存
* 集成批处理: 可选use_batch参数透明使用AdaptiveBatchScheduler
- api/query.py (461行): 高级查询构建器
* QueryBuilder: 链式调用,MongoDB风格操作符
- 操作符: __gt, __lt, __gte, __lte, __ne, __in, __nin, __like, __isnull
- 方法: filter, filter_or, order_by, limit, offset, no_cache
- 执行: all, first, count, exists, paginate
* AggregateQuery: 聚合查询
- sum, avg, max, min, group_by_count
- api/specialized.py (461行): 业务特定API
* ActionRecords: store_action_info, get_recent_actions
* Messages: get_chat_history, get_message_count, save_message
* PersonInfo: get_or_create_person, update_person_affinity
* ChatStreams: get_or_create_chat_stream, get_active_streams
* LLMUsage: record_llm_usage, get_usage_statistics
* UserRelationships: get_user_relationship, update_relationship_affinity
- 更新api/__init__.py: 导出所有API接口
Stage 5: Utils层实现
===================
新增文件:
- utils/decorators.py (320行): 数据库操作装饰器
* @retry: 自动重试失败操作,指数退避
* @timeout: 超时控制
* @cached: 自动缓存函数结果
* @measure_time: 性能测量,慢查询日志
* @transactional: 事务管理,自动提交/回滚
* @db_operation: 组合装饰器
- utils/monitoring.py (330行): 性能监控系统
* DatabaseMonitor: 单例监控器
* OperationMetrics: 操作指标 (次数、时间、错误)
* DatabaseMetrics: 全局指标
- 连接池统计
- 缓存命中率
- 批处理统计
- 预加载统计
* 便捷函数: get_monitor, record_operation, print_stats
- 更新utils/__init__.py: 导出装饰器和监控函数
Stage 6: 兼容层实现
==================
新增目录: compatibility/
- adapter.py (370行): 向后兼容适配器
* 完全兼容旧API签名: db_query, db_save, db_get, store_action_info
* 支持MongoDB风格操作符 (\, \, \)
* 内部使用新架构 (QueryBuilder + CRUDBase)
* 保持返回dict格式不变
* MODEL_MAPPING: 25个模型映射
- __init__.py: 导出兼容API
更新database/__init__.py:
- 导出核心层 (engine, session, models, migration)
- 导出优化层 (cache, preloader, batch_scheduler)
- 导出API层 (CRUD, Query, 业务API)
- 导出Utils层 (装饰器, 监控)
- 导出兼容层 (db_query, db_save等)
核心特性
========
类型安全: Generic[T]提供完整类型推断
缓存透明: 自动缓存,用户无需关心
批处理透明: 可选批处理,自动优化高频写入
链式查询: 流畅的API设计
业务封装: 常用操作封装成便捷函数
向后兼容: 兼容层保证现有代码无缝迁移
性能监控: 完整的指标收集和报告
统计数据
========
- 新增文件: 7个
- 代码行数: ~2050行
- API函数: 14个业务API + 6个装饰器
- 兼容函数: 5个 (db_query, db_save, db_get等)
下一步
======
- 更新28个文件的import语句 (从sqlalchemy_database_api迁移)
- 移动旧文件到old/目录
- 编写Stage 4-6的测试
- 集成测试验证兼容性
This commit is contained in:
@@ -6,6 +6,7 @@
|
||||
- 性能监控
|
||||
"""
|
||||
|
||||
from .decorators import cached, db_operation, measure_time, retry, timeout, transactional
|
||||
from .exceptions import (
|
||||
BatchSchedulerError,
|
||||
CacheError,
|
||||
@@ -17,8 +18,18 @@ from .exceptions import (
|
||||
DatabaseQueryError,
|
||||
DatabaseTransactionError,
|
||||
)
|
||||
from .monitoring import (
|
||||
DatabaseMonitor,
|
||||
get_monitor,
|
||||
print_stats,
|
||||
record_cache_hit,
|
||||
record_cache_miss,
|
||||
record_operation,
|
||||
reset_stats,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
# 异常
|
||||
"DatabaseError",
|
||||
"DatabaseInitializationError",
|
||||
"DatabaseConnectionError",
|
||||
@@ -28,4 +39,19 @@ __all__ = [
|
||||
"CacheError",
|
||||
"BatchSchedulerError",
|
||||
"ConnectionPoolError",
|
||||
# 装饰器
|
||||
"retry",
|
||||
"timeout",
|
||||
"cached",
|
||||
"measure_time",
|
||||
"transactional",
|
||||
"db_operation",
|
||||
# 监控
|
||||
"DatabaseMonitor",
|
||||
"get_monitor",
|
||||
"record_operation",
|
||||
"record_cache_hit",
|
||||
"record_cache_miss",
|
||||
"print_stats",
|
||||
"reset_stats",
|
||||
]
|
||||
|
||||
309
src/common/database/utils/decorators.py
Normal file
309
src/common/database/utils/decorators.py
Normal file
@@ -0,0 +1,309 @@
|
||||
"""数据库操作装饰器
|
||||
|
||||
提供常用的装饰器:
|
||||
- @retry: 自动重试失败的数据库操作
|
||||
- @timeout: 为数据库操作添加超时控制
|
||||
- @cached: 自动缓存函数结果
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import functools
|
||||
import hashlib
|
||||
import time
|
||||
from typing import Any, Awaitable, Callable, Optional, TypeVar
|
||||
|
||||
from sqlalchemy.exc import DBAPIError, OperationalError, TimeoutError as SQLTimeoutError
|
||||
|
||||
from src.common.database.optimization import get_cache
|
||||
from src.common.logger import get_logger
|
||||
|
||||
logger = get_logger("database.decorators")
|
||||
|
||||
T = TypeVar("T")
|
||||
F = TypeVar("F", bound=Callable[..., Awaitable[Any]])
|
||||
|
||||
|
||||
def retry(
|
||||
max_attempts: int = 3,
|
||||
delay: float = 0.5,
|
||||
backoff: float = 2.0,
|
||||
exceptions: tuple[type[Exception], ...] = (OperationalError, DBAPIError, SQLTimeoutError),
|
||||
):
|
||||
"""重试装饰器
|
||||
|
||||
自动重试失败的数据库操作,适用于临时性错误
|
||||
|
||||
Args:
|
||||
max_attempts: 最大尝试次数
|
||||
delay: 初始延迟时间(秒)
|
||||
backoff: 延迟倍数(指数退避)
|
||||
exceptions: 需要重试的异常类型
|
||||
|
||||
Example:
|
||||
@retry(max_attempts=3, delay=1.0)
|
||||
async def query_data():
|
||||
return await session.execute(stmt)
|
||||
"""
|
||||
|
||||
def decorator(func: Callable[..., T]) -> Callable[..., T]:
|
||||
@functools.wraps(func)
|
||||
async def wrapper(*args: Any, **kwargs: Any) -> T:
|
||||
last_exception = None
|
||||
current_delay = delay
|
||||
|
||||
for attempt in range(1, max_attempts + 1):
|
||||
try:
|
||||
return await func(*args, **kwargs)
|
||||
except exceptions as e:
|
||||
last_exception = e
|
||||
if attempt < max_attempts:
|
||||
logger.warning(
|
||||
f"{func.__name__} 失败 (尝试 {attempt}/{max_attempts}): {e}. "
|
||||
f"等待 {current_delay:.2f}s 后重试..."
|
||||
)
|
||||
await asyncio.sleep(current_delay)
|
||||
current_delay *= backoff
|
||||
else:
|
||||
logger.error(
|
||||
f"{func.__name__} 在 {max_attempts} 次尝试后仍然失败: {e}",
|
||||
exc_info=True,
|
||||
)
|
||||
|
||||
# 所有尝试都失败
|
||||
raise last_exception
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def timeout(seconds: float):
|
||||
"""超时装饰器
|
||||
|
||||
为数据库操作添加超时控制
|
||||
|
||||
Args:
|
||||
seconds: 超时时间(秒)
|
||||
|
||||
Example:
|
||||
@timeout(30.0)
|
||||
async def long_query():
|
||||
return await session.execute(complex_stmt)
|
||||
"""
|
||||
|
||||
def decorator(func: Callable[..., T]) -> Callable[..., T]:
|
||||
@functools.wraps(func)
|
||||
async def wrapper(*args: Any, **kwargs: Any) -> T:
|
||||
try:
|
||||
return await asyncio.wait_for(func(*args, **kwargs), timeout=seconds)
|
||||
except asyncio.TimeoutError:
|
||||
logger.error(f"{func.__name__} 执行超时 (>{seconds}s)")
|
||||
raise TimeoutError(f"{func.__name__} 执行超时 (>{seconds}s)")
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def cached(
|
||||
ttl: Optional[int] = 300,
|
||||
key_prefix: Optional[str] = None,
|
||||
use_args: bool = True,
|
||||
use_kwargs: bool = True,
|
||||
):
|
||||
"""缓存装饰器
|
||||
|
||||
自动缓存函数返回值
|
||||
|
||||
Args:
|
||||
ttl: 缓存过期时间(秒),None表示永不过期
|
||||
key_prefix: 缓存键前缀,默认使用函数名
|
||||
use_args: 是否将位置参数包含在缓存键中
|
||||
use_kwargs: 是否将关键字参数包含在缓存键中
|
||||
|
||||
Example:
|
||||
@cached(ttl=60, key_prefix="user_data")
|
||||
async def get_user_info(user_id: str) -> dict:
|
||||
return await query_user(user_id)
|
||||
"""
|
||||
|
||||
def decorator(func: Callable[..., T]) -> Callable[..., T]:
|
||||
@functools.wraps(func)
|
||||
async def wrapper(*args: Any, **kwargs: Any) -> T:
|
||||
# 生成缓存键
|
||||
cache_key_parts = [key_prefix or func.__name__]
|
||||
|
||||
if use_args and args:
|
||||
# 将位置参数转换为字符串
|
||||
args_str = ",".join(str(arg) for arg in args)
|
||||
args_hash = hashlib.md5(args_str.encode()).hexdigest()[:8]
|
||||
cache_key_parts.append(f"args:{args_hash}")
|
||||
|
||||
if use_kwargs and kwargs:
|
||||
# 将关键字参数转换为字符串(排序以保证一致性)
|
||||
kwargs_str = ",".join(f"{k}={v}" for k, v in sorted(kwargs.items()))
|
||||
kwargs_hash = hashlib.md5(kwargs_str.encode()).hexdigest()[:8]
|
||||
cache_key_parts.append(f"kwargs:{kwargs_hash}")
|
||||
|
||||
cache_key = ":".join(cache_key_parts)
|
||||
|
||||
# 尝试从缓存获取
|
||||
cache = await get_cache()
|
||||
cached_result = await cache.get(cache_key)
|
||||
|
||||
if cached_result is not None:
|
||||
logger.debug(f"缓存命中: {cache_key}")
|
||||
return cached_result
|
||||
|
||||
# 执行函数
|
||||
result = await func(*args, **kwargs)
|
||||
|
||||
# 写入缓存(注意:MultiLevelCache.set不支持ttl参数,使用L1缓存的默认TTL)
|
||||
await cache.set(cache_key, result)
|
||||
logger.debug(f"缓存写入: {cache_key}")
|
||||
|
||||
return result
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def measure_time(log_slow: Optional[float] = None):
|
||||
"""性能测量装饰器
|
||||
|
||||
测量函数执行时间,可选择性记录慢查询
|
||||
|
||||
Args:
|
||||
log_slow: 慢查询阈值(秒),超过此时间会记录warning日志
|
||||
|
||||
Example:
|
||||
@measure_time(log_slow=1.0)
|
||||
async def complex_query():
|
||||
return await session.execute(stmt)
|
||||
"""
|
||||
|
||||
def decorator(func: Callable[..., T]) -> Callable[..., T]:
|
||||
@functools.wraps(func)
|
||||
async def wrapper(*args: Any, **kwargs: Any) -> T:
|
||||
start_time = time.perf_counter()
|
||||
|
||||
try:
|
||||
result = await func(*args, **kwargs)
|
||||
return result
|
||||
finally:
|
||||
elapsed = time.perf_counter() - start_time
|
||||
|
||||
if log_slow and elapsed > log_slow:
|
||||
logger.warning(
|
||||
f"{func.__name__} 执行缓慢: {elapsed:.3f}s (阈值: {log_slow}s)"
|
||||
)
|
||||
else:
|
||||
logger.debug(f"{func.__name__} 执行时间: {elapsed:.3f}s")
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def transactional(auto_commit: bool = True, auto_rollback: bool = True):
|
||||
"""事务装饰器
|
||||
|
||||
自动管理事务的提交和回滚
|
||||
|
||||
Args:
|
||||
auto_commit: 是否自动提交
|
||||
auto_rollback: 发生异常时是否自动回滚
|
||||
|
||||
Example:
|
||||
@transactional()
|
||||
async def update_multiple_records(session):
|
||||
await session.execute(stmt1)
|
||||
await session.execute(stmt2)
|
||||
|
||||
Note:
|
||||
函数需要接受session参数
|
||||
"""
|
||||
|
||||
def decorator(func: Callable[..., T]) -> Callable[..., T]:
|
||||
@functools.wraps(func)
|
||||
async def wrapper(*args: Any, **kwargs: Any) -> T:
|
||||
# 查找session参数
|
||||
session = None
|
||||
if args:
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
for arg in args:
|
||||
if isinstance(arg, AsyncSession):
|
||||
session = arg
|
||||
break
|
||||
|
||||
if not session and "session" in kwargs:
|
||||
session = kwargs["session"]
|
||||
|
||||
if not session:
|
||||
logger.warning(f"{func.__name__} 未找到session参数,跳过事务管理")
|
||||
return await func(*args, **kwargs)
|
||||
|
||||
try:
|
||||
result = await func(*args, **kwargs)
|
||||
|
||||
if auto_commit:
|
||||
await session.commit()
|
||||
logger.debug(f"{func.__name__} 事务已提交")
|
||||
|
||||
return result
|
||||
|
||||
except Exception as e:
|
||||
if auto_rollback:
|
||||
await session.rollback()
|
||||
logger.error(f"{func.__name__} 事务已回滚: {e}")
|
||||
raise
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
# 组合装饰器示例
|
||||
def db_operation(
|
||||
retry_attempts: int = 3,
|
||||
timeout_seconds: Optional[float] = None,
|
||||
cache_ttl: Optional[int] = None,
|
||||
measure: bool = True,
|
||||
):
|
||||
"""组合装饰器
|
||||
|
||||
组合多个装饰器,提供完整的数据库操作保护
|
||||
|
||||
Args:
|
||||
retry_attempts: 重试次数
|
||||
timeout_seconds: 超时时间
|
||||
cache_ttl: 缓存时间
|
||||
measure: 是否测量性能
|
||||
|
||||
Example:
|
||||
@db_operation(retry_attempts=3, timeout_seconds=30, cache_ttl=60)
|
||||
async def important_query():
|
||||
return await complex_operation()
|
||||
"""
|
||||
|
||||
def decorator(func: Callable[..., T]) -> Callable[..., T]:
|
||||
# 从内到外应用装饰器
|
||||
wrapped = func
|
||||
|
||||
if measure:
|
||||
wrapped = measure_time(log_slow=1.0)(wrapped)
|
||||
|
||||
if cache_ttl:
|
||||
wrapped = cached(ttl=cache_ttl)(wrapped)
|
||||
|
||||
if timeout_seconds:
|
||||
wrapped = timeout(timeout_seconds)(wrapped)
|
||||
|
||||
if retry_attempts > 1:
|
||||
wrapped = retry(max_attempts=retry_attempts)(wrapped)
|
||||
|
||||
return wrapped
|
||||
|
||||
return decorator
|
||||
322
src/common/database/utils/monitoring.py
Normal file
322
src/common/database/utils/monitoring.py
Normal file
@@ -0,0 +1,322 @@
|
||||
"""数据库性能监控
|
||||
|
||||
提供数据库操作的性能监控和统计功能
|
||||
"""
|
||||
|
||||
import time
|
||||
from collections import defaultdict
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Any, Optional
|
||||
|
||||
from src.common.logger import get_logger
|
||||
|
||||
logger = get_logger("database.monitoring")
|
||||
|
||||
|
||||
@dataclass
|
||||
class OperationMetrics:
|
||||
"""操作指标"""
|
||||
|
||||
count: int = 0
|
||||
total_time: float = 0.0
|
||||
min_time: float = float("inf")
|
||||
max_time: float = 0.0
|
||||
error_count: int = 0
|
||||
last_execution_time: Optional[float] = None
|
||||
|
||||
@property
|
||||
def avg_time(self) -> float:
|
||||
"""平均执行时间"""
|
||||
return self.total_time / self.count if self.count > 0 else 0.0
|
||||
|
||||
def record_success(self, execution_time: float):
|
||||
"""记录成功执行"""
|
||||
self.count += 1
|
||||
self.total_time += execution_time
|
||||
self.min_time = min(self.min_time, execution_time)
|
||||
self.max_time = max(self.max_time, execution_time)
|
||||
self.last_execution_time = time.time()
|
||||
|
||||
def record_error(self):
|
||||
"""记录错误"""
|
||||
self.error_count += 1
|
||||
|
||||
|
||||
@dataclass
|
||||
class DatabaseMetrics:
|
||||
"""数据库指标"""
|
||||
|
||||
# 操作统计
|
||||
operations: dict[str, OperationMetrics] = field(default_factory=dict)
|
||||
|
||||
# 连接池统计
|
||||
connection_acquired: int = 0
|
||||
connection_released: int = 0
|
||||
connection_errors: int = 0
|
||||
|
||||
# 缓存统计
|
||||
cache_hits: int = 0
|
||||
cache_misses: int = 0
|
||||
cache_sets: int = 0
|
||||
cache_invalidations: int = 0
|
||||
|
||||
# 批处理统计
|
||||
batch_operations: int = 0
|
||||
batch_items_total: int = 0
|
||||
batch_avg_size: float = 0.0
|
||||
|
||||
# 预加载统计
|
||||
preload_operations: int = 0
|
||||
preload_hits: int = 0
|
||||
|
||||
@property
|
||||
def cache_hit_rate(self) -> float:
|
||||
"""缓存命中率"""
|
||||
total = self.cache_hits + self.cache_misses
|
||||
return self.cache_hits / total if total > 0 else 0.0
|
||||
|
||||
@property
|
||||
def error_rate(self) -> float:
|
||||
"""错误率"""
|
||||
total_ops = sum(m.count for m in self.operations.values())
|
||||
total_errors = sum(m.error_count for m in self.operations.values())
|
||||
return total_errors / total_ops if total_ops > 0 else 0.0
|
||||
|
||||
def get_operation_metrics(self, operation_name: str) -> OperationMetrics:
|
||||
"""获取操作指标"""
|
||||
if operation_name not in self.operations:
|
||||
self.operations[operation_name] = OperationMetrics()
|
||||
return self.operations[operation_name]
|
||||
|
||||
|
||||
class DatabaseMonitor:
|
||||
"""数据库监控器
|
||||
|
||||
单例模式,收集和报告数据库性能指标
|
||||
"""
|
||||
|
||||
_instance: Optional["DatabaseMonitor"] = None
|
||||
_metrics: DatabaseMetrics
|
||||
|
||||
def __new__(cls):
|
||||
if cls._instance is None:
|
||||
cls._instance = super().__new__(cls)
|
||||
cls._instance._metrics = DatabaseMetrics()
|
||||
return cls._instance
|
||||
|
||||
def record_operation(
|
||||
self,
|
||||
operation_name: str,
|
||||
execution_time: float,
|
||||
success: bool = True,
|
||||
):
|
||||
"""记录操作"""
|
||||
metrics = self._metrics.get_operation_metrics(operation_name)
|
||||
if success:
|
||||
metrics.record_success(execution_time)
|
||||
else:
|
||||
metrics.record_error()
|
||||
|
||||
def record_connection_acquired(self):
|
||||
"""记录连接获取"""
|
||||
self._metrics.connection_acquired += 1
|
||||
|
||||
def record_connection_released(self):
|
||||
"""记录连接释放"""
|
||||
self._metrics.connection_released += 1
|
||||
|
||||
def record_connection_error(self):
|
||||
"""记录连接错误"""
|
||||
self._metrics.connection_errors += 1
|
||||
|
||||
def record_cache_hit(self):
|
||||
"""记录缓存命中"""
|
||||
self._metrics.cache_hits += 1
|
||||
|
||||
def record_cache_miss(self):
|
||||
"""记录缓存未命中"""
|
||||
self._metrics.cache_misses += 1
|
||||
|
||||
def record_cache_set(self):
|
||||
"""记录缓存设置"""
|
||||
self._metrics.cache_sets += 1
|
||||
|
||||
def record_cache_invalidation(self):
|
||||
"""记录缓存失效"""
|
||||
self._metrics.cache_invalidations += 1
|
||||
|
||||
def record_batch_operation(self, batch_size: int):
|
||||
"""记录批处理操作"""
|
||||
self._metrics.batch_operations += 1
|
||||
self._metrics.batch_items_total += batch_size
|
||||
self._metrics.batch_avg_size = (
|
||||
self._metrics.batch_items_total / self._metrics.batch_operations
|
||||
)
|
||||
|
||||
def record_preload_operation(self, hit: bool = False):
|
||||
"""记录预加载操作"""
|
||||
self._metrics.preload_operations += 1
|
||||
if hit:
|
||||
self._metrics.preload_hits += 1
|
||||
|
||||
def get_metrics(self) -> DatabaseMetrics:
|
||||
"""获取指标"""
|
||||
return self._metrics
|
||||
|
||||
def get_summary(self) -> dict[str, Any]:
|
||||
"""获取统计摘要"""
|
||||
metrics = self._metrics
|
||||
|
||||
operation_summary = {}
|
||||
for op_name, op_metrics in metrics.operations.items():
|
||||
operation_summary[op_name] = {
|
||||
"count": op_metrics.count,
|
||||
"avg_time": f"{op_metrics.avg_time:.3f}s",
|
||||
"min_time": f"{op_metrics.min_time:.3f}s",
|
||||
"max_time": f"{op_metrics.max_time:.3f}s",
|
||||
"error_count": op_metrics.error_count,
|
||||
}
|
||||
|
||||
return {
|
||||
"operations": operation_summary,
|
||||
"connections": {
|
||||
"acquired": metrics.connection_acquired,
|
||||
"released": metrics.connection_released,
|
||||
"errors": metrics.connection_errors,
|
||||
"active": metrics.connection_acquired - metrics.connection_released,
|
||||
},
|
||||
"cache": {
|
||||
"hits": metrics.cache_hits,
|
||||
"misses": metrics.cache_misses,
|
||||
"sets": metrics.cache_sets,
|
||||
"invalidations": metrics.cache_invalidations,
|
||||
"hit_rate": f"{metrics.cache_hit_rate:.2%}",
|
||||
},
|
||||
"batch": {
|
||||
"operations": metrics.batch_operations,
|
||||
"total_items": metrics.batch_items_total,
|
||||
"avg_size": f"{metrics.batch_avg_size:.1f}",
|
||||
},
|
||||
"preload": {
|
||||
"operations": metrics.preload_operations,
|
||||
"hits": metrics.preload_hits,
|
||||
"hit_rate": (
|
||||
f"{metrics.preload_hits / metrics.preload_operations:.2%}"
|
||||
if metrics.preload_operations > 0
|
||||
else "N/A"
|
||||
),
|
||||
},
|
||||
"overall": {
|
||||
"error_rate": f"{metrics.error_rate:.2%}",
|
||||
},
|
||||
}
|
||||
|
||||
def print_summary(self):
|
||||
"""打印统计摘要"""
|
||||
summary = self.get_summary()
|
||||
|
||||
logger.info("=" * 60)
|
||||
logger.info("数据库性能统计")
|
||||
logger.info("=" * 60)
|
||||
|
||||
# 操作统计
|
||||
if summary["operations"]:
|
||||
logger.info("\n操作统计:")
|
||||
for op_name, stats in summary["operations"].items():
|
||||
logger.info(
|
||||
f" {op_name}: "
|
||||
f"次数={stats['count']}, "
|
||||
f"平均={stats['avg_time']}, "
|
||||
f"最小={stats['min_time']}, "
|
||||
f"最大={stats['max_time']}, "
|
||||
f"错误={stats['error_count']}"
|
||||
)
|
||||
|
||||
# 连接池统计
|
||||
logger.info("\n连接池:")
|
||||
conn = summary["connections"]
|
||||
logger.info(
|
||||
f" 获取={conn['acquired']}, "
|
||||
f"释放={conn['released']}, "
|
||||
f"活跃={conn['active']}, "
|
||||
f"错误={conn['errors']}"
|
||||
)
|
||||
|
||||
# 缓存统计
|
||||
logger.info("\n缓存:")
|
||||
cache = summary["cache"]
|
||||
logger.info(
|
||||
f" 命中={cache['hits']}, "
|
||||
f"未命中={cache['misses']}, "
|
||||
f"设置={cache['sets']}, "
|
||||
f"失效={cache['invalidations']}, "
|
||||
f"命中率={cache['hit_rate']}"
|
||||
)
|
||||
|
||||
# 批处理统计
|
||||
logger.info("\n批处理:")
|
||||
batch = summary["batch"]
|
||||
logger.info(
|
||||
f" 操作={batch['operations']}, "
|
||||
f"总项目={batch['total_items']}, "
|
||||
f"平均大小={batch['avg_size']}"
|
||||
)
|
||||
|
||||
# 预加载统计
|
||||
logger.info("\n预加载:")
|
||||
preload = summary["preload"]
|
||||
logger.info(
|
||||
f" 操作={preload['operations']}, "
|
||||
f"命中={preload['hits']}, "
|
||||
f"命中率={preload['hit_rate']}"
|
||||
)
|
||||
|
||||
# 整体统计
|
||||
logger.info("\n整体:")
|
||||
overall = summary["overall"]
|
||||
logger.info(f" 错误率={overall['error_rate']}")
|
||||
|
||||
logger.info("=" * 60)
|
||||
|
||||
def reset(self):
|
||||
"""重置统计"""
|
||||
self._metrics = DatabaseMetrics()
|
||||
logger.info("数据库监控统计已重置")
|
||||
|
||||
|
||||
# 全局监控器实例
|
||||
_monitor: Optional[DatabaseMonitor] = None
|
||||
|
||||
|
||||
def get_monitor() -> DatabaseMonitor:
|
||||
"""获取监控器实例"""
|
||||
global _monitor
|
||||
if _monitor is None:
|
||||
_monitor = DatabaseMonitor()
|
||||
return _monitor
|
||||
|
||||
|
||||
# 便捷函数
|
||||
def record_operation(operation_name: str, execution_time: float, success: bool = True):
|
||||
"""记录操作"""
|
||||
get_monitor().record_operation(operation_name, execution_time, success)
|
||||
|
||||
|
||||
def record_cache_hit():
|
||||
"""记录缓存命中"""
|
||||
get_monitor().record_cache_hit()
|
||||
|
||||
|
||||
def record_cache_miss():
|
||||
"""记录缓存未命中"""
|
||||
get_monitor().record_cache_miss()
|
||||
|
||||
|
||||
def print_stats():
|
||||
"""打印统计信息"""
|
||||
get_monitor().print_summary()
|
||||
|
||||
|
||||
def reset_stats():
|
||||
"""重置统计"""
|
||||
get_monitor().reset()
|
||||
Reference in New Issue
Block a user