fix:忍无可忍直接修改log命名方式,避免重命名

This commit is contained in:
SengokuCola
2025-06-15 22:02:28 +08:00
parent 4f258f0870
commit 2af181813e

View File

@@ -33,6 +33,7 @@ def get_file_handler():
     root_logger = logging.getLogger()
     # 检查现有handler避免重复创建
+    root_logger = logging.getLogger()
     for handler in root_logger.handlers:
         if isinstance(handler, logging.handlers.RotatingFileHandler):
             if hasattr(handler, "baseFilename") and Path(handler.baseFilename) == log_file_path:
@@ -65,56 +66,51 @@ def get_console_handler():
     return _console_handler

-class CompressedRotatingFileHandler(logging.handlers.RotatingFileHandler):
-    """支持压缩的轮转文件处理器"""
-    def __init__(self, filename, maxBytes=0, backupCount=0, encoding=None, compress=True, compress_level=6):
-        super().__init__(filename, "a", maxBytes, backupCount, encoding)
+class TimestampedFileHandler(logging.Handler):
+    """基于时间戳的文件处理器，避免重命名操作"""
+    def __init__(self, log_dir, max_bytes=10*1024*1024, backup_count=5, encoding='utf-8', compress=True, compress_level=6):
+        super().__init__()
+        self.log_dir = Path(log_dir)
+        self.log_dir.mkdir(exist_ok=True)
+        self.max_bytes = max_bytes
+        self.backup_count = backup_count
+        self.encoding = encoding
         self.compress = compress
         self.compress_level = compress_level
-        self._rollover_lock = threading.Lock()  # 添加轮转锁
+        self._lock = threading.Lock()
+        # 当前活跃的日志文件
+        self.current_file = None
+        self.current_stream = None
+        self._init_current_file()

-    def doRollover(self):
-        """执行日志轮转，并压缩旧文件"""
-        with self._rollover_lock:
-            if self.stream:
-                self.stream.close()
-                self.stream = None
-            # 如果有备份文件数量限制
-            if self.backupCount > 0:
-                # 删除最旧的压缩文件
-                old_gz = f"{self.baseFilename}.{self.backupCount}.gz"
-                old_file = f"{self.baseFilename}.{self.backupCount}"
-                if Path(old_gz).exists():
-                    self._safe_remove(old_gz)
-                if Path(old_file).exists():
-                    self._safe_remove(old_file)
-                # 重命名现有的备份文件
-                for i in range(self.backupCount - 1, 0, -1):
-                    source_gz = f"{self.baseFilename}.{i}.gz"
-                    dest_gz = f"{self.baseFilename}.{i + 1}.gz"
-                    source_file = f"{self.baseFilename}.{i}"
-                    dest_file = f"{self.baseFilename}.{i + 1}"
-                    if Path(source_gz).exists():
-                        self._safe_rename(source_gz, dest_gz)
-                    elif Path(source_file).exists():
-                        self._safe_rename(source_file, dest_file)
-            # 处理当前日志文件
-            dest_file = f"{self.baseFilename}.1"
-            if Path(self.baseFilename).exists():
-                if self._safe_rename(self.baseFilename, dest_file):
-                    # 在后台线程中压缩文件
-                    if self.compress:
-                        threading.Thread(target=self._compress_file, args=(dest_file,), daemon=True).start()
-            # 重新创建日志文件
-            if not self.delay:
-                self.stream = self._open()
+    def _init_current_file(self):
+        """初始化当前日志文件"""
+        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+        self.current_file = self.log_dir / f"app_{timestamp}.log.jsonl"
+        self.current_stream = open(self.current_file, 'a', encoding=self.encoding)
+
+    def _should_rollover(self):
+        """检查是否需要轮转"""
+        if self.current_file and self.current_file.exists():
+            return self.current_file.stat().st_size >= self.max_bytes
+        return False
+
+    def _do_rollover(self):
+        """执行轮转：关闭当前文件，创建新文件"""
+        if self.current_stream:
+            self.current_stream.close()
+        # 压缩旧文件
+        if self.compress and self.current_file:
+            threading.Thread(target=self._compress_file, args=(self.current_file,), daemon=True).start()
+        # 清理旧文件
+        self._cleanup_old_files()
+        # 创建新文件
+        self._init_current_file()

     def _safe_rename(self, source, dest):
         """安全重命名文件处理Windows文件占用问题"""
@@ -205,7 +201,7 @@ def close_handlers():

 def remove_duplicate_handlers():
-    """移除重复的handler特别是文件handler"""
+    """移除重复的文件handler"""
     root_logger = logging.getLogger()
     log_file_path = str(LOG_DIR / "app.log.jsonl")
@@ -314,7 +310,7 @@ def reconfigure_existing_loggers():
     # 重新设置根logger的所有handler的格式化器
     for handler in root_logger.handlers:
-        if isinstance(handler, logging.handlers.RotatingFileHandler):
+        if isinstance(handler, TimestampedFileHandler):
             handler.setFormatter(file_formatter)
         elif isinstance(handler, logging.StreamHandler):
             handler.setFormatter(console_formatter)
@@ -354,7 +350,7 @@ def reconfigure_existing_loggers():
     # 如果logger有自己的handler重新配置它们避免重复创建文件handler
     for handler in original_handlers:
-        if isinstance(handler, logging.handlers.RotatingFileHandler):
+        if isinstance(handler, TimestampedFileHandler):
             # 不重新添加让它使用根logger的文件handler
             continue
         elif isinstance(handler, logging.StreamHandler):