better: optimize the size of individual log files

SengokuCola
2025-06-16 13:46:50 +08:00
parent a1037e8960
commit 3951a3a39a
2 changed files with 128 additions and 62 deletions

View File

@@ -202,6 +202,9 @@ class LogViewer:
         # Initialize the log formatter
         self.formatter = LogFormatter(self.log_config, self.custom_module_colors, self.custom_level_colors)
 
+        # Initialize the current log file path
+        self.current_log_file = Path("logs/app.log.jsonl")
+
         # Create the main frame
         self.main_frame = ttk.Frame(root)
         self.main_frame.pack(fill=tk.BOTH, expand=True, padx=5, pady=5)
@@ -213,6 +216,23 @@ class LogViewer:
         self.control_frame = ttk.Frame(self.main_frame)
         self.control_frame.pack(fill=tk.X, pady=(0, 5))
 
+        # File selection frame
+        self.file_frame = ttk.LabelFrame(self.control_frame, text="日志文件")
+        self.file_frame.pack(side=tk.TOP, fill=tk.X, padx=5, pady=(0, 5))
+
+        # Current file display
+        self.current_file_var = tk.StringVar(value=str(self.current_log_file))
+        self.file_label = ttk.Label(self.file_frame, textvariable=self.current_file_var, foreground="blue")
+        self.file_label.pack(side=tk.LEFT, padx=5, pady=2)
+
+        # File selection button
+        select_file_btn = ttk.Button(self.file_frame, text="选择文件", command=self.select_log_file)
+        select_file_btn.pack(side=tk.RIGHT, padx=5, pady=2)
+
+        # Refresh button
+        refresh_btn = ttk.Button(self.file_frame, text="刷新", command=self.refresh_log_file)
+        refresh_btn.pack(side=tk.RIGHT, padx=2, pady=2)
+
         # Module selection frame
         self.module_frame = ttk.LabelFrame(self.control_frame, text="模块")
         self.module_frame.pack(side=tk.LEFT, fill=tk.X, expand=True, padx=5)
@@ -326,6 +346,14 @@ class LogViewer:
         self.update_thread.daemon = True
         self.update_thread.start()
 
+        # Bind keyboard shortcuts
+        self.root.bind("<Control-o>", lambda e: self.select_log_file())
+        self.root.bind("<F5>", lambda e: self.refresh_log_file())
+        self.root.bind("<Control-s>", lambda e: self.export_logs())
+
+        # Update the window title
+        self.update_window_title()
+
     def load_config(self):
         """Load the configuration file"""
         # Default configuration
@@ -422,11 +450,18 @@ class LogViewer:
         config_menu.add_separator()
         config_menu.add_command(label="重新加载配置", command=self.reload_config)
 
+        # File menu
+        file_menu = tk.Menu(menubar, tearoff=0)
+        menubar.add_cascade(label="文件", menu=file_menu)
+        file_menu.add_command(label="选择日志文件", command=self.select_log_file, accelerator="Ctrl+O")
+        file_menu.add_command(label="刷新当前文件", command=self.refresh_log_file, accelerator="F5")
+        file_menu.add_separator()
+        file_menu.add_command(label="导出当前日志", command=self.export_logs, accelerator="Ctrl+S")
+
         # Tools menu
         tools_menu = tk.Menu(menubar, tearoff=0)
         menubar.add_cascade(label="工具", menu=tools_menu)
         tools_menu.add_command(label="清空日志显示", command=self.clear_log_display)
-        tools_menu.add_command(label="导出当前日志", command=self.export_logs)
 
     def show_format_settings(self):
         """Show the format settings window"""
@@ -724,9 +759,8 @@ class LogViewer:
     def update_module_list(self):
         """Update the module list"""
-        log_file = Path("logs/app.log.jsonl")
-        if log_file.exists():
-            with open(log_file, "r", encoding="utf-8") as f:
+        if self.current_log_file.exists():
+            with open(self.current_log_file, "r", encoding="utf-8") as f:
                 for line in f:
                     try:
                         log_entry = json.loads(line)
@@ -854,14 +888,19 @@ class LogViewer:
     def monitor_log_file(self):
         """Monitor the log file for changes"""
-        log_file = Path("logs/app.log.jsonl")
         last_position = 0
+        current_monitored_file = None
 
         while self.running:
-            if log_file.exists():
+            # Check whether the monitored file needs to be switched
+            if current_monitored_file != self.current_log_file:
+                current_monitored_file = self.current_log_file
+                last_position = 0  # reset the read position
+
+            if current_monitored_file.exists():
                 try:
                     # Use shared read mode to avoid locking the file
-                    with open(log_file, "r", encoding="utf-8", buffering=1) as f:
+                    with open(current_monitored_file, "r", encoding="utf-8", buffering=1) as f:
                         f.seek(last_position)
                         new_lines = f.readlines()
                         last_position = f.tell()
@@ -1069,6 +1108,72 @@ class LogViewer:
         ttk.Button(button_frame, text="保存", command=save_mappings).pack(side=tk.RIGHT, padx=5)
         ttk.Button(button_frame, text="取消", command=mapping_window.destroy).pack(side=tk.RIGHT, padx=5)
 
+    def select_log_file(self):
+        """Select a log file"""
+        filename = filedialog.askopenfilename(
+            title="选择日志文件",
+            filetypes=[("JSONL日志文件", "*.jsonl"), ("所有文件", "*.*")],
+            initialdir="logs" if Path("logs").exists() else "."
+        )
+        if filename:
+            new_file = Path(filename)
+            if new_file != self.current_log_file:
+                self.current_log_file = new_file
+                self.current_file_var.set(str(self.current_log_file))
+                self.reload_log_file()
+
+    def refresh_log_file(self):
+        """Refresh the log file"""
+        self.reload_log_file()
+
+    def reload_log_file(self):
+        """Reload the log file"""
+        # Clear the current cache and display
+        self.log_cache.clear()
+        self.modules.clear()
+        self.selected_modules.clear()
+        self.log_text.delete(1.0, tk.END)
+
+        # Drain the log queue
+        while not self.log_queue.empty():
+            try:
+                self.log_queue.get_nowait()
+            except queue.Empty:
+                break
+
+        # Re-read the whole file
+        if self.current_log_file.exists():
+            try:
+                with open(self.current_log_file, "r", encoding="utf-8") as f:
+                    for line in f:
+                        try:
+                            log_entry = json.loads(line)
+                            self.log_cache.append(log_entry)
+                            # Collect module information
+                            if "logger_name" in log_entry:
+                                self.modules.add(log_entry["logger_name"])
+                        except json.JSONDecodeError:
+                            continue
+            except Exception as e:
+                messagebox.showerror("错误", f"读取日志文件失败: {e}")
+                return
+
+        # Update the module list UI
+        self.update_module_list()
+        # Filter and display the logs
+        self.filter_logs()
+        # Update the window title
+        self.update_window_title()
+
+    def update_window_title(self):
+        """Update the window title"""
+        filename = self.current_log_file.name
+        self.root.title(f"MaiBot日志查看器 - {filename}")
 
 def main():
     root = tk.Tk()

View File

@@ -1,11 +1,9 @@
 import logging
-# logging.handlers is no longer needed; switched to the timestamp-based handler
+# Use the timestamp-based file handler with a simple rotation count limit
 from pathlib import Path
 from typing import Callable, Optional
 import json
-import gzip
-import shutil
 import threading
 import time
 from datetime import datetime, timedelta
@@ -36,14 +34,12 @@ def get_file_handler():
         _file_handler = handler
         return _file_handler
 
-    # Use the new timestamp-based handler to avoid rename operations
+    # Use the timestamp-based handler with a simple rotation count limit
     _file_handler = TimestampedFileHandler(
         log_dir=LOG_DIR,
-        max_bytes=10 * 1024 * 1024,  # 10MB
-        backup_count=5,
+        max_bytes=2 * 1024 * 1024,  # 2MB
+        backup_count=30,
         encoding="utf-8",
-        compress=True,
-        compress_level=6,
     )
 
     # Set the file handler's log level
     file_level = LOG_CONFIG.get("file_log_level", LOG_CONFIG.get("log_level", "INFO"))
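Under the new defaults the rotated JSONL files are capped at roughly backup_count × max_bytes = 30 × 2 MB ≈ 60 MB on disk, compared with 5 × 10 MB = 50 MB (before compression) under the old configuration.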
@@ -63,10 +59,10 @@ def get_console_handler():
 class TimestampedFileHandler(logging.Handler):
-    """Timestamp-based file handler that avoids rename operations"""
+    """Timestamp-based file handler with a simple rotation count limit"""
 
     def __init__(
-        self, log_dir, max_bytes=10 * 1024 * 1024, backup_count=5, encoding="utf-8", compress=True, compress_level=6
+        self, log_dir, max_bytes=2 * 1024 * 1024, backup_count=30, encoding="utf-8"
     ):
         super().__init__()
         self.log_dir = Path(log_dir)
@@ -74,8 +70,6 @@ class TimestampedFileHandler(logging.Handler):
         self.max_bytes = max_bytes
         self.backup_count = backup_count
         self.encoding = encoding
-        self.compress = compress
-        self.compress_level = compress_level
         self._lock = threading.Lock()
 
         # The currently active log file
@@ -100,48 +94,19 @@ class TimestampedFileHandler(logging.Handler):
         if self.current_stream:
             self.current_stream.close()
 
-        # Compress the old file
-        if self.compress and self.current_file:
-            threading.Thread(target=self._compress_file, args=(self.current_file,), daemon=True).start()
-
         # Clean up old files
         self._cleanup_old_files()
 
         # Create a new file
         self._init_current_file()
 
-    def _compress_file(self, file_path):
-        """Compress the file in the background"""
-        try:
-            time.sleep(0.5)  # wait for writes to finish
-            if not file_path.exists():
-                return
-
-            compressed_path = file_path.with_suffix(file_path.suffix + ".gz")
-            original_size = file_path.stat().st_size
-
-            with open(file_path, "rb") as f_in:
-                with gzip.open(compressed_path, "wb", compresslevel=self.compress_level) as f_out:
-                    shutil.copyfileobj(f_in, f_out)
-
-            # Delete the original file
-            file_path.unlink()
-
-            compressed_size = compressed_path.stat().st_size
-            ratio = (1 - compressed_size / original_size) * 100 if original_size > 0 else 0
-            print(f"[日志压缩] {file_path.name} -> {compressed_path.name} (压缩率: {ratio:.1f}%)")
-        except Exception as e:
-            print(f"[日志压缩] 压缩失败 {file_path}: {e}")
-
     def _cleanup_old_files(self):
         """Clean up old log files, keeping the configured number"""
         try:
-            # Gather all log files (including compressed ones)
-            log_files = []
-            for pattern in ["app_*.log.jsonl", "app_*.log.jsonl.gz"]:
-                log_files.extend(self.log_dir.glob(pattern))
+            # Gather all log files
+            log_files = list(self.log_dir.glob("app_*.log.jsonl"))
 
             # Sort by modification time
             log_files.sort(key=lambda f: f.stat().st_mtime, reverse=True)
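The hunk ends at the sort call, before the deletion step. A minimal standalone sketch of what a count-limited cleanup typically does next, assuming files beyond backup_count are simply unlinked (the real method body is not shown in this diff):

from pathlib import Path

def cleanup_old_logs(log_dir: Path, backup_count: int = 30) -> None:
    # Sort newest-first by modification time, keep the first backup_count files
    log_files = sorted(log_dir.glob("app_*.log.jsonl"), key=lambda f: f.stat().st_mtime, reverse=True)
    for old_file in log_files[backup_count:]:
        try:
            old_file.unlink()  # assumed behaviour: delete files past the limit
        except OSError:
            pass  # the file may already be gone or still open; skip it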
@@ -380,6 +345,8 @@ MODULE_COLORS = {
"hfc": "\033[96m", "hfc": "\033[96m",
"base_action": "\033[96m", "base_action": "\033[96m",
"action_manager": "\033[34m", "action_manager": "\033[34m",
# 关系系统
"relation": "\033[38;5;201m", # 深粉色
# 聊天相关模块 # 聊天相关模块
"normal_chat": "\033[38;5;81m", # 亮蓝绿色 "normal_chat": "\033[38;5;81m", # 亮蓝绿色
"normal_chat_response": "\033[38;5;123m", # 青绿色 "normal_chat_response": "\033[38;5;123m", # 青绿色
@@ -724,8 +691,8 @@ def configure_logging(
     level: str = "INFO",
     console_level: str = None,
     file_level: str = None,
-    max_bytes: int = 10 * 1024 * 1024,
-    backup_count: int = 5,
+    max_bytes: int = 2 * 1024 * 1024,
+    backup_count: int = 30,
     log_dir: str = "logs",
 ):
     """Dynamically configure logging parameters"""
@@ -933,7 +900,7 @@ def initialize_logging():
logger.info("日志系统已重新初始化:") logger.info("日志系统已重新初始化:")
logger.info(f" - 控制台级别: {console_level}") logger.info(f" - 控制台级别: {console_level}")
logger.info(f" - 文件级别: {file_level}") logger.info(f" - 文件级别: {file_level}")
logger.info(" - 压缩功能: 启用") logger.info(" - 轮转份数: 30个文件")
logger.info(" - 自动清理: 30天前的日志") logger.info(" - 自动清理: 30天前的日志")
@@ -955,7 +922,7 @@ def force_initialize_logging():
     logger = get_logger("logger")
     console_level = LOG_CONFIG.get("console_log_level", LOG_CONFIG.get("log_level", "INFO"))
     file_level = LOG_CONFIG.get("file_log_level", LOG_CONFIG.get("log_level", "INFO"))
-    logger.info(f"日志系统已强制重新初始化,控制台级别: {console_level},文件级别: {file_level},所有logger格式已统一")
+    logger.info(f"日志系统已强制重新初始化,控制台级别: {console_level},文件级别: {file_level},轮转份数: 30个文件,所有logger格式已统一")
 
 def show_module_colors():
@@ -1033,12 +1000,12 @@ def start_log_cleanup_task():
     cleanup_thread.start()
 
     logger = get_logger("logger")
-    logger.info("已启动日志清理任务,将自动清理30天前的日志文件")
+    logger.info("已启动日志清理任务,将自动清理30天前的日志文件(轮转份数限制: 30个文件)")
 
 def get_log_stats():
     """Get statistics about the log files"""
-    stats = {"total_files": 0, "total_size": 0, "compressed_files": 0, "uncompressed_files": 0, "files": []}
+    stats = {"total_files": 0, "total_size": 0, "files": []}
 
     try:
         if not LOG_DIR.exists():
@@ -1049,18 +1016,12 @@ def get_log_stats():
"name": log_file.name, "name": log_file.name,
"size": log_file.stat().st_size, "size": log_file.stat().st_size,
"modified": datetime.fromtimestamp(log_file.stat().st_mtime).strftime("%Y-%m-%d %H:%M:%S"), "modified": datetime.fromtimestamp(log_file.stat().st_mtime).strftime("%Y-%m-%d %H:%M:%S"),
"compressed": log_file.suffix == ".gz",
} }
stats["files"].append(file_info) stats["files"].append(file_info)
stats["total_files"] += 1 stats["total_files"] += 1
stats["total_size"] += file_info["size"] stats["total_size"] += file_info["size"]
if file_info["compressed"]:
stats["compressed_files"] += 1
else:
stats["uncompressed_files"] += 1
# 按修改时间排序 # 按修改时间排序
stats["files"].sort(key=lambda x: x["modified"], reverse=True) stats["files"].sort(key=lambda x: x["modified"], reverse=True)