From 6f3cc2cb55c058788ff75738f34adcf0a743b87a Mon Sep 17 00:00:00 2001 From: SengokuCola <1026294844@qq.com> Date: Mon, 31 Mar 2025 23:26:38 +0800 Subject: [PATCH] =?UTF-8?q?better=EF=BC=9A=E4=BC=98=E5=8C=96=E4=BA=86?= =?UTF-8?q?=E7=BB=9F=E8=AE=A1=E4=BF=A1=E6=81=AF=EF=BC=8C=E4=BC=9A=E5=9C=A8?= =?UTF-8?q?=E6=8E=A7=E5=88=B6=E5=8F=B0=E6=98=BE=E7=A4=BA=E7=BB=9F=E8=AE=A1?= =?UTF-8?q?=E4=BF=A1=E6=81=AF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- changelogs/changelog.md | 11 ++- changelogs/changelog_config.md | 9 +++ src/heart_flow/observation.py | 2 +- src/plugins/chat/message_sender.py | 4 +- src/plugins/config/config.py | 2 +- src/plugins/utils/statistic.py | 126 +++++++++++++++++++++++++---- 6 files changed, 134 insertions(+), 20 deletions(-) diff --git a/changelogs/changelog.md b/changelogs/changelog.md index fd7cdda76..135d28fb7 100644 --- a/changelogs/changelog.md +++ b/changelogs/changelog.md @@ -8,12 +8,19 @@ - 精简代码结构,优化文件夹组织 - 新增详细统计系统 -#### 思维流系统(实验性功能) +#### 思维流系统 - 新增思维流作为实验功能 - 思维流大核+小核架构 - 思维流回复意愿模式 - 优化思维流自动启停机制,提升资源利用效率 - 思维流与日程系统联动,实现动态日程生成 +- 优化心流运行逻辑和思考时间计算 +- 添加错误检测机制 +- 修复心流无法观察群消息的问题 + +#### 回复系统 +- 优化回复逻辑,添加回复前思考机制 +- 移除推理模型在回复中的使用 #### 记忆系统优化 - 优化记忆抽取策略 @@ -92,6 +99,8 @@ - 优化代码风格和格式 - 完善异常处理机制 - 优化日志输出格式 +- 版本硬编码,新增配置自动更新功能 +- 更新日程生成器功能 ### 主要改进方向 1. 完善思维流系统功能 diff --git a/changelogs/changelog_config.md b/changelogs/changelog_config.md index e2a989d8d..32912f691 100644 --- a/changelogs/changelog_config.md +++ b/changelogs/changelog_config.md @@ -1,5 +1,14 @@ # Changelog +## [1.0.3] - 2025-3-31 +### Added +- 新增了心流相关配置项: + - `heartflow` 配置项,用于控制心流功能 + +### Removed +- 移除了 `response` 配置项中的 `model_r1_probability` 和 `model_v3_probability` 选项 +- 移除了次级推理模型相关配置 + ## [1.0.1] - 2025-3-30 ### Added - 增加了流式输出控制项 `stream` diff --git a/src/heart_flow/observation.py b/src/heart_flow/observation.py index 93057c5ab..b2ad3ce6f 100644 --- a/src/heart_flow/observation.py +++ b/src/heart_flow/observation.py @@ -33,7 +33,7 @@ class ChattingObservation(Observation): self.sub_observe = None self.llm_summary = LLM_request( - model=global_config.llm_observation, temperature=0.7, max_tokens=300, request_type="outer_world" + model=global_config.llm_observation, temperature=0.7, max_tokens=300, request_type="chat_observation" ) # 进行一次观察 返回观察结果observe_info diff --git a/src/plugins/chat/message_sender.py b/src/plugins/chat/message_sender.py index f18257c17..378ee6864 100644 --- a/src/plugins/chat/message_sender.py +++ b/src/plugins/chat/message_sender.py @@ -192,7 +192,7 @@ class MessageManager: print(thinking_time) if ( message_earliest.is_head - and message_earliest.update_thinking_time() > 8 + and message_earliest.update_thinking_time() > 18 and not message_earliest.is_private_message() # 避免在私聊时插入reply ): logger.debug(f"设置回复消息{message_earliest.processed_plain_text}") @@ -219,7 +219,7 @@ class MessageManager: # print(msg.is_private_message()) if ( msg.is_head - and msg.update_thinking_time() > 8 + and msg.update_thinking_time() > 18 and not msg.is_private_message() # 避免在私聊时插入reply ): logger.debug(f"设置回复消息{msg.processed_plain_text}") diff --git a/src/plugins/config/config.py b/src/plugins/config/config.py index be031f0e6..41ef7a3e8 100644 --- a/src/plugins/config/config.py +++ b/src/plugins/config/config.py @@ -25,7 +25,7 @@ logger = get_module_logger("config", config=config_config) #考虑到,实际上配置文件中的mai_version是不会自动更新的,所以采用硬编码 mai_version_main = "0.6.0" -mai_version_fix = "mmc-2" +mai_version_fix = "mmc-3" mai_version = 
f"{mai_version_main}-{mai_version_fix}" def update_config(): diff --git a/src/plugins/utils/statistic.py b/src/plugins/utils/statistic.py index 8e9ebb2cb..0ca0e4fa9 100644 --- a/src/plugins/utils/statistic.py +++ b/src/plugins/utils/statistic.py @@ -20,6 +20,7 @@ class LLMStatistics: self.output_file = output_file self.running = False self.stats_thread = None + self.console_thread = None self._init_database() def _init_database(self): @@ -32,15 +33,22 @@ class LLMStatistics: """启动统计线程""" if not self.running: self.running = True + # 启动文件统计线程 self.stats_thread = threading.Thread(target=self._stats_loop) self.stats_thread.daemon = True self.stats_thread.start() + # 启动控制台输出线程 + self.console_thread = threading.Thread(target=self._console_output_loop) + self.console_thread.daemon = True + self.console_thread.start() def stop(self): """停止统计线程""" self.running = False if self.stats_thread: self.stats_thread.join() + if self.console_thread: + self.console_thread.join() def _record_online_time(self): """记录在线时间""" @@ -126,10 +134,19 @@ class LLMStatistics: messages_cursor = db.messages.find({"time": {"$gte": start_time.timestamp()}}) for doc in messages_cursor: stats["total_messages"] += 1 - user_id = str(doc.get("user_info", {}).get("user_id", "unknown")) - chat_id = str(doc.get("chat_id", "unknown")) - stats["messages_by_user"][user_id] += 1 - stats["messages_by_chat"][chat_id] += 1 + # user_id = str(doc.get("user_info", {}).get("user_id", "unknown")) + chat_info = doc.get("chat_info", {}) + user_info = doc.get("user_info", {}) + group_info = chat_info.get("group_info") if chat_info else {} + # print(f"group_info: {group_info}") + group_name = "unknown" + if group_info: + group_name = group_info["group_name"] + if user_info and group_name == "unknown": + group_name = user_info["user_nickname"] + # print(f"group_name: {group_name}") + stats["messages_by_user"][user_id] += 1 + stats["messages_by_chat"][group_name] += 1 return stats @@ -201,17 +218,74 @@ class LLMStatistics: ) output.append("") - # 添加消息统计 - output.append("消息统计:") - output.append(("用户ID 消息数量")) - for user_id, count in sorted(stats["messages_by_user"].items()): - output.append(f"{user_id[:32]:<32} {count:>10}") + # 添加聊天统计 + output.append("群组统计:") + output.append(("群组名称 消息数量")) + for group_name, count in sorted(stats["messages_by_chat"].items()): + output.append(f"{group_name[:32]:<32} {count:>10}") + + return "\n".join(output) + + def _format_stats_section_lite(self, stats: Dict[str, Any], title: str) -> str: + """格式化统计部分的输出""" + output = [] + + output.append("\n" + "-" * 84) + output.append(f"{title}") + output.append("-" * 84) + + # output.append(f"总请求数: {stats['total_requests']}") + if stats["total_requests"] > 0: + # output.append(f"总Token数: {stats['total_tokens']}") + output.append(f"总花费: {stats['total_cost']:.4f}¥") + # output.append(f"在线时间: {stats['online_time_minutes']}分钟") + output.append(f"总消息数: {stats['total_messages']}\n") + + data_fmt = "{:<32} {:>10} {:>14} {:>13.4f} ¥" + + # 按模型统计 + output.append("按模型统计:") + output.append(("模型名称 调用次数 Token总量 累计花费")) + for model_name, count in sorted(stats["requests_by_model"].items()): + tokens = stats["tokens_by_model"][model_name] + cost = stats["costs_by_model"][model_name] + output.append( + data_fmt.format(model_name[:32] + ".." 
if len(model_name) > 32 else model_name, count, tokens, cost) + ) output.append("") - output.append("聊天统计:") - output.append(("聊天ID 消息数量")) - for chat_id, count in sorted(stats["messages_by_chat"].items()): - output.append(f"{chat_id[:32]:<32} {count:>10}") + # 按请求类型统计 + # output.append("按请求类型统计:") + # output.append(("模型名称 调用次数 Token总量 累计花费")) + # for req_type, count in sorted(stats["requests_by_type"].items()): + # tokens = stats["tokens_by_type"][req_type] + # cost = stats["costs_by_type"][req_type] + # output.append( + # data_fmt.format(req_type[:22] + ".." if len(req_type) > 24 else req_type, count, tokens, cost) + # ) + # output.append("") + + # 修正用户统计列宽 + # output.append("按用户统计:") + # output.append(("用户ID 调用次数 Token总量 累计花费")) + # for user_id, count in sorted(stats["requests_by_user"].items()): + # tokens = stats["tokens_by_user"][user_id] + # cost = stats["costs_by_user"][user_id] + # output.append( + # data_fmt.format( + # user_id[:22], # 不再添加省略号,保持原始ID + # count, + # tokens, + # cost, + # ) + # ) + # output.append("") + + # 添加聊天统计 + output.append("群组统计:") + output.append(("群组名称 消息数量")) + for group_name, count in sorted(stats["messages_by_chat"].items()): + output.append(f"{group_name[:32]:<32} {count:>10}") return "\n".join(output) @@ -237,8 +311,30 @@ class LLMStatistics: with open(self.output_file, "w", encoding="utf-8") as f: f.write("\n".join(output)) + def _console_output_loop(self): + """控制台输出循环,每5分钟输出一次最近1小时的统计""" + while self.running: + # 等待5分钟 + for _ in range(30): # 5分钟 = 300秒 + if not self.running: + break + time.sleep(1) + try: + # 收集最近1小时的统计数据 + now = datetime.now() + hour_stats = self._collect_statistics_for_period(now - timedelta(hours=1)) + + # 使用logger输出 + stats_output = self._format_stats_section_lite(hour_stats, "最近1小时统计:详细信息见根目录文件:llm_statistics.txt") + logger.info("\n" + stats_output + "\n" + "=" * 50) + + except Exception: + logger.exception("控制台统计数据输出失败") + + + def _stats_loop(self): - """统计循环,每1分钟运行一次""" + """统计循环,每5分钟运行一次""" while self.running: try: # 记录在线时间 @@ -250,7 +346,7 @@ class LLMStatistics: logger.exception("统计数据处理失败") # 等待5分钟 - for _ in range(30): # 5分钟 = 300秒 + for _ in range(300): # 5分钟 = 300秒 if not self.running: break time.sleep(1)
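
For reference, the periodic console reporting added by _console_output_loop above reduces to a small stop-responsive daemon-thread pattern: sleep in 1-second slices so stop() can interrupt the 5-minute wait quickly, then collect and log a report. The sketch below is illustrative only and not part of the patch; names such as PeriodicReporter and report() are invented for the example.

# Illustrative sketch only (not part of the patch): the stop-responsive periodic
# reporting pattern used by LLMStatistics._console_output_loop / _stats_loop.
import threading
import time


class PeriodicReporter:
    """Runs report() on a daemon thread every interval_seconds, stoppable within ~1 second."""

    def __init__(self, interval_seconds: int = 300):
        self.interval_seconds = interval_seconds
        self.running = False
        self.thread = None

    def start(self):
        if not self.running:
            self.running = True
            self.thread = threading.Thread(target=self._loop)
            self.thread.daemon = True  # do not keep the interpreter alive on exit
            self.thread.start()

    def stop(self):
        self.running = False
        if self.thread:
            self.thread.join()

    def _loop(self):
        while self.running:
            # Sleep in 1-second slices so stop() takes effect quickly instead of
            # blocking for the whole interval.
            for _ in range(self.interval_seconds):
                if not self.running:
                    break
                time.sleep(1)
            if self.running:
                self.report()

    def report(self):
        # Placeholder: the real code collects statistics from MongoDB and logs them.
        print("periodic statistics report")


if __name__ == "__main__":
    reporter = PeriodicReporter(interval_seconds=2)
    reporter.start()
    time.sleep(5)  # expect roughly two reports
    reporter.stop()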