From 9f49b9d238346205dc3120b66253afb5053847dc Mon Sep 17 00:00:00 2001
From: SengokuCola <1026294844@qq.com>
Date: Sun, 30 Mar 2025 10:22:04 +0800
Subject: [PATCH] =?UTF-8?q?fix=EF=BC=9A=E5=BF=83=E8=82=BA=E5=A4=8D?=
 =?UTF-8?q?=E8=8B=8F?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/main.py                       | 1 +
 src/plugins/remote/remote.py      | 8 +++++---
 template/bot_config_template.toml | 2 ++
 3 files changed, 8 insertions(+), 3 deletions(-)

diff --git a/src/main.py b/src/main.py
index 7a23e366c..b96f95bbd 100644
--- a/src/main.py
+++ b/src/main.py
@@ -14,6 +14,7 @@ from .plugins.chat.storage import MessageStorage
 from .plugins.config.config import global_config
 from .plugins.chat.bot import chat_bot
 from .common.logger import get_module_logger
+from .plugins.remote import heartbeat_thread
 
 logger = get_module_logger("main")
 
diff --git a/src/plugins/remote/remote.py b/src/plugins/remote/remote.py
index 69e18ba79..2b319ed3b 100644
--- a/src/plugins/remote/remote.py
+++ b/src/plugins/remote/remote.py
@@ -57,18 +57,20 @@ def send_heartbeat(server_url, client_id):
         data = json.dumps(
             {"system": sys, "Version": global_config.MAI_VERSION},
         )
+        logger.info(f"正在发送心跳到服务器: {server_url}")
+        logger.debug(f"心跳数据: {data}")
         response = requests.post(f"{server_url}/api/clients", headers=headers, data=data)
 
         if response.status_code == 201:
             data = response.json()
-            logger.debug(f"心跳发送成功。服务器响应: {data}")
+            logger.info(f"心跳发送成功。服务器响应: {data}")
             return True
         else:
-            logger.debug(f"心跳发送失败。状态码: {response.status_code}")
+            logger.error(f"心跳发送失败。状态码: {response.status_code}, 响应内容: {response.text}")
             return False
 
     except requests.RequestException as e:
-        logger.debug(f"发送心跳时出错: {e}")
+        logger.error(f"发送心跳时出错: {e}")
         return False
 
 
diff --git a/template/bot_config_template.toml b/template/bot_config_template.toml
index 7567cdf61..34477b9fd 100644
--- a/template/bot_config_template.toml
+++ b/template/bot_config_template.toml
@@ -213,6 +213,8 @@ pri_out = 0.35
 [model.embedding] #嵌入
 name = "BAAI/bge-m3"
 provider = "SILICONFLOW"
+pri_in = 0
+pri_out = 0
 
 #测试模型,给think_glow用,如果你没开实验性功能,随便写就行,但是要有
 [model.llm_outer_world] #外世界判断:建议使用qwen2.5 7b
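
The commit subject decodes to "fix：心肺复苏" ("fix: CPR", i.e. bringing the heartbeat back); the remote.py hunk promotes heartbeat logging from debug-only to info/error and adds the response body on failure. For readers skimming the diff, the sketch below shows the resulting send_heartbeat flow in isolation, with the Chinese log strings rendered in English. Only the /api/clients endpoint, payload keys, status handling, and log levels come from the patch; the headers, platform detection, logger setup, and MAI_VERSION stand-in are assumptions for illustration.

```python
# Minimal sketch of the send_heartbeat flow touched by the remote.py hunk.
# Assumptions (not shown in the diff): header contents, how the "system"
# field is collected, and the logging setup.
import json
import logging
import platform

import requests

logger = logging.getLogger("remote")

MAI_VERSION = "0.0.0"  # stand-in for global_config.MAI_VERSION


def send_heartbeat(server_url: str, client_id: str) -> bool:
    headers = {"Content-Type": "application/json", "Client-ID": client_id}  # assumed
    sys_info = platform.system()  # assumed source of the "system" field
    data = json.dumps({"system": sys_info, "Version": MAI_VERSION})
    try:
        # The patch raises these messages from debug to info/debug so that
        # heartbeat activity shows up in normal logs.
        logger.info("Sending heartbeat to server: %s", server_url)
        logger.debug("Heartbeat payload: %s", data)
        response = requests.post(f"{server_url}/api/clients", headers=headers, data=data)

        if response.status_code == 201:
            logger.info("Heartbeat sent. Server response: %s", response.json())
            return True
        # Failures are now logged at ERROR level and include the response body.
        logger.error(
            "Heartbeat failed. Status code: %s, response: %s",
            response.status_code,
            response.text,
        )
        return False
    except requests.RequestException as e:
        logger.error("Error while sending heartbeat: %s", e)
        return False
```

The design choice matches the subject line: a heartbeat that dies silently at debug level is hard to notice, so failures are surfaced at ERROR with the server's response text, and main.py imports heartbeat_thread so the sender actually starts with the bot.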