From 362dda1ab38b8b6cd00196bbfdcd961f739b0b07 Mon Sep 17 00:00:00 2001 From: Rikki Date: Sun, 30 Mar 2025 05:00:11 +0800 Subject: [PATCH] =?UTF-8?q?feat:=20=E5=A2=9E=E5=8A=A0stream=E6=8E=A7?= =?UTF-8?q?=E5=88=B6=E5=AD=97=E6=AE=B5=EF=BC=8C=E4=BF=AE=E5=A4=8Dqwq?= =?UTF-8?q?=E4=B8=8D=E8=83=BD=E5=B7=A5=E4=BD=9C=E7=9A=84=E9=97=AE=E9=A2=98?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/plugins/config/config.py | 6 +++++- src/plugins/models/utils_model.py | 5 ++--- template/bot_config_template.toml | 8 +++++++- 3 files changed, 14 insertions(+), 5 deletions(-) diff --git a/src/plugins/config/config.py b/src/plugins/config/config.py index 3d60403d0..66c3af659 100644 --- a/src/plugins/config/config.py +++ b/src/plugins/config/config.py @@ -309,13 +309,17 @@ class BotConfig: # base_url 的例子: SILICONFLOW_BASE_URL # key 的例子: SILICONFLOW_KEY - cfg_target = {"name": "", "base_url": "", "key": "", "pri_in": 0, "pri_out": 0} + cfg_target = {"name": "", "base_url": "", "key": "", "stream": False, "pri_in": 0, "pri_out": 0} if config.INNER_VERSION in SpecifierSet("<=0.0.0"): cfg_target = cfg_item elif config.INNER_VERSION in SpecifierSet(">=0.0.1"): stable_item = ["name", "pri_in", "pri_out"] + + if config.INNER_VERSION in SpecifierSet(">=1.0.1"): + stable_item.append("stream") + pricing_item = ["pri_in", "pri_out"] # 从配置中原始拷贝稳定字段 for i in stable_item: diff --git a/src/plugins/models/utils_model.py b/src/plugins/models/utils_model.py index 40809d59c..6c2fba5c8 100644 --- a/src/plugins/models/utils_model.py +++ b/src/plugins/models/utils_model.py @@ -12,8 +12,6 @@ import io import os from ...common.database import db from ..config.config import global_config -from ..config.config_env import env_config - logger = get_module_logger("model_utils") @@ -42,6 +40,7 @@ class LLM_request: self.model_name = model["name"] self.params = kwargs + self.stream = model.get("stream", False) self.pri_in = model.get("pri_in", 0) self.pri_out = 
model.get("pri_out", 0) @@ -175,7 +174,7 @@ class LLM_request: api_url = f"{self.base_url.rstrip('/')}/{endpoint.lstrip('/')}" # 判断是否为流式 - stream_mode = self.params.get("stream", False) + stream_mode = self.stream # logger_msg = "进入流式输出模式," if stream_mode else "" # logger.debug(f"{logger_msg}发送请求到URL: {api_url}") # logger.info(f"使用模型: {self.model_name}") diff --git a/template/bot_config_template.toml b/template/bot_config_template.toml index dceeb7569..7567cdf61 100644 --- a/template/bot_config_template.toml +++ b/template/bot_config_template.toml @@ -1,5 +1,5 @@ [inner] -version = "1.0.0" +version = "1.0.1" [mai_version] version = "0.6.0" @@ -149,6 +149,12 @@ enable_think_flow = false # 是否启用思维流 注意:可能会消耗大量 #下面的模型若使用硅基流动则不需要更改,使用ds官方则改成.env自定义的宏,使用自定义模型则选择定位相似的模型自己填写 #推理模型 +# 额外字段 +# 下面的模型有以下额外字段可以添加: + +# stream = true/false : 用于指定模型是否使用流式输出 +# 如果不指定,则该项是 False + [model.llm_reasoning] #回复模型1 主要回复模型 name = "Pro/deepseek-ai/DeepSeek-R1" # name = "Qwen/QwQ-32B"