diff --git a/src/plugins/chat/bot.py b/src/plugins/chat/bot.py
index 73296c1da..4306c0f9d 100644
--- a/src/plugins/chat/bot.py
+++ b/src/plugins/chat/bot.py
@@ -109,13 +109,8 @@ class ChatBot:
         willing_manager.change_reply_willing_sent(thinking_message.group_id)
         response,raw_content = await self.gpt.generate_response(message)
-
-        # if response is None:
-        #     thinking_message.interupt=True
         if response:
-            # print(f"\033[1;32m[思考结束]\033[0m 思考结束,已得到回复,开始回复")
-            # 找到并删除对应的thinking消息
             container = message_manager.get_container(event.group_id)
             thinking_message = None
             # 找到message,删除
diff --git a/src/plugins/chat/config.py b/src/plugins/chat/config.py
index 0b47e5d3d..dfce0cd64 100644
--- a/src/plugins/chat/config.py
+++ b/src/plugins/chat/config.py
@@ -45,13 +45,7 @@ class BotConfig:
     llm_normal_minor: Dict[str, str] = field(default_factory=lambda: {})
     embedding: Dict[str, str] = field(default_factory=lambda: {})
     vlm: Dict[str, str] = field(default_factory=lambda: {})
-    rerank: Dict[str, str] = field(default_factory=lambda: {})
-    # 主题提取配置
-    llm_topic_extract: Dict[str, str] = field(default_factory=lambda: {})
-
-    API_USING: str = "siliconflow"  # 使用的API
-    API_PAID: bool = False  # 是否使用付费API
     MODEL_R1_PROBABILITY: float = 0.8  # R1模型概率
     MODEL_V3_PROBABILITY: float = 0.1  # V3模型概率
     MODEL_R1_DISTILL_PROBABILITY: float = 0.1  # R1蒸馏模型概率
@@ -134,7 +128,6 @@ class BotConfig:
         if "llm_normal" in model_config:
             config.llm_normal = model_config["llm_normal"]
-            config.llm_topic_extract = config.llm_normal
         if "llm_normal_minor" in model_config:
             config.llm_normal_minor = model_config["llm_normal_minor"]