diff --git a/src/chat/normal_chat/normal_chat.py b/src/chat/normal_chat/normal_chat.py
index 18185915a..c7edbff3b 100644
--- a/src/chat/normal_chat/normal_chat.py
+++ b/src/chat/normal_chat/normal_chat.py
@@ -29,7 +29,6 @@ import traceback
 
 from .normal_chat_generator import NormalChatGenerator
 from src.chat.normal_chat.normal_chat_expressor import NormalChatExpressor
-from src.chat.replyer.default_generator import DefaultReplyer
 from src.chat.normal_chat.normal_chat_planner import NormalChatPlanner
 from src.chat.normal_chat.normal_chat_action_modifier import NormalChatActionModifier
diff --git a/src/chat/replyer/default_generator.py b/src/chat/replyer/default_generator.py
index f1f79757e..8ebf45f6a 100644
--- a/src/chat/replyer/default_generator.py
+++ b/src/chat/replyer/default_generator.py
@@ -143,14 +143,14 @@ class DefaultReplyer:
     ):
         self.log_prefix = "replyer"
         self.request_type = request_type
-
+        self.enable_tool = enable_tool
         if model_configs:
             self.express_model_configs = model_configs
         else:
             # 当未提供配置时,使用默认配置并赋予默认权重
-
+
             model_config_1 = global_config.model.replyer_1.copy()
             model_config_2 = global_config.model.replyer_2.copy()
             prob_first = global_config.chat.replyer_random_probability
@@ -172,11 +172,7 @@ class DefaultReplyer:
         self.heart_fc_sender = HeartFCSender()
         self.memory_activator = MemoryActivator()
-        self.tool_executor = ToolExecutor(
-            chat_id=self.chat_stream.stream_id,
-            enable_cache=True,
-            cache_ttl=3
-        )
+        self.tool_executor = ToolExecutor(chat_id=self.chat_stream.stream_id, enable_cache=True, cache_ttl=3)
 
     def _select_weighted_model_config(self) -> Dict[str, Any]:
         """使用加权随机选择来挑选一个模型配置"""
@@ -575,8 +571,6 @@ class DefaultReplyer:
         else:
             tool_info_block = ""
 
-
-
         if extra_info_block:
             extra_info_block = f"以下是你在回复时需要参考的信息,现在请你阅读以下内容,进行决策\n{extra_info_block}\n以上是你在回复时需要参考的信息,现在请你阅读以下内容,进行决策"
         else:
diff --git a/src/config/config.py b/src/config/config.py
index b1b7e09d5..9beeed6ba 100644
--- a/src/config/config.py
+++ b/src/config/config.py
@@ -166,6 +166,7 @@ class Config(ConfigBase):
     lpmm_knowledge: LPMMKnowledgeConfig
     tool: ToolConfig
 
+
 def load_config(config_path: str) -> Config:
     """
     加载配置文件
diff --git a/src/config/official_configs.py b/src/config/official_configs.py
index 0ca3d9976..35248e7e7 100644
--- a/src/config/official_configs.py
+++ b/src/config/official_configs.py
@@ -337,6 +337,7 @@ class ExpressionConfig(ConfigBase):
     格式: [["qq:12345:group", "qq:67890:private"]]
     """
 
+
 @dataclass
 class ToolConfig(ConfigBase):
     """工具配置类"""
@@ -346,7 +347,8 @@ class ToolConfig(ConfigBase):
     enable_in_focus_chat: bool = True
     """是否在专注聊天中启用工具"""
-
+
+
 @dataclass
 class EmojiConfig(ConfigBase):
     """表情包配置类"""
diff --git a/src/plugin_system/apis/generator_api.py b/src/plugin_system/apis/generator_api.py
index 639afe9c1..9f7f136be 100644
--- a/src/plugin_system/apis/generator_api.py
+++ b/src/plugin_system/apis/generator_api.py
@@ -94,7 +94,9 @@ async def generate_reply(
     """
    try:
         # 获取回复器
-        replyer = get_replyer(chat_stream, chat_id, model_configs=model_configs, request_type=request_type, enable_tool=enable_tool)
+        replyer = get_replyer(
+            chat_stream, chat_id, model_configs=model_configs, request_type=request_type, enable_tool=enable_tool
+        )
         if not replyer:
             logger.error("[GeneratorAPI] 无法获取回复器")
             return False, []
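
Usage note (not part of the diff): a minimal sketch of how the new `enable_tool` flag is expected to flow from the plugin API into `DefaultReplyer`, based on the `get_replyer(...)` call and the `ToolExecutor(...)` construction shown above. The wrapper function name, the keyword-style call, the `request_type` string, and the import path are illustrative assumptions; only the `enable_tool` parameter and the `(False, [])` failure return are taken from the diff itself.

```python
# Hypothetical caller sketch -- assumes the repo's module layout and an async context.
from src.plugin_system.apis import generator_api


async def reply_with_tools(chat_stream):
    # enable_tool=True is forwarded to get_replyer(), so DefaultReplyer stores
    # self.enable_tool and builds its ToolExecutor(enable_cache=True, cache_ttl=3).
    success, replies = await generator_api.generate_reply(
        chat_stream=chat_stream,
        request_type="normal_chat.reply",  # assumed value for illustration
        enable_tool=True,
    )
    if not success:
        # generate_reply returns (False, []) when no replyer could be obtained
        return []
    return replies
```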