diff --git a/src/chat/planner_actions/planner.py b/src/chat/planner_actions/planner.py index 3dbf71956..bb27624b6 100644 --- a/src/chat/planner_actions/planner.py +++ b/src/chat/planner_actions/planner.py @@ -319,7 +319,7 @@ class ActionPlanner: action_data = {k: v for k, v in action_json.items() if k not in ["action", "reason"]} target_message = None - if action not in ["no_action", "no_reply"]: + if action not in ["no_action", "no_reply", "do_nothing"]: if target_message_id := action_json.get("target_message_id"): target_message = self.find_message_by_id(target_message_id, message_id_list) if target_message is None: @@ -329,7 +329,7 @@ class ActionPlanner: logger.warning(f"{self.log_prefix}动作'{action}'缺少target_message_id") available_action_names = [name for name, _ in current_available_actions] - if action not in ["no_action", "no_reply", "reply"] and action not in available_action_names: + if action not in ["no_action", "no_reply", "reply", "do_nothing"] and action not in available_action_names: logger.warning( f"{self.log_prefix}LLM 返回了当前不可用或无效的动作: '{action}' (可用: {available_action_names}),将强制使用 'no_action'" ) diff --git a/src/chat/replyer/default_generator.py b/src/chat/replyer/default_generator.py index 2101d5afd..5b2a1741a 100644 --- a/src/chat/replyer/default_generator.py +++ b/src/chat/replyer/default_generator.py @@ -868,6 +868,9 @@ class DefaultReplyer: sender, target = self._parse_reply_target(reply_to) else: # 获取 platform,如果不存在则从 chat_stream 获取,如果还是 None 则使用默认值 + if reply_message is None: + logger.warning("reply_message 为 None,无法构建prompt") + return "" platform = reply_message.get("chat_info_platform") person_id = person_info_manager.get_person_id( platform, # type: ignore diff --git a/src/plugins/built_in/emoji_plugin/emoji.py b/src/plugins/built_in/emoji_plugin/emoji.py index a493b5ea4..695124fd7 100644 --- a/src/plugins/built_in/emoji_plugin/emoji.py +++ b/src/plugins/built_in/emoji_plugin/emoji.py @@ -127,7 +127,7 @@ class EmojiAction(BaseAction): # 
5. 调用LLM
         models = llm_api.get_available_models()
-        chat_model_config = models.get("utils_small")
+        chat_model_config = models.get("planner")
         if not chat_model_config:
-            logger.error(f"{self.log_prefix} 未找到'utils_small'模型配置,无法调用LLM")
-            return False, "未找到'utils_small'模型配置"
+            logger.error(f"{self.log_prefix} 未找到'planner'模型配置,无法调用LLM")
+            return False, "未找到'planner'模型配置"