Merge branch 'dev' of https://github.com/MaiM-with-u/MaiBot into dev

@@ -5,7 +5,7 @@ from src.chat.message_receive.message import Seg # Local import needed after mo
 from src.chat.message_receive.message import UserInfo
 from src.chat.message_receive.chat_stream import chat_manager
 from src.common.logger_manager import get_logger
-from src.chat.models.utils_model import LLMRequest
+from src.llm_models.utils_model import LLMRequest
 from src.config.config import global_config
 from src.chat.utils.utils_image import image_path_to_base64 # Local import needed after move
 from src.chat.utils.timer_calculator import Timer # <--- Import Timer

@@ -2,7 +2,7 @@ import time
 import random
 from typing import List, Dict, Optional, Any, Tuple
 from src.common.logger_manager import get_logger
-from src.chat.models.utils_model import LLMRequest
+from src.llm_models.utils_model import LLMRequest
 from src.config.config import global_config
 from src.chat.utils.chat_message_builder import get_raw_msg_by_timestamp_random, build_anonymous_messages
 from src.chat.focus_chat.heartflow_prompt_builder import Prompt, global_prompt_manager

@@ -425,7 +425,10 @@ class HeartFChatting:
         self.all_observations = observations

+        with Timer("回忆", cycle_timers):
+            logger.debug(f"{self.log_prefix} 开始回忆")
             running_memorys = await self.memory_activator.activate_memory(observations)
+            logger.debug(f"{self.log_prefix} 回忆完成")
         print(running_memorys)

         with Timer("执行 信息处理器", cycle_timers):
             all_plan_info = await self._process_processors(observations, running_memorys, cycle_timers)

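For context on the wrapper added here: judging by its use and by the `src.chat.utils.timer_calculator` import in the first hunk, `Timer` looks like a context manager that records the elapsed time of its block into the `cycle_timers` dict under the given label. A minimal sketch of that assumed interface, not the project's actual implementation:

    import time
    from typing import Dict

    class Timer:
        """Sketch of the assumed interface; the real Timer may differ."""

        def __init__(self, label: str, store: Dict[str, float]):
            self.label = label
            self.store = store

        def __enter__(self):
            self._start = time.monotonic()
            return self

        def __exit__(self, exc_type, exc, tb):
            # Record elapsed seconds under the label, e.g. cycle_timers["回忆"]
            self.store[self.label] = time.monotonic() - self._start

    cycle_timers: Dict[str, float] = {}
    with Timer("回忆", cycle_timers):
        time.sleep(0.01)  # stand-in for the awaited memory activation
    print(cycle_timers)  # {'回忆': ~0.01}
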
@@ -11,7 +11,7 @@ from ..message_receive.chat_stream import chat_manager

 # from ..message_receive.message_buffer import message_buffer
 from ..utils.timer_calculator import Timer
-from src.chat.person_info.relationship_manager import relationship_manager
+from src.person_info.relationship_manager import relationship_manager
 from typing import Optional, Tuple, Dict, Any

 logger = get_logger("chat")

@@ -3,7 +3,7 @@ from src.common.logger_manager import get_logger
 from src.individuality.individuality import individuality
 from src.chat.utils.prompt_builder import Prompt, global_prompt_manager
 from src.chat.utils.chat_message_builder import build_readable_messages, get_raw_msg_before_timestamp_with_chat
-from src.chat.person_info.relationship_manager import relationship_manager
+from src.person_info.relationship_manager import relationship_manager
 import time
 from typing import Optional
 from src.chat.utils.utils import get_recent_group_speaker

@@ -37,4 +37,4 @@ class SelfInfo(InfoBase):
         Returns:
             str: 处理后的信息
         """
-        return self.get_self_info()
+        return self.get_self_info() or ""

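The `or ""` fallback keeps the documented `str` return type honest when `get_self_info()` comes back falsy (e.g. `None`). A trivial illustration with a hypothetical stand-in:

    def get_self_info():
        return None  # hypothetical: upstream produced no self info

    print(repr(get_self_info() or ""))  # '' rather than None
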
@@ -67,3 +67,16 @@ class StructuredInfo:
             value: 要设置的属性值
         """
         self.data[key] = value
+
+    def get_processed_info(self) -> str:
+        """获取处理后的信息
+
+        Returns:
+            str: 处理后的信息字符串
+        """
+
+        info_str = ""
+        for key, value in self.data.items():
+            info_str += f"信息类型:{key},信息内容:{value}\n"
+
+        return info_str

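Usage sketch for the new accessor. The stand-in class below mirrors only the `data` dict and method shown in this hunk; the real class's constructor and setters are not part of this diff:

    class StructuredInfo:
        def __init__(self):
            self.data = {}

        def get_processed_info(self) -> str:
            info_str = ""
            for key, value in self.data.items():
                info_str += f"信息类型:{key},信息内容:{value}\n"
            return info_str

    info = StructuredInfo()
    info.data["天气"] = "晴"   # hypothetical entries, set via the method documented above
    info.data["心情"] = "平静"
    print(info.get_processed_info())
    # 信息类型:天气,信息内容:晴
    # 信息类型:心情,信息内容:平静
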
@@ -8,7 +8,7 @@ from src.chat.heart_flow.observation.hfcloop_observation import HFCloopObservati
 from src.chat.heart_flow.observation.chatting_observation import ChattingObservation
 from src.chat.message_receive.chat_stream import ChatStream, chat_manager
 from typing import Dict
-from src.chat.models.utils_model import LLMRequest
+from src.llm_models.utils_model import LLMRequest
 from src.config.config import global_config
 import random

@@ -9,7 +9,7 @@ from src.chat.heart_flow.observation.hfcloop_observation import HFCloopObservati
 from src.chat.focus_chat.info.cycle_info import CycleInfo
 from datetime import datetime
 from typing import Dict
-from src.chat.models.utils_model import LLMRequest
+from src.llm_models.utils_model import LLMRequest
 from src.config.config import global_config

 logger = get_logger("processor")

@@ -1,6 +1,6 @@
 from src.chat.heart_flow.observation.chatting_observation import ChattingObservation
 from src.chat.heart_flow.observation.observation import Observation
-from src.chat.models.utils_model import LLMRequest
+from src.llm_models.utils_model import LLMRequest
 from src.config.config import global_config
 import time
 import traceback
@@ -9,7 +9,7 @@ from src.individuality.individuality import individuality
 from src.chat.utils.prompt_builder import Prompt, global_prompt_manager
 from src.chat.utils.json_utils import safe_json_dumps
 from src.chat.message_receive.chat_stream import chat_manager
-from src.chat.person_info.relationship_manager import relationship_manager
+from src.person_info.relationship_manager import relationship_manager
 from .base_processor import BaseProcessor
 from src.chat.focus_chat.info.mind_info import MindInfo
 from typing import List, Optional

@@ -1,6 +1,6 @@
 from src.chat.heart_flow.observation.chatting_observation import ChattingObservation
 from src.chat.heart_flow.observation.observation import Observation
-from src.chat.models.utils_model import LLMRequest
+from src.llm_models.utils_model import LLMRequest
 from src.config.config import global_config
 import time
 import traceback
@@ -8,7 +8,7 @@ from src.common.logger_manager import get_logger
 from src.individuality.individuality import individuality
 from src.chat.utils.prompt_builder import Prompt, global_prompt_manager
 from src.chat.message_receive.chat_stream import chat_manager
-from src.chat.person_info.relationship_manager import relationship_manager
+from src.person_info.relationship_manager import relationship_manager
 from .base_processor import BaseProcessor
 from typing import List, Optional
 from src.chat.heart_flow.observation.hfcloop_observation import HFCloopObservation
@@ -33,12 +33,13 @@ def init_prompt():

 现在请你根据现有的信息,思考自我认同
 1. 你是一个什么样的人,你和群里的人关系如何
-2. 思考有没有人提到你,或者图片与你有关
-3. 你的自我认同是否有助于你的回答,如果你需要自我相关的信息来帮你参与聊天,请输出,否则请输出十个字以内的简短自我认同
-4. 一般情况下不用输出自我认同,只需要输出十几个字的简短自我认同就好,除非有明显需要自我认同的场景
+2. 你的形象是什么
+3. 思考有没有人提到你,或者图片与你有关
+4. 你的自我认同是否有助于你的回答,如果你需要自我相关的信息来帮你参与聊天,请输出,否则请输出十几个字的简短自我认同
+5. 一般情况下不用输出自我认同,只需要输出十几个字的简短自我认同就好,除非有明显需要自我认同的场景

-请思考的平淡一些,简短一些,说中文,不要浮夸,平淡一些。
-请注意不要输出多余内容(包括前后缀,冒号和引号,括号(),表情包,at或 @等 )。只输出自我认同内容。
+输出内容平淡一些,说中文,不要浮夸,平淡一些。
+请注意不要输出多余内容(包括前后缀,冒号和引号,括号(),表情包,at或 @等 )。只输出自我认同内容,记得明确说明这是你的自我认同。

 """
     Prompt(indentify_prompt, "indentify_prompt")

@@ -1,5 +1,5 @@
 from src.chat.heart_flow.observation.chatting_observation import ChattingObservation
-from src.chat.models.utils_model import LLMRequest
+from src.llm_models.utils_model import LLMRequest
 from src.config.config import global_config
 import time
 from src.common.logger_manager import get_logger
@@ -7,7 +7,7 @@ from src.individuality.individuality import individuality
 from src.chat.utils.prompt_builder import Prompt, global_prompt_manager
 from src.tools.tool_use import ToolUser
 from src.chat.utils.json_utils import process_llm_tool_calls
-from src.chat.person_info.relationship_manager import relationship_manager
+from src.person_info.relationship_manager import relationship_manager
 from .base_processor import BaseProcessor
 from typing import List, Optional, Dict
 from src.chat.heart_flow.observation.observation import Observation

@@ -1,6 +1,6 @@
 from src.chat.heart_flow.observation.chatting_observation import ChattingObservation
 from src.chat.heart_flow.observation.observation import Observation
-from src.chat.models.utils_model import LLMRequest
+from src.llm_models.utils_model import LLMRequest
 from src.config.config import global_config
 import time
 import traceback

@@ -1,7 +1,7 @@
 from src.chat.heart_flow.observation.chatting_observation import ChattingObservation
 from src.chat.heart_flow.observation.structure_observation import StructureObservation
 from src.chat.heart_flow.observation.hfcloop_observation import HFCloopObservation
-from src.chat.models.utils_model import LLMRequest
+from src.llm_models.utils_model import LLMRequest
 from src.config.config import global_config
 from src.common.logger_manager import get_logger
 from src.chat.utils.prompt_builder import Prompt
@@ -61,6 +61,8 @@ class MemoryActivator:
             elif isinstance(observation, HFCloopObservation):
                 obs_info_text += observation.get_observe_info()

+        logger.debug(f"回忆待检索内容:obs_info_text: {obs_info_text}")
+
         # prompt = await global_prompt_manager.format_prompt(
         #     "memory_activator_prompt",
         #     obs_info_text=obs_info_text,
@@ -81,7 +83,7 @@ class MemoryActivator:
         #     valid_keywords=keywords, max_memory_num=3, max_memory_length=2, max_depth=3
         # )
         related_memory = await HippocampusManager.get_instance().get_memory_from_text(
-            text=obs_info_text, max_memory_num=3, max_memory_length=2, max_depth=3, fast_retrieval=True
+            text=obs_info_text, max_memory_num=5, max_memory_length=2, max_depth=3, fast_retrieval=True
         )

         # logger.debug(f"获取到的记忆: {related_memory}")

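A hedged reading of this tuning change: `max_memory_num` appears to cap how many memories fast retrieval may return per cycle, so raising it from 3 to 5 lets recall surface more candidates. The other parameter meanings below are inferred from their names, not documented in this diff:

    related_memory = await HippocampusManager.get_instance().get_memory_from_text(
        text=obs_info_text,   # query assembled from the observations above
        max_memory_num=5,     # at most 5 memories returned (previously 3)
        max_memory_length=2,  # assumed cap on each memory's length/segments
        max_depth=3,          # assumed association-graph search depth
        fast_retrieval=True,  # assumed cheaper, approximate retrieval path
    )
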
@@ -1,11 +1,14 @@
 import traceback
 from typing import Tuple, Dict, List, Any, Optional
-from src.chat.focus_chat.planners.actions.base_action import BaseAction
+from src.chat.focus_chat.planners.actions.base_action import BaseAction, register_action # noqa F401
 from src.chat.heart_flow.observation.chatting_observation import ChattingObservation
 from src.chat.focus_chat.hfc_utils import create_empty_anchor_message
 from src.common.logger_manager import get_logger
-from src.chat.person_info.person_info import person_info_manager
+from src.person_info.person_info import person_info_manager
 from abc import abstractmethod
+import os
+import inspect
+import toml # 导入 toml 库

 logger = get_logger("plugin_action")

@@ -16,12 +19,24 @@ class PluginAction(BaseAction):
     封装了主程序内部依赖,提供简化的API接口给插件开发者
     """

-    def __init__(self, action_data: dict, reasoning: str, cycle_timers: dict, thinking_id: str, **kwargs):
+    action_config_file_name: Optional[str] = None # 插件可以覆盖此属性来指定配置文件名
+
+    def __init__(
+        self,
+        action_data: dict,
+        reasoning: str,
+        cycle_timers: dict,
+        thinking_id: str,
+        global_config: Optional[dict] = None,
+        **kwargs,
+    ):
         """初始化插件动作基类"""
         super().__init__(action_data, reasoning, cycle_timers, thinking_id)

         # 存储内部服务和对象引用
         self._services = {}
+        self._global_config = global_config # 存储全局配置的只读引用
+        self.config: Dict[str, Any] = {} # 用于存储插件自身的配置

         # 从kwargs提取必要的内部服务
         if "observations" in kwargs:
@@ -32,6 +47,61 @@ class PluginAction(BaseAction):
             self._services["chat_stream"] = kwargs["chat_stream"]

         self.log_prefix = kwargs.get("log_prefix", "")
+        self._load_plugin_config() # 初始化时加载插件配置
+
+    def _load_plugin_config(self):
+        """
+        加载插件自身的配置文件。
+        配置文件应与插件模块在同一目录下。
+        插件可以通过覆盖 `action_config_file_name` 类属性来指定文件名。
+        如果 `action_config_file_name` 未指定,则不加载配置。
+        仅支持 TOML (.toml) 格式。
+        """
+        if not self.action_config_file_name:
+            logger.debug(
+                f"{self.log_prefix} 插件 {self.__class__.__name__} 未指定 action_config_file_name,不加载插件配置。"
+            )
+            return
+
+        try:
+            plugin_module_path = inspect.getfile(self.__class__)
+            plugin_dir = os.path.dirname(plugin_module_path)
+            config_file_path = os.path.join(plugin_dir, self.action_config_file_name)
+
+            if not os.path.exists(config_file_path):
+                logger.warning(
+                    f"{self.log_prefix} 插件 {self.__class__.__name__} 的配置文件 {config_file_path} 不存在。"
+                )
+                return
+
+            file_ext = os.path.splitext(self.action_config_file_name)[1].lower()
+
+            if file_ext == ".toml":
+                with open(config_file_path, "r", encoding="utf-8") as f:
+                    self.config = toml.load(f) or {}
+                logger.info(f"{self.log_prefix} 插件 {self.__class__.__name__} 的配置已从 {config_file_path} 加载。")
+            else:
+                logger.warning(
+                    f"{self.log_prefix} 不支持的插件配置文件格式: {file_ext}。仅支持 .toml。插件配置未加载。"
+                )
+                self.config = {} # 确保未加载时为空字典
+                return
+
+        except Exception as e:
+            logger.error(
+                f"{self.log_prefix} 加载插件 {self.__class__.__name__} 的配置文件 {self.action_config_file_name} 时出错: {e}"
+            )
+            self.config = {} # 出错时确保 config 是一个空字典
+
+    def get_global_config(self, key: str, default: Any = None) -> Any:
+        """
+        安全地从全局配置中获取一个值。
+        插件应使用此方法读取全局配置,以保证只读和隔离性。
+        """
+        if self._global_config:
+            return self._global_config.get(key, default)
+        logger.debug(f"{self.log_prefix} 尝试访问全局配置项 '{key}',但全局配置未提供。")
+        return default
+
     async def get_user_id_by_person_name(self, person_name: str) -> Tuple[str, str]:
         """根据用户名获取用户ID"""

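Taken together, the new hooks imply plugin code along these lines. This is a hypothetical sketch against the API added above; the module path, hook name, config file, and keys are invented for illustration:

    # greet_action.py — hypothetical plugin module, shipped in its own directory
    # next to a config.toml such as:
    #
    #   [greeting]
    #   text = "hello"

    # Import path is an assumption based on this diff's context:
    from src.chat.focus_chat.planners.actions.plugin_action import PluginAction

    class GreetAction(PluginAction):
        # Opt in to per-plugin config: _load_plugin_config() looks for this file
        # next to this module and parses it with toml.load() during __init__.
        action_config_file_name = "config.toml"

        async def run(self):  # hypothetical entry point; not defined in this diff
            text = self.config.get("greeting", {}).get("text", "hi")      # plugin-local config
            bot_name = self.get_global_config("nickname", default="bot")  # read-only global lookup
            return f"{bot_name} says {text}"
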
@@ -2,7 +2,7 @@ import json # <--- 确保导入 json
 import traceback
 from typing import List, Dict, Any, Optional
 from rich.traceback import install
-from src.chat.models.utils_model import LLMRequest
+from src.llm_models.utils_model import LLMRequest
 from src.config.config import global_config
 from src.chat.focus_chat.info.info_base import InfoBase
 from src.chat.focus_chat.info.obs_info import ObsInfo
@@ -10,6 +10,7 @@ from src.chat.focus_chat.info.cycle_info import CycleInfo
 from src.chat.focus_chat.info.mind_info import MindInfo
 from src.chat.focus_chat.info.action_info import ActionInfo
 from src.chat.focus_chat.info.structured_info import StructuredInfo
+from src.chat.focus_chat.info.self_info import SelfInfo
 from src.common.logger_manager import get_logger
 from src.chat.utils.prompt_builder import Prompt, global_prompt_manager
 from src.individuality.individuality import individuality
@@ -22,7 +23,11 @@ install(extra_lines=3)

 def init_prompt():
     Prompt(
-        """{extra_info_block}
+        """
+你的自我认知是:
+{self_info_block}
+
+{extra_info_block}

 你需要基于以下信息决定如何参与对话
 这些信息可能会有冲突,请你整合这些信息,并选择一个最合适的action:
@@ -127,6 +132,8 @@ class ActionPlanner:
                     current_mind = info.get_current_mind()
                 elif isinstance(info, CycleInfo):
                     cycle_info = info.get_observe_info()
+                elif isinstance(info, SelfInfo):
+                    self_info = info.get_processed_info()
                 elif isinstance(info, StructuredInfo):
                     _structured_info = info.get_data()
                 elif not isinstance(info, ActionInfo): # 跳过已处理的ActionInfo
@@ -148,6 +155,7 @@ class ActionPlanner:

         # --- 构建提示词 (调用修改后的 PromptBuilder 方法) ---
         prompt = await self.build_planner_prompt(
+            self_info_block=self_info,
             is_group_chat=is_group_chat, # <-- Pass HFC state
             chat_target_info=None,
             observed_messages_str=observed_messages_str, # <-- Pass local variable
@@ -236,6 +244,7 @@ class ActionPlanner:

     async def build_planner_prompt(
         self,
+        self_info_block: str,
         is_group_chat: bool, # Now passed as argument
         chat_target_info: Optional[dict], # Now passed as argument
         observed_messages_str: str,
@@ -301,7 +310,8 @@ class ActionPlanner:

         planner_prompt_template = await global_prompt_manager.get_prompt_async("planner_prompt")
         prompt = planner_prompt_template.format(
-            bot_name=global_config.bot.nickname,
+            self_info_block=self_info_block,
+            # bot_name=global_config.bot.nickname,
             prompt_personality=personality_block,
             chat_context_description=chat_context_description,
             chat_content_block=chat_content_block,

@@ -3,7 +3,7 @@ import traceback
 from json_repair import repair_json
 from rich.traceback import install
 from src.common.logger_manager import get_logger
-from src.chat.models.utils_model import LLMRequest
+from src.llm_models.utils_model import LLMRequest
 from src.config.config import global_config
 from src.chat.focus_chat.working_memory.memory_item import MemoryItem
 import json # 添加json模块导入