fix: restructure the directory layout, refine the hfc prompt, remove the schedule feature, and remove the dynamic and LLM-based willing modes

This commit is contained in:
SengokuCola
2025-05-13 18:37:55 +08:00
parent 6376da0682
commit fed71bccad
131 changed files with 422 additions and 1500 deletions

View File

@@ -10,13 +10,13 @@ from time import sleep
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
-from src.plugins.knowledge.src.lpmmconfig import PG_NAMESPACE, global_config
+from src.chat.knowledge.src.lpmmconfig import PG_NAMESPACE, global_config
-from src.plugins.knowledge.src.embedding_store import EmbeddingManager
+from src.chat.knowledge.src.embedding_store import EmbeddingManager
-from src.plugins.knowledge.src.llm_client import LLMClient
+from src.chat.knowledge.src.llm_client import LLMClient
-from src.plugins.knowledge.src.open_ie import OpenIE
+from src.chat.knowledge.src.open_ie import OpenIE
-from src.plugins.knowledge.src.kg_manager import KGManager
+from src.chat.knowledge.src.kg_manager import KGManager
from src.common.logger import get_module_logger
-from src.plugins.knowledge.src.utils.hash import get_sha256
+from src.chat.knowledge.src.utils.hash import get_sha256
# 添加项目根目录到 sys.path

View File

@@ -13,11 +13,11 @@ sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
from rich.progress import Progress # 替换为 rich 进度条
from src.common.logger import get_module_logger
-from src.plugins.knowledge.src.lpmmconfig import global_config
+from src.chat.knowledge.src.lpmmconfig import global_config
-from src.plugins.knowledge.src.ie_process import info_extract_from_str
+from src.chat.knowledge.src.ie_process import info_extract_from_str
-from src.plugins.knowledge.src.llm_client import LLMClient
+from src.chat.knowledge.src.llm_client import LLMClient
-from src.plugins.knowledge.src.open_ie import OpenIE
+from src.chat.knowledge.src.open_ie import OpenIE
-from src.plugins.knowledge.src.raw_processing import load_raw_data
+from src.chat.knowledge.src.raw_processing import load_raw_data
from rich.progress import (
BarColumn,
TimeElapsedColumn,

View File

@@ -6,7 +6,7 @@ import datetime # 新增导入
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
from src.common.logger_manager import get_logger
-from src.plugins.knowledge.src.lpmmconfig import global_config
+from src.chat.knowledge.src.lpmmconfig import global_config
logger = get_logger("lpmm")
ROOT_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))

View File

@@ -34,14 +34,6 @@ class APIBotConfig:
gender: str # 性别
appearance: str # 外貌特征描述
-# schedule
-enable_schedule_gen: bool # 是否启用日程表
-enable_schedule_interaction: bool # 日程表是否影响回复模式
-prompt_schedule_gen: str # 日程生成提示词
-schedule_doing_update_interval: int # 日程表更新间隔(秒)
-schedule_temperature: float # 日程表温度
-time_zone: str # 时区
# platforms
platforms: Dict[str, str] # 平台信息
@@ -164,7 +156,6 @@ class APIBotConfig:
"groups",
"personality",
"identity",
-"schedule",
"platforms",
"chat",
"normal_chat",

src/chat/__init__.py (new file, 17 lines added)
View File

@@ -0,0 +1,17 @@
"""
MaiMBot插件系统
包含聊天、情绪、记忆、日程等功能模块
"""
from src.chat.message_receive.chat_stream import chat_manager
from src.chat.emoji_system.emoji_manager import emoji_manager
from src.chat.person_info.relationship_manager import relationship_manager
from src.chat.normal_chat.willing.willing_manager import willing_manager
# 导出主要组件供外部使用
__all__ = [
"chat_manager",
"emoji_manager",
"relationship_manager",
"willing_manager",
]
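With this new package `__init__`, the four managers are re-exported at the package root, so callers can import them from `src.chat` directly instead of reaching into the submodules. A minimal usage sketch (the import path and names come from the file above; the surrounding script is illustrative):

```python
# Illustrative usage of the re-exported singletons from the new package root.
from src.chat import chat_manager, emoji_manager, relationship_manager, willing_manager

# __all__ above defines these four objects as the package's public surface,
# so star-imports and tooling resolve them without touching the submodules.
managers = [chat_manager, emoji_manager, relationship_manager, willing_manager]
print([type(m).__name__ for m in managers])
```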

View File

@@ -12,7 +12,7 @@ import re
from ...common.database import db
from ...config.config import global_config
-from ..chat.utils_image import image_path_to_base64, image_manager
+from ..utils.utils_image import image_path_to_base64, image_manager
from ..models.utils_model import LLMRequest
from src.common.logger_manager import get_logger
from rich.traceback import install

View File

@@ -1,7 +1,7 @@
import os
import time
from typing import List, Dict, Any, Tuple
-from src.plugins.heartFC_chat.heartFC_Cycleinfo import CycleInfo
+from src.chat.focus_chat.heartFC_Cycleinfo import CycleInfo
from src.common.logger_manager import get_logger
logger = get_logger("cycle_analyzer")

View File

@@ -1,6 +1,6 @@
import os
import argparse
-from src.plugins.heartFC_chat.cycle_analyzer import CycleAnalyzer
+from src.chat.focus_chat.cycle_analyzer import CycleAnalyzer
def print_section(title: str, width: int = 80):

View File

@@ -1,23 +1,23 @@
import time
import traceback
from typing import List, Optional, Dict, Any
-from src.plugins.chat.message import MessageRecv, MessageThinking, MessageSending
+from src.chat.message_receive.message import MessageRecv, MessageThinking, MessageSending
-from src.plugins.chat.message import Seg # Local import needed after move
+from src.chat.message_receive.message import Seg # Local import needed after move
-from src.plugins.chat.message import UserInfo
+from src.chat.message_receive.message import UserInfo
-from src.plugins.chat.chat_stream import chat_manager
+from src.chat.message_receive.chat_stream import chat_manager
from src.common.logger_manager import get_logger
-from src.plugins.models.utils_model import LLMRequest
+from src.chat.models.utils_model import LLMRequest
from src.config.config import global_config
-from src.plugins.chat.utils_image import image_path_to_base64 # Local import needed after move
+from src.chat.utils.utils_image import image_path_to_base64 # Local import needed after move
-from src.plugins.utils.timer_calculator import Timer # <--- Import Timer
+from src.chat.utils.timer_calculator import Timer # <--- Import Timer
-from src.plugins.emoji_system.emoji_manager import emoji_manager
+from src.chat.emoji_system.emoji_manager import emoji_manager
-from src.plugins.heartFC_chat.heartflow_prompt_builder import prompt_builder
+from src.chat.focus_chat.heartflow_prompt_builder import prompt_builder
-from src.plugins.heartFC_chat.heartFC_sender import HeartFCSender
+from src.chat.focus_chat.heartFC_sender import HeartFCSender
-from src.plugins.chat.utils import process_llm_response
+from src.chat.utils.utils import process_llm_response
-from src.plugins.respon_info_catcher.info_catcher import info_catcher_manager
+from src.chat.utils.info_catcher import info_catcher_manager
from src.manager.mood_manager import mood_manager
from src.heart_flow.utils_chat import get_chat_type_and_target_info
-from src.plugins.chat.chat_stream import ChatStream
+from src.chat.message_receive.chat_stream import ChatStream
logger = get_logger("expressor")
@@ -99,6 +99,7 @@ class DefaultExpressor:
anchor_message=anchor_message,
thinking_id=thinking_id,
reason=reasoning,
+action_data=action_data,
)
if reply:
@@ -135,6 +136,7 @@ class DefaultExpressor:
reason: str,
anchor_message: MessageRecv,
thinking_id: str,
+action_data: Dict[str, Any],
) -> Optional[List[str]]:
"""
回复器 (Replier): 核心逻辑,负责生成回复文本
@@ -160,6 +162,8 @@ class DefaultExpressor:
)
# --- End determining sender_name ---
+target_message = action_data.get("target", "")
# 3. 构建 Prompt
with Timer("构建Prompt", {}): # 内部计时器,可选保留
prompt = await prompt_builder.build_prompt(
@@ -170,6 +174,7 @@ class DefaultExpressor:
current_mind_info="",
structured_info="",
sender_name=sender_name_for_prompt, # Pass determined name
+target_message=target_message,
)
# 4. 调用 LLM 生成回复
@@ -182,9 +187,14 @@ class DefaultExpressor:
try:
with Timer("LLM生成", {}): # 内部计时器,可选保留
-# logger.info(f"{self.log_prefix}[Replier-{thinking_id}]\nPrompt:\n{prompt}\n")
content, reasoning_content, model_name = await self.express_model.generate_response(prompt)
-# logger.info(f"{self.log_prefix}[Replier-{thinking_id}]\nPrompt:\n{prompt}\n生成回复: {content}\n")
-# 捕捉 LLM 输出信息
+logger.info(f"{self.log_prefix}\nPrompt:\n{prompt}\n---------------------------\n")
+logger.info(f"想要表达:{in_mind_reply}")
+logger.info(f"理由:{reason}")
+logger.info(f"生成回复: {content}\n")
info_catcher.catch_after_llm_generated(
prompt=prompt, response=content, reasoning_content=reasoning_content, model_name=model_name
)

View File

@@ -2,10 +2,10 @@ import time
import random
from typing import List, Dict, Optional, Any, Tuple
from src.common.logger_manager import get_logger
-from src.plugins.models.utils_model import LLMRequest
+from src.chat.models.utils_model import LLMRequest
from src.config.config import global_config
-from src.plugins.utils.chat_message_builder import get_raw_msg_by_timestamp_random, build_readable_messages
+from src.chat.utils.chat_message_builder import get_raw_msg_by_timestamp_random, build_readable_messages
-from src.plugins.heartFC_chat.heartflow_prompt_builder import Prompt, global_prompt_manager
+from src.chat.focus_chat.heartflow_prompt_builder import Prompt, global_prompt_manager
import os
import json
@@ -16,12 +16,12 @@ logger = get_logger("expressor")
def init_prompt() -> None:
-learn_expression_prompt = """
+learn_style_prompt = """
{chat_str}
请从上面这段群聊中概括除了人名为"麦麦"之外的人的语言风格只考虑文字不要考虑表情包和图片
不要涉及具体的人名只考虑语言风格
-思考回复语法长度和情感
+语言风格包含特殊内容和情感
思考有没有特殊的梗一并总结成语言风格
总结成如下格式的规律总结的内容要详细但具有概括性
当"xxx"时可以"xxx", xxx不超过10个字
@@ -29,20 +29,18 @@ def init_prompt() -> None:
例如
当"表示十分惊叹"时使用"我嘞个xxxx"
当"表示讽刺的赞同,不想讲道理"时使用"对对对"
-当"想表达某个观点,但不想明说"时使用"反讽的句式"
当"想说明某个观点,但懒得明说"时使用"懂的都懂"
-当"想搞笑的表现高深的感觉"时使用"文言文句式"
注意不要总结你自己的发言
现在请你概括
"""
-Prompt(learn_expression_prompt, "learn_expression_prompt")
+Prompt(learn_style_prompt, "learn_style_prompt")
personality_expression_prompt = """
{personality}
请从以上人设中总结出这个角色可能的语言风格
-思考回复语法长度和情感
+思考回复的特殊内容和情感
思考有没有特殊的梗一并总结成语言风格
总结成如下格式的规律总结的内容要详细但具有概括性
当"xxx"时可以"xxx", xxx不超过10个字
@@ -50,20 +48,38 @@ def init_prompt() -> None:
例如
当"表示十分惊叹"时使用"我嘞个xxxx"
当"表示讽刺的赞同,不想讲道理"时使用"对对对"
-当"想表达某个观点,但不想明说"时使用"反讽的句式"
当"想说明某个观点,但懒得明说"时使用"懂的都懂"
-当"想搞笑的表现高深的感觉"时使用"文言文句式"
现在请你概括
"""
Prompt(personality_expression_prompt, "personality_expression_prompt")
+learn_grammar_prompt = """
+{chat_str}
+请从上面这段群聊中概括除了人名为"麦麦"之外的人的语法和句法特点只考虑纯文字不要考虑表情包和图片
+不要总结图片动画表情[图片][动画表情]不总结 表情符号
+不要涉及具体的人名只考虑语法和句法特点,
+语法和句法特点要包括句子长短具体字数如何分局有何种语病如何拆分句子
+总结成如下格式的规律总结的内容要简洁不浮夸
+当"xxx"时可以"xxx"
+例如
+当"表达观点较复杂"时使用"省略主语"的句法
+当"不用详细说明的一般表达"时使用"非常简洁的句子"的句法
+当"需要单纯简单的确认"时使用"单字或几个字的肯定"的句法
+注意不要总结你自己的发言
+现在请你概括
+"""
+Prompt(learn_grammar_prompt, "learn_grammar_prompt")
class ExpressionLearner:
def __init__(self) -> None:
self.express_learn_model: LLMRequest = LLMRequest(
model=global_config.llm_normal,
-temperature=global_config.llm_normal["temp"],
+temperature=0.1,
max_tokens=256,
request_type="response_heartflow",
)
@@ -73,17 +89,22 @@ class ExpressionLearner:
读取/data/expression/learnt/{chat_id}/expressions.json和/data/expression/personality/expressions.json
返回(learnt_expressions, personality_expressions)
"""
-learnt_file = os.path.join("data", "expression", "learnt", str(chat_id), "expressions.json")
+learnt_style_file = os.path.join("data", "expression", "learnt_style", str(chat_id), "expressions.json")
+learnt_grammar_file = os.path.join("data", "expression", "learnt_grammar", str(chat_id), "expressions.json")
personality_file = os.path.join("data", "expression", "personality", "expressions.json")
-learnt_expressions = []
+learnt_style_expressions = []
+learnt_grammar_expressions = []
personality_expressions = []
-if os.path.exists(learnt_file):
-with open(learnt_file, "r", encoding="utf-8") as f:
-learnt_expressions = json.load(f)
+if os.path.exists(learnt_style_file):
+with open(learnt_style_file, "r", encoding="utf-8") as f:
+learnt_style_expressions = json.load(f)
+if os.path.exists(learnt_grammar_file):
+with open(learnt_grammar_file, "r", encoding="utf-8") as f:
+learnt_grammar_expressions = json.load(f)
if os.path.exists(personality_file):
with open(personality_file, "r", encoding="utf-8") as f:
personality_expressions = json.load(f)
-return learnt_expressions, personality_expressions
+return learnt_style_expressions, learnt_grammar_expressions, personality_expressions
def is_similar(self, s1: str, s2: str) -> bool:
"""
@@ -98,23 +119,48 @@ class ExpressionLearner:
return same / min_len > 0.8
async def learn_and_store_expression(self) -> List[Tuple[str, str, str]]:
-"""选择从当前到最近1小时内的随机10条消息然后学习这些消息的表达方式"""
-logger.info("开始学习表达方式...")
-expressions: Optional[List[Tuple[str, str, str]]] = await self.learn_expression()
-logger.info(f"学习到{len(expressions) if expressions else 0}条表达方式")
-# expressions: List[(chat_id, situation, style)]
-if not expressions:
-logger.info("没有学习到表达方式")
+"""
+学习并存储表达方式分别学习语言风格和句法特点
+"""
+learnt_style: Optional[List[Tuple[str, str, str]]] = await self.learn_and_store(type="style", num=3)
+if not learnt_style:
return []
+learnt_grammar: Optional[List[Tuple[str, str, str]]] = await self.learn_and_store(type="grammar", num=2)
+if not learnt_grammar:
+return []
+return learnt_style, learnt_grammar
+async def learn_and_store(self, type: str, num: int = 10) -> List[Tuple[str, str, str]]:
+"""
+选择从当前到最近1小时内的随机num条消息然后学习这些消息的表达方式
+type: "style" or "grammar"
+"""
+if type == "style":
+type_str = "语言风格"
+elif type == "grammar":
+type_str = "句法特点"
+else:
+raise ValueError(f"Invalid type: {type}")
+logger.info(f"开始学习{type_str}...")
+learnt_expressions: Optional[List[Tuple[str, str, str]]] = await self.learn_expression(type, num)
+logger.info(f"学习到{len(learnt_expressions) if learnt_expressions else 0}条{type_str}")
+# learnt_expressions: List[(chat_id, situation, style)]
+if not learnt_expressions:
+logger.info(f"没有学习到{type_str}")
+return []
# 按chat_id分组
chat_dict: Dict[str, List[Dict[str, str]]] = {}
-for chat_id, situation, style in expressions:
+for chat_id, situation, style in learnt_expressions:
if chat_id not in chat_dict:
chat_dict[chat_id] = []
chat_dict[chat_id].append({"situation": situation, "style": style})
# 存储到/data/expression/对应chat_id/expressions.json
for chat_id, expr_list in chat_dict.items():
-dir_path = os.path.join("data", "expression", "learnt", str(chat_id))
+dir_path = os.path.join("data", "expression", f"learnt_{type}", str(chat_id))
os.makedirs(dir_path, exist_ok=True)
file_path = os.path.join(dir_path, "expressions.json")
# 若已存在,先读出合并
@@ -154,17 +200,26 @@ class ExpressionLearner:
old_data.append(new_expr)
with open(file_path, "w", encoding="utf-8") as f:
json.dump(old_data, f, ensure_ascii=False, indent=2)
-return expressions
+return learnt_expressions
-async def learn_expression(self) -> Optional[List[Tuple[str, str, str]]]:
+async def learn_expression(self, type: str, num: int = 10) -> Optional[List[Tuple[str, str, str]]]:
-"""选择从当前到最近1小时内的随机10条消息,然后学习这些消息的表达方式
+"""选择从当前到最近1小时内的随机num条消息,然后学习这些消息的表达方式
Args:
-chat_stream (ChatStream): _description_
+type: "style" or "grammar"
"""
+if type == "style":
+type_str = "语言风格"
+prompt = "learn_style_prompt"
+elif type == "grammar":
+type_str = "句法特点"
+prompt = "learn_grammar_prompt"
+else:
+raise ValueError(f"Invalid type: {type}")
current_time = time.time()
random_msg: Optional[List[Dict[str, Any]]] = get_raw_msg_by_timestamp_random(
-current_time - 3600 * 24, current_time, limit=10
+current_time - 3600 * 24, current_time, limit=num
)
if not random_msg:
return None
@@ -173,15 +228,19 @@ class ExpressionLearner:
random_msg_str: str = await build_readable_messages(random_msg, timestamp_mode="normal")
prompt: str = await global_prompt_manager.format_prompt(
-"learn_expression_prompt",
+prompt,
chat_str=random_msg_str,
)
-logger.debug(f"学习表达方式的prompt: {prompt}")
+logger.debug(f"学习{type_str}的prompt: {prompt}")
-response, _ = await self.express_learn_model.generate_response_async(prompt)
+try:
+response, _ = await self.express_learn_model.generate_response_async(prompt)
+except Exception as e:
+logger.error(f"学习{type_str}失败: {e}")
+return None
-logger.debug(f"学习表达方式的response: {response}")
+logger.debug(f"学习{type_str}的response: {response}")
expressions: List[Tuple[str, str, str]] = self.parse_expression_response(response, chat_id)
@@ -232,7 +291,13 @@ class ExpressionLearner:
personality=global_config.expression_style,
)
logger.info(f"个性表达方式提取prompt: {prompt}")
-response, _ = await self.express_learn_model.generate_response_async(prompt)
+try:
+response, _ = await self.express_learn_model.generate_response_async(prompt)
+except Exception as e:
+logger.error(f"个性表达方式提取失败: {e}")
+return
logger.info(f"个性表达方式提取response: {response}")
# chat_id用personality
expressions = self.parse_expression_response(response, "personality")
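For reference, the learner above writes one expressions.json per chat under data/expression/learnt_style/{chat_id}/ (and learnt_grammar/{chat_id}/ for syntax), and the prompt builder later weights entries by their count field. Based on the fields visible in this diff (situation, style, count), a stored file plausibly looks like the sketch below; the concrete entries are made up for illustration:

```python
# Hypothetical contents of data/expression/learnt_style/<chat_id>/expressions.json,
# shown as the Python structure json.load() would return.
example_learnt_style = [
    {"situation": "表示讽刺的赞同,不想讲道理", "style": "对对对", "count": 3},
    {"situation": "需要单纯简单的确认", "style": "嗯", "count": 1},
]
# "count" is what weighted_sample_no_replacement() in the prompt builder uses as a weight.
```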

View File

@@ -6,32 +6,32 @@ import time
import traceback
from collections import deque
from typing import List, Optional, Dict, Any, Deque, Callable, Coroutine
-from src.plugins.chat.chat_stream import ChatStream
+from src.chat.message_receive.chat_stream import ChatStream
-from src.plugins.chat.chat_stream import chat_manager
+from src.chat.message_receive.chat_stream import chat_manager
from rich.traceback import install
from src.common.logger_manager import get_logger
-from src.plugins.models.utils_model import LLMRequest
+from src.chat.models.utils_model import LLMRequest
from src.config.config import global_config
-from src.plugins.utils.timer_calculator import Timer
+from src.chat.utils.timer_calculator import Timer
from src.heart_flow.observation.observation import Observation
-from src.plugins.heartFC_chat.heartflow_prompt_builder import prompt_builder
+from src.chat.focus_chat.heartflow_prompt_builder import prompt_builder
-from src.plugins.heartFC_chat.heartFC_Cycleinfo import CycleDetail
+from src.chat.focus_chat.heartFC_Cycleinfo import CycleDetail
from src.heart_flow.observation.chatting_observation import ChattingObservation
from src.heart_flow.utils_chat import get_chat_type_and_target_info
-from src.heart_flow.info.info_base import InfoBase
+from src.chat.focus_chat.info.info_base import InfoBase
-from src.heart_flow.info.obs_info import ObsInfo
+from src.chat.focus_chat.info.obs_info import ObsInfo
-from src.heart_flow.info.cycle_info import CycleInfo
+from src.chat.focus_chat.info.cycle_info import CycleInfo
-from src.heart_flow.info.mind_info import MindInfo
+from src.chat.focus_chat.info.mind_info import MindInfo
-from src.heart_flow.info.structured_info import StructuredInfo
+from src.chat.focus_chat.info.structured_info import StructuredInfo
-from src.plugins.heartFC_chat.info_processors.chattinginfo_processor import ChattingInfoProcessor
+from src.chat.focus_chat.info_processors.chattinginfo_processor import ChattingInfoProcessor
-from src.plugins.heartFC_chat.info_processors.mind_processor import MindProcessor
+from src.chat.focus_chat.info_processors.mind_processor import MindProcessor
from src.heart_flow.observation.memory_observation import MemoryObservation
from src.heart_flow.observation.hfcloop_observation import HFCloopObservation
from src.heart_flow.observation.working_observation import WorkingObservation
-from src.plugins.heartFC_chat.info_processors.tool_processor import ToolProcessor
+from src.chat.focus_chat.info_processors.tool_processor import ToolProcessor
-from src.plugins.heartFC_chat.expressors.default_expressor import DefaultExpressor
+from src.chat.focus_chat.expressors.default_expressor import DefaultExpressor
-from src.plugins.heartFC_chat.hfc_utils import _create_empty_anchor_message
+from src.chat.focus_chat.hfc_utils import _create_empty_anchor_message
-from src.plugins.heartFC_chat.memory_activator import MemoryActivator
+from src.chat.focus_chat.memory_activator import MemoryActivator
install(extra_lines=3)

View File

@@ -5,7 +5,7 @@ HeartFC_chat 是一个基于心流理论的聊天系统,通过模拟人类的
## 核心工作流程
### 1. 消息处理与存储 (HeartFCProcessor)
-[代码位置: src/plugins/heartFC_chat/heartflow_processor.py]
+[代码位置: src/plugins/focus_chat/heartflow_processor.py]
消息处理器负责接收和预处理消息,主要完成以下工作:
```mermaid
@@ -23,7 +23,7 @@ graph TD
- 消息存储:`storage.store_message()` [行号: 108]
### 2. 对话管理循环 (HeartFChatting)
-[代码位置: src/plugins/heartFC_chat/heartFC_chat.py]
+[代码位置: src/plugins/focus_chat/focus_chat.py]
HeartFChatting是系统的核心组件实现了完整的对话管理循环
@@ -55,7 +55,7 @@ graph TD
* 处理表情:`_handle_emoji()` [行号: 527-567]
### 3. 回复生成机制 (HeartFCGenerator)
-[代码位置: src/plugins/heartFC_chat/heartFC_generator.py]
+[代码位置: src/plugins/focus_chat/heartFC_generator.py]
回复生成器负责产生高质量的回复内容:
@@ -74,7 +74,7 @@ graph TD
* 响应处理:`_process_response()` [行号: 97-106]
### 4. 提示词构建系统 (HeartFlowPromptBuilder)
-[代码位置: src/plugins/heartFC_chat/heartflow_prompt_builder.py]
+[代码位置: src/plugins/focus_chat/heartflow_prompt_builder.py]
提示词构建器支持两种工作模式HeartFC_chat专门使用Focus模式而Normal模式是为normal_chat设计的
@@ -106,8 +106,8 @@ graph TD
## 智能特性
### 1. 对话决策机制
-- LLM决策工具定义`PLANNER_TOOL_DEFINITION` [heartFC_chat.py 行号: 13-42]
+- LLM决策工具定义`PLANNER_TOOL_DEFINITION` [focus_chat.py 行号: 13-42]
-- 决策执行:`_planner()` [heartFC_chat.py 行号: 282-386]
+- 决策执行:`_planner()` [focus_chat.py 行号: 282-386]
- 考虑因素:
* 上下文相关性
* 情感状态
@@ -115,7 +115,7 @@ graph TD
* 对话时机
### 2. 状态管理
-[代码位置: src/plugins/heartFC_chat/heartFC_chat.py]
+[代码位置: src/plugins/focus_chat/focus_chat.py]
- 状态机实现:`HeartFChatting`类 [行号: 44-567]
- 核心功能:
* 初始化:`_initialize()` [行号: 89-112]
@@ -123,7 +123,7 @@ graph TD
* 状态转换:`_handle_loop_completion()` [行号: 166-190]
### 3. 回复生成策略
-[代码位置: src/plugins/heartFC_chat/heartFC_generator.py]
+[代码位置: src/plugins/focus_chat/heartFC_generator.py]
- 温度调节:`current_model.temperature = global_config.llm_normal["temp"] * arousal_multiplier` [行号: 48]
- 生成控制:`_generate_response_with_model()` [行号: 69-95]
- 响应处理:`_process_response()` [行号: 97-106]
@@ -133,7 +133,7 @@ graph TD
### 关键参数
- LLM配置`model_normal` [heartFC_generator.py 行号: 32-37]
- 过滤规则:`_check_ban_words()`, `_check_ban_regex()` [heartflow_processor.py 行号: 196-215]
-- 状态控制:`INITIAL_DURATION = 60.0` [heartFC_chat.py 行号: 11]
+- 状态控制:`INITIAL_DURATION = 60.0` [focus_chat.py 行号: 11]
### 优化建议
1. 调整LLM参数`temperature`和`max_tokens`
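The temperature rule quoted in section 3 of the README above simply scales the configured base temperature by the mood system's arousal multiplier; a minimal sketch of that formula (constant values are illustrative, names follow the README):

```python
# Sketch of the temperature modulation described above, not the repo's code.
base_temp = 0.7           # stands in for global_config.llm_normal["temp"]
arousal_multiplier = 1.2  # stands in for the multiplier supplied by mood_manager
current_temperature = base_temp * arousal_multiplier  # higher arousal -> more varied replies
print(current_temperature)  # 0.84
```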

View File

@@ -1,14 +1,11 @@
-# src/plugins/heartFC_chat/heartFC_sender.py
-import asyncio # 重新导入 asyncio
+import asyncio
from typing import Dict, Optional # 重新导入类型
-from ..chat.message import MessageSending, MessageThinking # 只保留 MessageSending 和 MessageThinking
+from src.chat.message_receive.message import MessageSending, MessageThinking
-# from ..message import global_api
-from src.plugins.message.api import global_api
+from src.common.message.api import global_api
-from ..storage.storage import MessageStorage
+from src.chat.message_receive.storage import MessageStorage
-from ..chat.utils import truncate_message
+from src.chat.utils.utils import truncate_message
from src.common.logger_manager import get_logger
-from src.plugins.chat.utils import calculate_typing_time
+from src.chat.utils.utils import calculate_typing_time
from rich.traceback import install
install(extra_lines=3)

View File

@@ -2,16 +2,16 @@ import time
import traceback
from ..memory_system.Hippocampus import HippocampusManager
from ...config.config import global_config
-from ..chat.message import MessageRecv
+from ..message_receive.message import MessageRecv
-from ..storage.storage import MessageStorage
+from ..message_receive.storage import MessageStorage
-from ..chat.utils import is_mentioned_bot_in_message
+from ..utils.utils import is_mentioned_bot_in_message
from maim_message import Seg
from src.heart_flow.heartflow import heartflow
from src.common.logger_manager import get_logger
-from ..chat.chat_stream import chat_manager
+from ..message_receive.chat_stream import chat_manager
-from ..chat.message_buffer import message_buffer
+from ..message_receive.message_buffer import message_buffer
from ..utils.timer_calculator import Timer
-from src.plugins.person_info.relationship_manager import relationship_manager
+from src.chat.person_info.relationship_manager import relationship_manager
from typing import Optional, Tuple, Dict, Any
logger = get_logger("chat")
@@ -215,7 +215,7 @@ class HeartFCProcessor:
f"[{current_time}][{mes_name}]"
f"{userinfo.user_nickname}:"
f"{message.processed_plain_text}"
-f"[兴趣度: {interested_rate:.2f}]"
+f"[激活: {interested_rate:.1f}]"
)
# 8. 关系处理

View File

@@ -1,21 +1,20 @@
-import random
-from ...config.config import global_config
+from src.config.config import global_config
from src.common.logger_manager import get_logger
-from ...individuality.individuality import Individuality
+from src.individuality.individuality import Individuality
-from src.plugins.utils.prompt_builder import Prompt, global_prompt_manager
+from src.chat.utils.prompt_builder import Prompt, global_prompt_manager
-from src.plugins.utils.chat_message_builder import build_readable_messages, get_raw_msg_before_timestamp_with_chat
+from src.chat.utils.chat_message_builder import build_readable_messages, get_raw_msg_before_timestamp_with_chat
-from src.plugins.person_info.relationship_manager import relationship_manager
+from src.chat.person_info.relationship_manager import relationship_manager
-from src.plugins.chat.utils import get_embedding
+from src.chat.utils.utils import get_embedding
import time
from typing import Union, Optional, Dict, Any
-from ...common.database import db
+from src.common.database import db
-from ..chat.utils import get_recent_group_speaker
+from src.chat.utils.utils import get_recent_group_speaker
from src.manager.mood_manager import mood_manager
-from ..memory_system.Hippocampus import HippocampusManager
+from src.chat.memory_system.Hippocampus import HippocampusManager
-from ..schedule.schedule_generator import bot_schedule
-from ..knowledge.knowledge_lib import qa_manager
+from src.chat.knowledge.knowledge_lib import qa_manager
-from .expressors.exprssion_learner import expression_learner
+from src.chat.focus_chat.expressors.exprssion_learner import expression_learner
import traceback
+import random
logger = get_logger("prompt")
@@ -24,21 +23,22 @@ logger = get_logger("prompt")
def init_prompt():
Prompt(
"""
-你可以参考以下的语言习惯如果情景合适就使用不要盲目使用
+你可以参考以下的语言习惯如果情景合适就使用不要盲目使用,不要生硬使用而是结合到表达中
-{language_habits}
+{style_habbits}
-不要生硬使用而是结合到表达中
+你现在正在群里聊天以下是群里正在进行的聊天内容
{chat_info}
-需要了解聊天记录中的内容就好
+以上是聊天内容你需要了解聊天记录中的内容
{chat_target}
-你的名字是{bot_name}{prompt_personality}你想表达{in_mind_reply},原因是{reason}
+你的名字是{bot_name}{prompt_personality}在这聊天中"{target_message}"引起了你的注意你想表达{in_mind_reply},原因是{reason}你现在要思考怎么回复
+你需要使用合适的语法和句法参考聊天内容组织一条日常且口语化的回复
-注意请根据你想表达的内容和原因参考聊天内容组织一条日常且口语化的回复
+你根据情景使用以下句法
-要求回复尽量简短一些{reply_style2}{prompt_ger}可以参考贴吧知乎或者微博的回复风格你可以完全重组回复保留最基本的表达含义就好但注意简短保持一个话题
+{grammar_habbits}
-{reply_style1}说中文不要刻意突出自身学科背景不要浮夸不要用夸张修辞平淡一些不要输出多余内容(包括前后缀冒号和引号括号表情包at或 @等 )只输出一条回复就好
+回复尽量简短一些可以参考贴吧知乎和微博的回复风格你可以完全重组回复保留最基本的表达含义就好但注意回复要简短
+回复不要浮夸不要用夸张修辞平淡一些不要输出多余内容(包括前后缀冒号和引号括号表情包at或 @等 )只输出一条回复就好
+现在你说
""",
"heart_flow_prompt",
)
@@ -119,7 +119,6 @@ def init_prompt():
{memory_prompt}
{relation_prompt}
{prompt_info}
-{schedule_prompt}
{chat_target}
{chat_talking_prompt}
现在"{sender_name}"说的:{message_txt}引起了你的注意你想要在群里发言或者回复这条消息\n
@@ -137,7 +136,7 @@ def init_prompt():
"你回忆起:{related_memory_info}\n以上是你的回忆,不一定是目前聊天里的人说的,也不一定是现在发生的事情,请记住。\n",
"memory_prompt",
)
-Prompt("你现在正在做的事情是:{schedule_info}", "schedule_prompt")
Prompt("\n你有以下这些**知识**\n{prompt_info}\n请你**记住上面的知识**,之后可能会用到。\n", "knowledge_prompt")
# --- Template for HeartFChatting (FOCUSED mode) ---
@@ -168,7 +167,6 @@ def init_prompt():
{memory_prompt}
{relation_prompt}
{prompt_info}
-{schedule_prompt}
你正在和 {sender_name} 私聊
聊天记录如下
{chat_talking_prompt}
@@ -186,7 +184,7 @@ def init_prompt():
async def _build_prompt_focus(
-reason, current_mind_info, structured_info, chat_stream, sender_name, in_mind_reply
+reason, current_mind_info, structured_info, chat_stream, sender_name, in_mind_reply, target_message
) -> str:
individuality = Individuality.get_instance()
prompt_personality = individuality.get_prompt(x_person=0, level=2)
@@ -206,38 +204,12 @@ async def _build_prompt_focus(
chat_talking_prompt = await build_readable_messages(
message_list_before_now,
replace_bot_name=True,
-merge_messages=False,
+merge_messages=True,
-timestamp_mode="normal",
+timestamp_mode="relative",
read_mark=0.0,
truncate=True,
)
-prompt_ger = ""
-if random.random() < 0.04:
-prompt_ger += "你喜欢用倒装句"
-if random.random() < 0.02:
-prompt_ger += "你喜欢用反问句"
-reply_styles1 = [
-("给出日常且口语化的回复,平淡一些", 0.4),
-("给出非常简短的回复", 0.4),
-("给出缺失主语的回复,简短", 0.15),
-("给出带有语病的回复,朴实平淡", 0.05),
-]
-reply_style1_chosen = random.choices(
-[style[0] for style in reply_styles1], weights=[style[1] for style in reply_styles1], k=1
-)[0]
-reply_styles2 = [
-("不要回复的太有条理,可以有个性", 0.7),
-("不要回复的太有条理,可以复读,但是不要复读自己说的话", 0.1),
-("回复的认真一些", 0.1),
-("可以回复单个表情符号", 0.05),
-]
-reply_style2_chosen = random.choices(
-[style[0] for style in reply_styles2], weights=[style[1] for style in reply_styles2], k=1
-)[0]
if structured_info:
structured_info_prompt = await global_prompt_manager.format_prompt(
"info_from_tools", structured_info=structured_info
@@ -246,33 +218,36 @@ async def _build_prompt_focus(
structured_info_prompt = ""
# 从/data/expression/对应chat_id/expressions.json中读取表达方式
-learnt_expressions, personality_expressions = await expression_learner.get_expression_by_chat_id(
-chat_stream.stream_id
-)
-language_habits = []
-# 1. learnt_expressions加权随机选5条
-if learnt_expressions:
-weights = [expr["count"] for expr in learnt_expressions]
-selected_learnt = weighted_sample_no_replacement(learnt_expressions, weights, 5)
+(
+learnt_style_expressions,
+learnt_grammar_expressions,
+personality_expressions,
+) = await expression_learner.get_expression_by_chat_id(chat_stream.stream_id)
+style_habbits = []
+grammar_habbits = []
+# 1. learnt_expressions加权随机选3条
+if learnt_style_expressions:
+weights = [expr["count"] for expr in learnt_style_expressions]
+selected_learnt = weighted_sample_no_replacement(learnt_style_expressions, weights, 3)
for expr in selected_learnt:
if isinstance(expr, dict) and "situation" in expr and "style" in expr:
-language_habits.append(f"当{expr['situation']}时,使用 {expr['style']}")
+style_habbits.append(f"当{expr['situation']}时,使用 {expr['style']}")
-# 2. personality_expressions随机选1条
+# 2. learnt_grammar_expressions加权随机选3条
+if learnt_grammar_expressions:
+weights = [expr["count"] for expr in learnt_grammar_expressions]
+selected_learnt = weighted_sample_no_replacement(learnt_grammar_expressions, weights, 3)
+for expr in selected_learnt:
+if isinstance(expr, dict) and "situation" in expr and "style" in expr:
+grammar_habbits.append(f"当{expr['situation']}时,使用 {expr['style']}")
+# 3. personality_expressions随机选1条
if personality_expressions:
expr = random.choice(personality_expressions)
if isinstance(expr, dict) and "situation" in expr and "style" in expr:
-language_habits.append(f"当{expr['situation']}时,使用 {expr['style']}")
+style_habbits.append(f"当{expr['situation']}时,使用 {expr['style']}")
-# 3. 如果都没有,给默认
-if not language_habits:
-language_habits = [
-"当表示惊讶无语是,使用 不是?",
-"当表示无语时,使用 阿这",
-"当用调侃的方式称呼某人,使用 哈基X",
-"当表示讽刺的赞同时,使用 对对对",
-"当你想说明某个观点,但懒得明说,使用 懂的都懂",
-]
-language_habits = random.sample(language_habits, 6)
-language_habits_str = "\n".join(language_habits)
+style_habbits_str = "\n".join(style_habbits)
+grammar_habbits_str = "\n".join(grammar_habbits)
logger.debug("开始构建 focus prompt")
@@ -286,20 +261,17 @@ async def _build_prompt_focus(
prompt = await global_prompt_manager.format_prompt(
template_name,
# info_from_tools=structured_info_prompt,
-language_habits=language_habits_str,
+style_habbits=style_habbits_str,
+grammar_habbits=grammar_habbits_str,
chat_target=chat_target_1, # Used in group template
# chat_talking_prompt=chat_talking_prompt,
chat_info=chat_talking_prompt,
bot_name=global_config.BOT_NICKNAME,
# prompt_personality=prompt_personality,
prompt_personality="",
-# chat_target_2=chat_target_2, # Used in group template
-# current_mind_info=current_mind_info,
-reply_style2=reply_style2_chosen,
-reply_style1=reply_style1_chosen,
reason=reason,
in_mind_reply=in_mind_reply,
-prompt_ger=prompt_ger,
+target_message=target_message,
# moderation_prompt=await global_prompt_manager.get_prompt_async("moderation_prompt"),
# sender_name is not used in the group template
)
@@ -314,10 +286,7 @@ async def _build_prompt_focus(
prompt_personality=prompt_personality,
# chat_target and chat_target_2 are not used in private template
current_mind_info=current_mind_info,
-reply_style2=reply_style2_chosen,
-reply_style1=reply_style1_chosen,
reason=reason,
-prompt_ger=prompt_ger,
moderation_prompt=await global_prompt_manager.get_prompt_async("moderation_prompt"),
)
# --- End choosing template ---
@@ -464,13 +433,6 @@ class PromptBuilder:
end_time = time.time()
logger.debug(f"知识检索耗时: {(end_time - start_time):.3f}")
-if global_config.ENABLE_SCHEDULE_GEN:
-schedule_prompt = await global_prompt_manager.format_prompt(
-"schedule_prompt", schedule_info=bot_schedule.get_current_num_task(num=1, time_info=False)
-)
-else:
-schedule_prompt = ""
logger.debug("开始构建 normal prompt")
# --- Choose template and format based on chat type ---
@@ -486,7 +448,6 @@ class PromptBuilder:
sender_name=effective_sender_name,
memory_prompt=memory_prompt,
prompt_info=prompt_info,
-schedule_prompt=schedule_prompt,
chat_target=chat_target_1,
chat_target_2=chat_target_2,
chat_talking_prompt=chat_talking_prompt,
@@ -511,7 +472,6 @@ class PromptBuilder:
sender_name=effective_sender_name,
memory_prompt=memory_prompt,
prompt_info=prompt_info,
-schedule_prompt=schedule_prompt,
chat_talking_prompt=chat_talking_prompt,
message_txt=message_txt,
bot_name=global_config.BOT_NICKNAME,
@@ -861,7 +821,7 @@ class PromptBuilder:
return "[构建 Planner Prompt 时出错]"
-def weighted_sample_no_replacement(items, weights, k):
+def weighted_sample_no_replacement(items, weights, k) -> list:
"""
加权且不放回地随机抽取k个元素
@@ -872,6 +832,8 @@ def weighted_sample_no_replacement(items, weights, k):
返回
selected: 按权重加权且不重复抽取的k个元素组成的列表
+如果items中的元素不足k就只会返回所有可用的元素
实现思路
每次从当前池中按权重加权随机选出一个元素选中后将其从池中移除重复k次
这样保证了
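The docstring above states the scheme in prose; a minimal self-contained sketch of that weighted, no-replacement draw (an illustration of the described approach, not the repo's exact implementation) could look like this:

```python
import random

def weighted_sample_no_replacement(items, weights, k) -> list:
    """Draw up to k items; each round one item is picked with probability
    proportional to its weight and then removed from the pool (no repeats)."""
    pool = list(zip(items, weights))
    selected = []
    for _ in range(min(k, len(pool))):  # fewer than k items -> return what exists
        total = sum(w for _, w in pool)
        r = random.uniform(0, total)
        cumulative = 0.0
        for i, (item, w) in enumerate(pool):
            cumulative += w
            if r <= cumulative:
                selected.append(item)
                pool.pop(i)
                break
    return selected

# Example: sample 2 of 3 habits, favouring the higher-count ones.
print(weighted_sample_no_replacement(["a", "b", "c"], [5, 1, 1], 2))
```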

View File

@@ -1,9 +1,9 @@
import time
import traceback
from typing import Optional
-from src.plugins.chat.message import MessageRecv, BaseMessageInfo
+from src.chat.message_receive.message import MessageRecv, BaseMessageInfo
-from src.plugins.chat.chat_stream import ChatStream
+from src.chat.message_receive.chat_stream import ChatStream
-from src.plugins.chat.message import UserInfo
+from src.chat.message_receive.message import UserInfo
from src.common.logger_manager import get_logger
import json

View File

@@ -1,6 +1,6 @@
from abc import ABC, abstractmethod
from typing import List, Any, Optional, Dict
-from src.heart_flow.info.info_base import InfoBase
+from src.chat.focus_chat.info.info_base import InfoBase
from src.heart_flow.observation.observation import Observation
from src.common.logger_manager import get_logger

View File

@@ -1,15 +1,15 @@
from typing import List, Optional, Any
-from src.heart_flow.info.obs_info import ObsInfo
+from src.chat.focus_chat.info.obs_info import ObsInfo
from src.heart_flow.observation.observation import Observation
-from src.heart_flow.info.info_base import InfoBase
+from src.chat.focus_chat.info.info_base import InfoBase
from .base_processor import BaseProcessor
from src.common.logger_manager import get_logger
from src.heart_flow.observation.chatting_observation import ChattingObservation
from src.heart_flow.observation.hfcloop_observation import HFCloopObservation
-from src.heart_flow.info.cycle_info import CycleInfo
+from src.chat.focus_chat.info.cycle_info import CycleInfo
from datetime import datetime
from typing import Dict
-from src.plugins.models.utils_model import LLMRequest
+from src.chat.models.utils_model import LLMRequest
from src.config.config import global_config
logger = get_logger("observation")

View File

@@ -1,28 +1,28 @@
from src.heart_flow.observation.chatting_observation import ChattingObservation
from src.heart_flow.observation.observation import Observation
-from src.plugins.models.utils_model import LLMRequest
+from src.chat.models.utils_model import LLMRequest
from src.config.config import global_config
import time
import traceback
from src.common.logger_manager import get_logger
from src.individuality.individuality import Individuality
import random
-from src.plugins.utils.prompt_builder import Prompt, global_prompt_manager
+from src.chat.utils.prompt_builder import Prompt, global_prompt_manager
-from src.plugins.utils.json_utils import safe_json_dumps
+from src.chat.utils.json_utils import safe_json_dumps
-from src.plugins.chat.chat_stream import chat_manager
+from src.chat.message_receive.chat_stream import chat_manager
import difflib
-from src.plugins.person_info.relationship_manager import relationship_manager
+from src.chat.person_info.relationship_manager import relationship_manager
from .base_processor import BaseProcessor
-from src.heart_flow.info.mind_info import MindInfo
+from src.chat.focus_chat.info.mind_info import MindInfo
from typing import List, Optional
from src.heart_flow.observation.hfcloop_observation import HFCloopObservation
-from src.plugins.heartFC_chat.info_processors.processor_utils import (
+from src.chat.focus_chat.info_processors.processor_utils import (
calculate_similarity,
calculate_replacement_probability,
get_spark,
)
from typing import Dict
-from src.heart_flow.info.info_base import InfoBase
+from src.chat.focus_chat.info.info_base import InfoBase
logger = get_logger("sub_heartflow")

View File

@@ -1,18 +1,18 @@
from src.heart_flow.observation.chatting_observation import ChattingObservation
-from src.plugins.models.utils_model import LLMRequest
+from src.chat.models.utils_model import LLMRequest
from src.config.config import global_config
import time
from src.common.logger_manager import get_logger
from src.individuality.individuality import Individuality
-from src.plugins.utils.prompt_builder import Prompt, global_prompt_manager
+from src.chat.utils.prompt_builder import Prompt, global_prompt_manager
from src.tools.tool_use import ToolUser
-from src.plugins.utils.json_utils import process_llm_tool_calls
+from src.chat.utils.json_utils import process_llm_tool_calls
-from src.plugins.person_info.relationship_manager import relationship_manager
+from src.chat.person_info.relationship_manager import relationship_manager
from .base_processor import BaseProcessor
from typing import List, Optional, Dict
from src.heart_flow.observation.observation import Observation
from src.heart_flow.observation.working_observation import WorkingObservation
-from src.heart_flow.info.structured_info import StructuredInfo
+from src.chat.focus_chat.info.structured_info import StructuredInfo
logger = get_logger("tool_use")

View File

@@ -1,12 +1,12 @@
from src.heart_flow.observation.chatting_observation import ChattingObservation
from src.heart_flow.observation.working_observation import WorkingObservation
from src.heart_flow.observation.hfcloop_observation import HFCloopObservation
-from src.plugins.models.utils_model import LLMRequest
+from src.chat.models.utils_model import LLMRequest
from src.config.config import global_config
from src.common.logger_manager import get_logger
-from src.plugins.utils.prompt_builder import Prompt
+from src.chat.utils.prompt_builder import Prompt
from datetime import datetime
-from src.plugins.memory_system.Hippocampus import HippocampusManager
+from src.chat.memory_system.Hippocampus import HippocampusManager
from typing import List, Dict

View File

@@ -11,14 +11,14 @@ import networkx as nx
import numpy as np
from collections import Counter
from ...common.database import db
-from ...plugins.models.utils_model import LLMRequest
+from ...chat.models.utils_model import LLMRequest
from src.common.logger_manager import get_logger
-from src.plugins.memory_system.sample_distribution import MemoryBuildScheduler # 分布生成器
+from src.chat.memory_system.sample_distribution import MemoryBuildScheduler # 分布生成器
from ..utils.chat_message_builder import (
get_raw_msg_by_timestamp,
build_readable_messages,
) # 导入 build_readable_messages
-from ..chat.utils import translate_timestamp_to_human_readable
+from ..utils.utils import translate_timestamp_to_human_readable
from .memory_config import MemoryConfig
from rich.traceback import install

View File

@@ -6,7 +6,7 @@ import os
# 添加项目根目录到系统路径
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(__file__)))))
-from src.plugins.memory_system.Hippocampus import HippocampusManager
+from src.chat.memory_system.Hippocampus import HippocampusManager
from src.config.config import global_config
from rich.traceback import install

View File

@@ -2,7 +2,7 @@ from ..emoji_system.emoji_manager import emoji_manager
from ..person_info.relationship_manager import relationship_manager
from .chat_stream import chat_manager
from .message_sender import message_manager
-from ..storage.storage import MessageStorage
+from .storage import MessageStorage
__all__ = [

View File

@@ -3,13 +3,13 @@ from typing import Dict, Any
from src.common.logger_manager import get_logger
from src.manager.mood_manager import mood_manager # 导入情绪管理器
-from .chat_stream import chat_manager
+from src.chat.message_receive.chat_stream import chat_manager
-from .message import MessageRecv
+from src.chat.message_receive.message import MessageRecv
-from .only_message_process import MessageProcessor
+from src.experimental.only_message_process import MessageProcessor
-from ..PFC.pfc_manager import PFCManager
+from src.experimental.PFC.pfc_manager import PFCManager
-from ..heartFC_chat.heartflow_processor import HeartFCProcessor
+from src.chat.focus_chat.heartflow_processor import HeartFCProcessor
-from ..utils.prompt_builder import Prompt, global_prompt_manager
+from src.chat.utils.prompt_builder import Prompt, global_prompt_manager
-from ...config.config import global_config
+from src.config.config import global_config
# 定义日志配置

View File

@@ -7,7 +7,7 @@ import urllib3
from src.common.logger_manager import get_logger
from .chat_stream import ChatStream
-from .utils_image import image_manager
+from ..utils.utils_image import image_manager
from maim_message import Seg, UserInfo, BaseMessageInfo, MessageBase
from rich.traceback import install

View File

@@ -3,14 +3,14 @@ import asyncio
import time
from asyncio import Task
from typing import Union
-from src.plugins.message.api import global_api
+from src.common.message.api import global_api
# from ...common.database import db # 数据库依赖似乎不需要了,注释掉
from .message import MessageSending, MessageThinking, MessageSet
-from ..storage.storage import MessageStorage
+from .storage import MessageStorage
from ...config.config import global_config
-from .utils import truncate_message, calculate_typing_time, count_messages_between
+from ..utils.utils import truncate_message, calculate_typing_time, count_messages_between
from src.common.logger_manager import get_logger
from rich.traceback import install

View File

@@ -2,8 +2,8 @@ import re
from typing import Union
from ...common.database import db
-from ..chat.message import MessageSending, MessageRecv
+from .message import MessageSending, MessageRecv
-from ..chat.chat_stream import ChatStream
+from .chat_stream import ChatStream
from src.common.logger import get_module_logger
logger = get_module_logger("message_storage")

View File

@@ -157,7 +157,7 @@ class LLMRequest:
completion_tokens: 输出token数
total_tokens: 总token数
user_id: 用户ID默认为system
-request_type: 请求类型(chat/embedding/image/topic/schedule)
+request_type: 请求类型
endpoint: API端点
"""
# 如果 request_type 为 None则使用实例变量中的值

View File

@@ -10,17 +10,17 @@ from maim_message import UserInfo, Seg
from src.common.logger_manager import get_logger
from src.heart_flow.utils_chat import get_chat_type_and_target_info
from src.manager.mood_manager import mood_manager
-from src.plugins.chat.chat_stream import ChatStream, chat_manager
+from src.chat.message_receive.chat_stream import ChatStream, chat_manager
-from src.plugins.person_info.relationship_manager import relationship_manager
+from src.chat.person_info.relationship_manager import relationship_manager
-from src.plugins.respon_info_catcher.info_catcher import info_catcher_manager
+from src.chat.utils.info_catcher import info_catcher_manager
-from src.plugins.utils.timer_calculator import Timer
+from src.chat.utils.timer_calculator import Timer
from .normal_chat_generator import NormalChatGenerator
-from ..chat.message import MessageSending, MessageRecv, MessageThinking, MessageSet
+from ..message_receive.message import MessageSending, MessageRecv, MessageThinking, MessageSet
-from ..chat.message_sender import message_manager
+from src.chat.message_receive.message_sender import message_manager
-from ..chat.utils_image import image_path_to_base64
+from src.chat.utils.utils_image import image_path_to_base64
-from ..emoji_system.emoji_manager import emoji_manager
+from src.chat.emoji_system.emoji_manager import emoji_manager
-from ..willing.willing_manager import willing_manager
+from src.chat.normal_chat.willing.willing_manager import willing_manager
-from ...config.config import global_config
+from src.config.config import global_config
logger = get_logger("chat")

View File

@@ -2,12 +2,12 @@ from typing import List, Optional, Tuple, Union
import random import random
from ..models.utils_model import LLMRequest from ..models.utils_model import LLMRequest
from ...config.config import global_config from ...config.config import global_config
from ..chat.message import MessageThinking from ..message_receive.message import MessageThinking
from .heartflow_prompt_builder import prompt_builder from src.chat.focus_chat.heartflow_prompt_builder import prompt_builder
from ..chat.utils import process_llm_response from src.chat.utils.utils import process_llm_response
from ..utils.timer_calculator import Timer from src.chat.utils.timer_calculator import Timer
from src.common.logger_manager import get_logger from src.common.logger_manager import get_logger
from src.plugins.respon_info_catcher.info_catcher import info_catcher_manager from src.chat.utils.info_catcher import info_catcher_manager
logger = get_logger("llm") logger = get_logger("llm")

View File

@@ -1,9 +1,9 @@
from src.common.logger import LogConfig, WILLING_STYLE_CONFIG, LoguruLogger, get_module_logger from src.common.logger import LogConfig, WILLING_STYLE_CONFIG, LoguruLogger, get_module_logger
from dataclasses import dataclass from dataclasses import dataclass
from ...config.config import global_config, BotConfig from src.config.config import global_config, BotConfig
from ..chat.chat_stream import ChatStream, GroupInfo from src.chat.message_receive.chat_stream import ChatStream, GroupInfo
from ..chat.message import MessageRecv from src.chat.message_receive.message import MessageRecv
from ..person_info.person_info import person_info_manager, PersonInfoManager from src.chat.person_info.person_info import person_info_manager, PersonInfoManager
from abc import ABC, abstractmethod from abc import ABC, abstractmethod
import importlib import importlib
from typing import Dict, Optional from typing import Dict, Optional

View File

@@ -6,7 +6,7 @@ from typing import Any, Callable, Dict
import datetime import datetime
import asyncio import asyncio
import numpy as np import numpy as np
from src.plugins.models.utils_model import LLMRequest from src.chat.models.utils_model import LLMRequest
from src.config.config import global_config from src.config.config import global_config
from src.individuality.individuality import Individuality from src.individuality.individuality import Individuality

View File

@@ -1,5 +1,5 @@
from src.common.logger_manager import get_logger from src.common.logger_manager import get_logger
from ..chat.chat_stream import ChatStream from ..message_receive.chat_stream import ChatStream
import math import math
from bson.decimal128 import Decimal128 from bson.decimal128 import Decimal128
from .person_info import person_info_manager from .person_info import person_info_manager

View File

@@ -1,24 +1,11 @@
from src.config.config import global_config from src.config.config import global_config
# 不再直接使用 db
# from src.common.database import db
# 移除 logger 和 traceback因为错误处理移至 repository
# from src.common.logger import get_module_logger
# import traceback
from typing import List, Dict, Any, Tuple # 确保类型提示被导入 from typing import List, Dict, Any, Tuple # 确保类型提示被导入
import time # 导入 time 模块以获取当前时间 import time # 导入 time 模块以获取当前时间
import random import random
import re import re
# 导入新的 repository 函数
from src.common.message_repository import find_messages, count_messages from src.common.message_repository import find_messages, count_messages
# 导入 PersonInfoManager 和时间转换工具
from src.plugins.person_info.person_info import person_info_manager from src.chat.person_info.person_info import person_info_manager
from src.plugins.chat.utils import translate_timestamp_to_human_readable from src.chat.utils.utils import translate_timestamp_to_human_readable
# 不再需要文件级别的 logger
# logger = get_module_logger(__name__)
def get_raw_msg_by_timestamp( def get_raw_msg_by_timestamp(

View File

@@ -1,5 +1,5 @@
from src.config.config import global_config from src.config.config import global_config
from src.plugins.chat.message import MessageRecv, MessageSending, Message from src.chat.message_receive.message import MessageRecv, MessageSending, Message
from src.common.database import db from src.common.database import db
import time import time
import traceback import traceback

View File

@@ -10,9 +10,9 @@ from pymongo.errors import PyMongoError
from src.common.logger import get_module_logger from src.common.logger import get_module_logger
from src.manager.mood_manager import mood_manager from src.manager.mood_manager import mood_manager
from .message import MessageRecv from ..message_receive.message import MessageRecv
from ..models.utils_model import LLMRequest from ..models.utils_model import LLMRequest
from ..utils.typo_generator import ChineseTypoGenerator from .typo_generator import ChineseTypoGenerator
from ...common.database import db from ...common.database import db
from ...config.config import global_config from ...config.config import global_config

View File

@@ -8,7 +8,7 @@ from dotenv import load_dotenv
# 加载 .env 文件 # 加载 .env 文件
env_path = Path(__file__).resolve().parent.parent.parent / ".env" env_path = Path(os.getcwd()) / ".env"
load_dotenv(dotenv_path=env_path) load_dotenv(dotenv_path=env_path)
# 保存原生处理器ID # 保存原生处理器ID
@@ -29,8 +29,7 @@ _handler_registry: dict[str, List[int]] = {}
_custom_style_handlers: dict[Tuple[str, str], List[int]] = {} # 记录自定义样式处理器ID _custom_style_handlers: dict[Tuple[str, str], List[int]] = {} # 记录自定义样式处理器ID
# 获取日志存储根地址 # 获取日志存储根地址
current_file_path = Path(__file__).resolve()
ROOT_PATH = os.path.abspath(os.path.join(current_file_path, "..", "..")) ROOT_PATH = os.getcwd()
LOG_ROOT = str(ROOT_PATH) + "/" + "logs" LOG_ROOT = str(ROOT_PATH) + "/" + "logs"
SIMPLE_OUTPUT = os.getenv("SIMPLE_OUTPUT", "false").strip().lower() SIMPLE_OUTPUT = os.getenv("SIMPLE_OUTPUT", "false").strip().lower()
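
Both hunks in this file switch from __file__-relative path resolution to the current working directory, so .env and logs/ are now looked up relative to wherever the process is launched. A minimal standalone sketch of the new behaviour (assuming the python-dotenv package, which load_dotenv comes from):

    import os
    from pathlib import Path
    from dotenv import load_dotenv

    # .env is read from the working directory, not from the source tree.
    env_path = Path(os.getcwd()) / ".env"
    load_dotenv(dotenv_path=env_path)

    # The log root follows the same rule: <cwd>/logs
    ROOT_PATH = os.getcwd()
    LOG_ROOT = str(ROOT_PATH) + "/" + "logs"

One practical consequence: launching the bot from a different directory will look for .env there and create a fresh logs/ folder there, instead of writing next to the code.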

View File

@@ -9,7 +9,6 @@ from src.common.logger import (
RELATION_STYLE_CONFIG, RELATION_STYLE_CONFIG,
CONFIG_STYLE_CONFIG, CONFIG_STYLE_CONFIG,
HEARTFLOW_STYLE_CONFIG, HEARTFLOW_STYLE_CONFIG,
SCHEDULE_STYLE_CONFIG,
LLM_STYLE_CONFIG, LLM_STYLE_CONFIG,
CHAT_STYLE_CONFIG, CHAT_STYLE_CONFIG,
EMOJI_STYLE_CONFIG, EMOJI_STYLE_CONFIG,
@@ -56,7 +55,6 @@ MODULE_LOGGER_CONFIGS = {
"relation": RELATION_STYLE_CONFIG, # 关系 "relation": RELATION_STYLE_CONFIG, # 关系
"config": CONFIG_STYLE_CONFIG, # 配置 "config": CONFIG_STYLE_CONFIG, # 配置
"heartflow": HEARTFLOW_STYLE_CONFIG, # 麦麦大脑袋 "heartflow": HEARTFLOW_STYLE_CONFIG, # 麦麦大脑袋
"schedule": SCHEDULE_STYLE_CONFIG, # 在干嘛
"llm": LLM_STYLE_CONFIG, # 麦麦组织语言 "llm": LLM_STYLE_CONFIG, # 麦麦组织语言
"chat": CHAT_STYLE_CONFIG, # 见闻 "chat": CHAT_STYLE_CONFIG, # 见闻
"emoji": EMOJI_STYLE_CONFIG, # 表情包 "emoji": EMOJI_STYLE_CONFIG, # 表情包

View File

@@ -2,7 +2,6 @@ import os
import re import re
from dataclasses import dataclass, field from dataclasses import dataclass, field
from typing import Dict, List, Optional from typing import Dict, List, Optional
from dateutil import tz
import tomli import tomli
import tomlkit import tomlkit
@@ -167,13 +166,6 @@ class BotConfig:
gender: str = "" # 性别 gender: str = "" # 性别
appearance: str = "用几句话描述外貌特征" # 外貌特征 appearance: str = "用几句话描述外貌特征" # 外貌特征
# schedule
ENABLE_SCHEDULE_GEN: bool = False # 是否启用日程生成
PROMPT_SCHEDULE_GEN = "无日程"
SCHEDULE_DOING_UPDATE_INTERVAL: int = 300 # 日程表更新间隔 单位秒
SCHEDULE_TEMPERATURE: float = 0.5 # 日程表温度建议0.5-1.0
TIME_ZONE: str = "Asia/Shanghai" # 时区
# chat # chat
allow_focus_mode: bool = True # 是否允许专注聊天状态 allow_focus_mode: bool = True # 是否允许专注聊天状态
@@ -374,24 +366,6 @@ class BotConfig:
config.gender = identity_config.get("gender", config.gender) config.gender = identity_config.get("gender", config.gender)
config.appearance = identity_config.get("appearance", config.appearance) config.appearance = identity_config.get("appearance", config.appearance)
def schedule(parent: dict):
schedule_config = parent["schedule"]
config.ENABLE_SCHEDULE_GEN = schedule_config.get("enable_schedule_gen", config.ENABLE_SCHEDULE_GEN)
config.PROMPT_SCHEDULE_GEN = schedule_config.get("prompt_schedule_gen", config.PROMPT_SCHEDULE_GEN)
config.SCHEDULE_DOING_UPDATE_INTERVAL = schedule_config.get(
"schedule_doing_update_interval", config.SCHEDULE_DOING_UPDATE_INTERVAL
)
logger.info(
f"载入自定义日程prompt:{schedule_config.get('prompt_schedule_gen', config.PROMPT_SCHEDULE_GEN)}"
)
if config.INNER_VERSION in SpecifierSet(">=1.0.2"):
config.SCHEDULE_TEMPERATURE = schedule_config.get("schedule_temperature", config.SCHEDULE_TEMPERATURE)
time_zone = schedule_config.get("time_zone", config.TIME_ZONE)
if tz.gettz(time_zone) is None:
logger.error(f"无效的时区: {time_zone},使用默认值: {config.TIME_ZONE}")
else:
config.TIME_ZONE = time_zone
def emoji(parent: dict): def emoji(parent: dict):
emoji_config = parent["emoji"] emoji_config = parent["emoji"]
config.EMOJI_CHECK_INTERVAL = emoji_config.get("check_interval", config.EMOJI_CHECK_INTERVAL) config.EMOJI_CHECK_INTERVAL = emoji_config.get("check_interval", config.EMOJI_CHECK_INTERVAL)
@@ -681,7 +655,6 @@ class BotConfig:
"groups": {"func": groups, "support": ">=0.0.0"}, "groups": {"func": groups, "support": ">=0.0.0"},
"personality": {"func": personality, "support": ">=0.0.0"}, "personality": {"func": personality, "support": ">=0.0.0"},
"identity": {"func": identity, "support": ">=1.2.4"}, "identity": {"func": identity, "support": ">=1.2.4"},
"schedule": {"func": schedule, "support": ">=0.0.11", "necessary": False},
"emoji": {"func": emoji, "support": ">=0.0.0"}, "emoji": {"func": emoji, "support": ">=0.0.0"},
"model": {"func": model, "support": ">=0.0.0"}, "model": {"func": model, "support": ">=0.0.0"},
"memory": {"func": memory, "support": ">=0.0.0", "necessary": False}, "memory": {"func": memory, "support": ">=0.0.0", "necessary": False},

View File

@@ -1,14 +1,14 @@
import time import time
from typing import Tuple, Optional # 增加了 Optional from typing import Tuple, Optional # 增加了 Optional
from src.common.logger_manager import get_logger from src.common.logger_manager import get_logger
from ..models.utils_model import LLMRequest from src.chat.models.utils_model import LLMRequest
from ...config.config import global_config from src.config.config import global_config
from .chat_observer import ChatObserver from src.experimental.PFC.chat_observer import ChatObserver
from .pfc_utils import get_items_from_json from src.experimental.PFC.pfc_utils import get_items_from_json
from src.individuality.individuality import Individuality from src.individuality.individuality import Individuality
from .observation_info import ObservationInfo from src.experimental.PFC.observation_info import ObservationInfo
from .conversation_info import ConversationInfo from src.experimental.PFC.conversation_info import ConversationInfo
from src.plugins.utils.chat_message_builder import build_readable_messages from src.chat.utils.chat_message_builder import build_readable_messages
logger = get_logger("pfc_action_planner") logger = get_logger("pfc_action_planner")

View File

@@ -4,9 +4,13 @@ import traceback
from typing import Optional, Dict, Any, List from typing import Optional, Dict, Any, List
from src.common.logger import get_module_logger from src.common.logger import get_module_logger
from maim_message import UserInfo from maim_message import UserInfo
from ...config.config import global_config from src.config.config import global_config
from .chat_states import NotificationManager, create_new_message_notification, create_cold_chat_notification from src.experimental.PFC.chat_states import (
    NotificationManager,
    create_new_message_notification,
    create_cold_chat_notification,
)
from .message_storage import MongoDBMessageStorage from src.experimental.PFC.message_storage import MongoDBMessageStorage
from rich.traceback import install from rich.traceback import install
install(extra_lines=3) install(extra_lines=3)

View File

@@ -3,11 +3,11 @@ import asyncio
import datetime import datetime
# from .message_storage import MongoDBMessageStorage # from .message_storage import MongoDBMessageStorage
from src.plugins.utils.chat_message_builder import build_readable_messages, get_raw_msg_before_timestamp_with_chat from src.chat.utils.chat_message_builder import build_readable_messages, get_raw_msg_before_timestamp_with_chat
# from ...config.config import global_config # from ...config.config import global_config
from typing import Dict, Any, Optional from typing import Dict, Any, Optional
from ..chat.message import Message from src.chat.message_receive.message import Message
from .pfc_types import ConversationState from .pfc_types import ConversationState
from .pfc import ChatObserver, GoalAnalyzer from .pfc import ChatObserver, GoalAnalyzer
from .message_sender import DirectMessageSender from .message_sender import DirectMessageSender
@@ -16,9 +16,9 @@ from .action_planner import ActionPlanner
from .observation_info import ObservationInfo from .observation_info import ObservationInfo
from .conversation_info import ConversationInfo # 确保导入 ConversationInfo from .conversation_info import ConversationInfo # 确保导入 ConversationInfo
from .reply_generator import ReplyGenerator from .reply_generator import ReplyGenerator
from ..chat.chat_stream import ChatStream from src.chat.message_receive.chat_stream import ChatStream
from maim_message import UserInfo from src.chat.message_receive.message import UserInfo
from src.plugins.chat.chat_stream import chat_manager from src.chat.message_receive.chat_stream import chat_manager
from .pfc_KnowledgeFetcher import KnowledgeFetcher from .pfc_KnowledgeFetcher import KnowledgeFetcher
from .waiter import Waiter from .waiter import Waiter

View File

@@ -1,13 +1,13 @@
import time import time
from typing import Optional from typing import Optional
from src.common.logger import get_module_logger from src.common.logger import get_module_logger
from ..chat.chat_stream import ChatStream from src.chat.message_receive.chat_stream import ChatStream
from ..chat.message import Message from src.chat.message_receive.message import Message
from maim_message import UserInfo, Seg from maim_message import UserInfo, Seg
from src.plugins.chat.message import MessageSending, MessageSet from src.chat.message_receive.message import MessageSending, MessageSet
from src.plugins.chat.message_sender import message_manager from src.chat.message_receive.message_sender import message_manager
from ..storage.storage import MessageStorage from src.chat.message_receive.storage import MessageStorage
from ...config.config import global_config from src.config.config import global_config
from rich.traceback import install from rich.traceback import install
install(extra_lines=3) install(extra_lines=3)

View File

@@ -2,9 +2,9 @@ from typing import List, Optional, Dict, Any, Set
from maim_message import UserInfo from maim_message import UserInfo
import time import time
from src.common.logger import get_module_logger from src.common.logger import get_module_logger
from .chat_observer import ChatObserver from src.experimental.PFC.chat_observer import ChatObserver
from .chat_states import NotificationHandler, NotificationType, Notification from src.experimental.PFC.chat_states import NotificationHandler, NotificationType, Notification
from src.plugins.utils.chat_message_builder import build_readable_messages from src.chat.utils.chat_message_builder import build_readable_messages
import traceback # 导入 traceback 用于调试 import traceback # 导入 traceback 用于调试
logger = get_module_logger("observation_info") logger = get_module_logger("observation_info")

View File

@@ -1,13 +1,13 @@
from typing import List, Tuple, TYPE_CHECKING from typing import List, Tuple, TYPE_CHECKING
from src.common.logger import get_module_logger from src.common.logger import get_module_logger
from ..models.utils_model import LLMRequest from src.chat.models.utils_model import LLMRequest
from ...config.config import global_config from src.config.config import global_config
from .chat_observer import ChatObserver from src.experimental.PFC.chat_observer import ChatObserver
from .pfc_utils import get_items_from_json from src.experimental.PFC.pfc_utils import get_items_from_json
from src.individuality.individuality import Individuality from src.individuality.individuality import Individuality
from .conversation_info import ConversationInfo from src.experimental.PFC.conversation_info import ConversationInfo
from .observation_info import ObservationInfo from src.experimental.PFC.observation_info import ObservationInfo
from src.plugins.utils.chat_message_builder import build_readable_messages from src.chat.utils.chat_message_builder import build_readable_messages
from rich.traceback import install from rich.traceback import install
install(extra_lines=3) install(extra_lines=3)

View File

@@ -1,11 +1,11 @@
from typing import List, Tuple from typing import List, Tuple
from src.common.logger import get_module_logger from src.common.logger import get_module_logger
from src.plugins.memory_system.Hippocampus import HippocampusManager from src.chat.memory_system.Hippocampus import HippocampusManager
from ..models.utils_model import LLMRequest from src.chat.models.utils_model import LLMRequest
from ...config.config import global_config from src.config.config import global_config
from ..chat.message import Message from src.chat.message_receive.message import Message
from ..knowledge.knowledge_lib import qa_manager from src.chat.knowledge.knowledge_lib import qa_manager
from ..utils.chat_message_builder import build_readable_messages from src.chat.utils.chat_message_builder import build_readable_messages
logger = get_module_logger("knowledge_fetcher") logger = get_module_logger("knowledge_fetcher")

Some files were not shown because too many files have changed in this diff.