pass ruff

SengokuCola
2025-05-28 20:44:26 +08:00
parent bc489861d3
commit 218d0d4a5d
16 changed files with 45 additions and 63 deletions

View File

@@ -16,7 +16,6 @@ from src.chat.utils.info_catcher import info_catcher_manager
from src.chat.heart_flow.utils_chat import get_chat_type_and_target_info
from src.chat.message_receive.chat_stream import ChatStream
from src.chat.focus_chat.hfc_utils import parse_thinking_id_to_timestamp
from src.individuality.individuality import individuality
from src.chat.utils.prompt_builder import Prompt, global_prompt_manager
from src.chat.utils.chat_message_builder import build_readable_messages, get_raw_msg_before_timestamp_with_chat
import time
@@ -106,10 +105,7 @@ class DefaultExpressor:
user_nickname=global_config.bot.nickname,
platform=messageinfo.platform,
)
# logger.debug(f"创建思考消息:{anchor_message}")
# logger.debug(f"创建思考消息chat{chat}")
# logger.debug(f"创建思考消息bot_user_info{bot_user_info}")
# logger.debug(f"创建思考消息messageinfo{messageinfo}")
thinking_message = MessageThinking(
message_id=thinking_id,
chat_stream=chat,
@@ -281,14 +277,14 @@ class DefaultExpressor:
in_mind_reply,
target_message,
) -> str:
prompt_personality = individuality.get_prompt(x_person=0, level=2)
# prompt_personality = individuality.get_prompt(x_person=0, level=2)
# Determine if it's a group chat
is_group_chat = bool(chat_stream.group_info)
# Use sender_name passed from caller for private chat, otherwise use a default for group
# Default sender_name for group chat isn't used in the group prompt template, but set for consistency
effective_sender_name = sender_name if not is_group_chat else "某人"
# effective_sender_name = sender_name if not is_group_chat else "某人"
message_list_before_now = get_raw_msg_before_timestamp_with_chat(
chat_id=chat_stream.stream_id,
@@ -377,7 +373,11 @@ class DefaultExpressor:
# --- Sender --- #
async def send_response_messages(
self, anchor_message: Optional[MessageRecv], response_set: List[Tuple[str, str]], thinking_id: str = "", display_message: str = ""
self,
anchor_message: Optional[MessageRecv],
response_set: List[Tuple[str, str]],
thinking_id: str = "",
display_message: str = "",
) -> Optional[MessageSending]:
"""发送回复消息 (尝试锚定到 anchor_message),使用 HeartFCSender"""
chat = self.chat_stream
@@ -412,10 +412,9 @@ class DefaultExpressor:
# Generate a unique ID for each message segment
type = msg_text[0]
data = msg_text[1]
if global_config.experimental.debug_show_chat_mode and type == "text":
data += ""
part_message_id = f"{thinking_id}_{i}"
message_segment = Seg(type=type, data=data)

View File

@@ -379,12 +379,14 @@ class HeartFChatting:
for processor in self.processors:
processor_name = processor.__class__.log_prefix
# Wrap in a coroutine closure (default-argument binding) so each task carries its own processor
async def run_with_timeout(proc=processor):
return await asyncio.wait_for(
proc.process_info(observations=observations, running_memorys=running_memorys),
timeout=PROCESSOR_TIMEOUT
timeout=PROCESSOR_TIMEOUT,
)
task = asyncio.create_task(run_with_timeout())
processor_tasks.append(task)
task_to_name_map[task] = processor_name
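
The closure above binds each processor through a default argument, so every task keeps its own processor reference, and asyncio.wait_for enforces a per-processor deadline. A minimal self-contained sketch of the same pattern (DummyProcessor and the timeout value below are illustrative, not taken from this codebase):

import asyncio

PROCESSOR_TIMEOUT = 2.0  # seconds; illustrative value

class DummyProcessor:
    def __init__(self, name: str, delay: float):
        self.name = name
        self.delay = delay

    async def process_info(self) -> str:
        await asyncio.sleep(self.delay)
        return f"{self.name} done"

async def main():
    processors = [DummyProcessor("fast", 0.1), DummyProcessor("slow", 5.0)]
    task_to_name = {}
    for processor in processors:
        # Default-argument binding: each closure captures its own processor,
        # not the loop variable's final value.
        async def run_with_timeout(proc=processor):
            return await asyncio.wait_for(proc.process_info(), timeout=PROCESSOR_TIMEOUT)

        task_to_name[asyncio.create_task(run_with_timeout())] = processor.name

    for task, name in task_to_name.items():
        try:
            print(name, await task)
        except asyncio.TimeoutError:
            print(name, "timed out")

asyncio.run(main())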

View File

@@ -1,4 +1,3 @@
import time
import traceback
from ..memory_system.Hippocampus import HippocampusManager
from ...config.config import global_config
@@ -73,12 +72,12 @@ async def _calculate_interest(message: MessageRecv) -> Tuple[float, bool]:
text_len = len(message.processed_plain_text)
# Scale interest with text length: longer messages earn more interest, but growth tapers off (min 0.01, max 0.05)
# A logarithmic function provides the diminishing growth
base_interest = 0.01 + (0.05 - 0.01) * (math.log10(text_len + 1) / math.log10(1000 + 1))
base_interest = min(max(base_interest, 0.01), 0.05)
interested_rate += base_interest
logger.trace(f"记忆激活率: {interested_rate:.2f}")
if is_mentioned:
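
For reference, the clamped logarithmic curve used above can be checked in isolation; a small sketch (the helper name is illustrative, the constants mirror the hunk):

import math

def base_interest(text_len: int) -> float:
    # Interest grows with message length but with diminishing returns (log10),
    # and is clamped to the [0.01, 0.05] range.
    raw = 0.01 + (0.05 - 0.01) * (math.log10(text_len + 1) / math.log10(1000 + 1))
    return min(max(raw, 0.01), 0.05)

# Roughly: 10 chars -> ~0.024, 100 chars -> ~0.037, 1000+ chars -> 0.05
for n in (10, 100, 1000):
    print(n, round(base_interest(n), 3))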
@@ -220,11 +219,7 @@ class HeartFCMessageReceiver:
# 7. Logging
mes_name = chat.group_info.group_name if chat.group_info else "私聊"
# current_time = time.strftime("%H:%M:%S", time.localtime(message.message_info.time))
logger.info(
f"[{mes_name}]"
f"{userinfo.user_nickname}:"
f"{message.processed_plain_text}"
)
logger.info(f"[{mes_name}]{userinfo.user_nickname}:{message.processed_plain_text}")
# 8. Relationship handling
if global_config.relationship.give_name:

View File

@@ -8,8 +8,6 @@ from src.config.config import global_config
logger = get_logger("background_tasks")
# New: interval for the private-chat activation check
PRIVATE_CHAT_ACTIVATION_CHECK_INTERVAL_SECONDS = 5 # Similar to the interest evaluation; set to 5 seconds
@@ -136,9 +134,7 @@ class BackgroundTaskManager:
# Step 3: clear the task list
self._tasks = [] # Reset the task list
# State transition handling
# State transition handling
async def _perform_cleanup_work(self):
"""执行子心流清理任务
@@ -165,13 +161,11 @@ class BackgroundTaskManager:
# Log the final cleanup result
logger.info(f"[清理任务] 清理完成, 共停止 {stopped_count}/{len(flows_to_stop)} 个子心流")
async def _run_cleanup_cycle(self):
await _run_periodic_loop(
task_name="Subflow Cleanup", interval=CLEANUP_INTERVAL_SECONDS, task_func=self._perform_cleanup_work
)
# New: private-chat activation task runner
async def _run_private_chat_activation_cycle(self, interval: int):
await _run_periodic_loop(

View File

@@ -15,7 +15,6 @@ class Heartflow:
"""
def __init__(self):
# Sub-heartflow management (current_state is passed in at initialization)
self.subheartflow_manager: SubHeartflowManager = SubHeartflowManager()

View File

@@ -18,6 +18,7 @@ logger = get_logger("sub_heartflow")
install(extra_lines=3)
class SubHeartflow:
def __init__(
self,

View File

@@ -24,7 +24,7 @@ class MessageStorage:
else:
filtered_processed_plain_text = ""
if isinstance(message,MessageSending):
if isinstance(message, MessageSending):
display_message = message.display_message
if display_message:
filtered_display_message = re.sub(pattern, "", display_message, flags=re.DOTALL)

View File

@@ -37,7 +37,7 @@ class NormalChat:
self.is_group_chat: bool = False
self.chat_target_info: Optional[dict] = None
self.willing_amplifier = 1
# Other sync initializations
@@ -56,7 +56,6 @@ class NormalChat:
self._disabled = False # Added disable flag
async def initialize(self):
"""异步初始化,获取聊天类型和目标信息。"""
if self._initialized:
@@ -210,7 +209,7 @@ class NormalChat:
try:
# Process the message
self.adjust_reply_frequency()
await self.normal_response(
message=message,
is_mentioned=is_mentioned,
@@ -233,7 +232,7 @@ class NormalChat:
timing_results = {}
reply_probability = 1.0 if is_mentioned else 0.0 # Base probability is 1 when mentioned; otherwise willingness decides
# Willingness manager: register the current message's info
willing_manager.setup(message, self.chat_stream, is_mentioned, interested_rate)
@@ -306,7 +305,7 @@ class NormalChat:
return # Skip the remaining steps
logger.info(f"[{self.stream_name}] 回复内容: {response_set}")
if self._disabled:
logger.info(f"[{self.stream_name}] 已停用,忽略 normal_response。")
return
@@ -340,13 +339,11 @@ class NormalChat:
# Check whether to switch to focus mode
await self._check_switch_to_focus()
info_catcher.done_catch()
with Timer("处理表情包", timing_results):
await self._handle_emoji(message, response_set[0])
with Timer("关系更新", timing_results):
await self._update_relationship(message, response_set)
@@ -479,8 +476,7 @@ class NormalChat:
await self.on_switch_to_focus_callback()
except Exception as e:
logger.error(f"[{self.stream_name}] 触发切换到focus模式时出错: {e}\n{traceback.format_exc()}")
def adjust_reply_frequency(self, duration: int = 10):
"""
Adjust the reply frequency
@@ -492,16 +488,15 @@ class NormalChat:
print(f"[{self.stream_name}] 最近{duration}分钟内回复数量: {bot_reply_count}")
total_message_count = stats["total_message_count"]
print(f"[{self.stream_name}] 最近{duration}分钟内消息总数: {total_message_count}")
# Compute the reply frequency
_reply_frequency = bot_reply_count / total_message_count
# If the reply rate is below the configured talk_frequency, increase the reply probability
if bot_reply_count/duration < global_config.normal_chat.talk_frequency:
if bot_reply_count / duration < global_config.normal_chat.talk_frequency:
# differ = global_config.normal_chat.talk_frequency - reply_frequency
logger.info(f"[{self.stream_name}] 回复频率低于{global_config.normal_chat.talk_frequency},增加回复概率")
self.willing_amplifier += 0.1
else:
logger.info(f"[{self.stream_name}] 回复频率高于{global_config.normal_chat.talk_frequency},减少回复概率")
self.willing_amplifier -= 0.1
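
The branch above compares the bot's replies per minute against normal_chat.talk_frequency and nudges willing_amplifier by 0.1 in the corresponding direction. A standalone sketch of that rule with a worked example (the helper and the sample numbers are illustrative):

def adjust_amplifier(amplifier: float, bot_reply_count: int, duration: int, talk_frequency: float) -> float:
    # Replies per minute below the configured frequency -> reply more; above it -> reply less.
    if bot_reply_count / duration < talk_frequency:
        return amplifier + 0.1
    return amplifier - 0.1

# Example: 2 replies in 10 minutes is a rate of 0.2; with talk_frequency = 0.5
# that is below target, so the amplifier rises from 1.0 to 1.1.
print(adjust_amplifier(1.0, 2, 10, 0.5))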

View File

@@ -64,11 +64,12 @@ class NormalChatGenerator:
async def _generate_response_with_model(self, message: MessageThinking, model: LLMRequest, thinking_id: str):
info_catcher = info_catcher_manager.get_info_catcher(thinking_id)
person_id = person_info_manager.get_person_id(
message.chat_stream.user_info.platform, message.chat_stream.user_info.user_id
)
person_id = person_info_manager.get_person_id(message.chat_stream.user_info.platform, message.chat_stream.user_info.user_id)
person_name = await person_info_manager.get_value(person_id, "person_name")
if message.chat_stream.user_info.user_cardname and message.chat_stream.user_info.user_nickname:
sender_name = (
f"[{message.chat_stream.user_info.user_nickname}]"
@@ -78,8 +79,7 @@ class NormalChatGenerator:
sender_name = f"[{message.chat_stream.user_info.user_nickname}]你叫ta{person_name}"
else:
sender_name = f"用户({message.chat_stream.user_info.user_id})"
# Build the prompt
with Timer() as t_build_prompt:
prompt = await prompt_builder.build_prompt(

View File

@@ -27,7 +27,4 @@ def get_recent_message_stats(minutes: int = 30, chat_id: str = None) -> dict:
bot_filter["user_id"] = bot_id
bot_reply_count = count_messages(bot_filter)
return {
"bot_reply_count": bot_reply_count,
"total_message_count": total_message_count
}
return {"bot_reply_count": bot_reply_count, "total_message_count": total_message_count}

View File

@@ -17,7 +17,6 @@ logger = get_logger("prompt")
def init_prompt():
Prompt("你正在qq群里聊天下面是群里在聊的内容", "chat_target_group1")
Prompt("你正在和{sender_name}聊天,这是你们之前聊的内容:", "chat_target_private1")
Prompt("在群里聊天", "chat_target_group2")

View File

@@ -10,6 +10,7 @@ from rich.traceback import install
install(extra_lines=3)
def get_raw_msg_by_timestamp(
timestamp_start: float, timestamp_end: float, limit: int = 0, limit_mode: str = "latest"
) -> List[Dict[str, Any]]:
@@ -198,7 +199,7 @@ async def _build_readable_messages_internal(
content = msg.get("display_message")
else:
content = msg.get("processed_plain_text", "") # 默认空字符串
if "" in content:
content = content.replace("", "")
if "" in content:
@@ -465,7 +466,7 @@ async def build_anonymous_messages(messages: List[Dict[str, Any]]) -> str:
content = msg.get("display_message")
else:
content = msg.get("processed_plain_text", "")
if "" in content:
content = content.replace("", "")
if "" in content:

View File

@@ -59,7 +59,7 @@ class ChatConfig(ConfigBase):
chat_mode: str = "normal"
"""聊天模式"""
auto_focus_threshold: float = 1.0
"""自动切换到专注聊天的阈值,越低越容易进入专注聊天"""
@@ -132,8 +132,6 @@ class NormalChatConfig(ConfigBase):
class FocusChatConfig(ConfigBase):
"""专注聊天配置类"""
observation_context_size: int = 12
"""可观察到的最长上下文大小,超过这个值的上下文会被压缩"""
@@ -346,7 +344,7 @@ class TelemetryConfig(ConfigBase):
class ExperimentalConfig(ConfigBase):
"""实验功能配置类"""
debug_show_chat_mode: bool = True
debug_show_chat_mode: bool = False
"""是否在回复后显示当前聊天模式"""
enable_friend_chat: bool = False

View File

@@ -2,5 +2,6 @@
# Import all action modules to ensure their decorators run
from . import test_action # noqa
# from . import online_action # noqa
from . import mute_action # noqa

View File

@@ -57,14 +57,15 @@ class MuteAction(PluginAction):
# Clamp duration to a valid range and make sure it ends up as a string
if int(duration) < 60:
duration = 60
if int(duration) > 3600*24*30:
duration = 3600*24*30
if int(duration) > 3600 * 24 * 30:
duration = 3600 * 24 * 30
duration_str = str(int(duration))
# Send the group mute command, using the new format
await self.send_message(
type = "command", data = {"name": "GROUP_BAN", "args": {"qq_id": str(user_id), "duration": duration_str}},
display_message = f"我 禁言了 {target} {duration_str}"
type="command",
data={"name": "GROUP_BAN", "args": {"qq_id": str(user_id), "duration": duration_str}},
display_message=f"我 禁言了 {target} {duration_str}",
)
logger.info(f"{self.log_prefix} 成功发送禁言命令,用户 {target}({user_id}),时长 {duration}")

View File

@@ -335,7 +335,7 @@ key_file = "" # SSL key file path; only effective when use_wss=true
enable = true
[experimental] # Experimental features
debug_show_chat_mode = true # Whether to show the current chat mode after replying
debug_show_chat_mode = false # Whether to show the current chat mode after replying
enable_friend_chat = false # Whether to enable friend chat
pfc_chatting = false # Currently has no effect