pass ruff

SengokuCola
2025-05-28 20:44:26 +08:00
parent bc489861d3
commit 218d0d4a5d
16 changed files with 45 additions and 63 deletions

View File

@@ -16,7 +16,6 @@ from src.chat.utils.info_catcher import info_catcher_manager
from src.chat.heart_flow.utils_chat import get_chat_type_and_target_info
from src.chat.message_receive.chat_stream import ChatStream
from src.chat.focus_chat.hfc_utils import parse_thinking_id_to_timestamp
-from src.individuality.individuality import individuality
from src.chat.utils.prompt_builder import Prompt, global_prompt_manager
from src.chat.utils.chat_message_builder import build_readable_messages, get_raw_msg_before_timestamp_with_chat
import time
@@ -106,10 +105,7 @@ class DefaultExpressor:
user_nickname=global_config.bot.nickname,
platform=messageinfo.platform,
)
# logger.debug(f"创建思考消息:{anchor_message}")
# logger.debug(f"创建思考消息chat{chat}")
# logger.debug(f"创建思考消息bot_user_info{bot_user_info}")
# logger.debug(f"创建思考消息messageinfo{messageinfo}")
thinking_message = MessageThinking(
message_id=thinking_id,
chat_stream=chat,
@@ -281,14 +277,14 @@ class DefaultExpressor:
in_mind_reply,
target_message,
) -> str:
-prompt_personality = individuality.get_prompt(x_person=0, level=2)
+# prompt_personality = individuality.get_prompt(x_person=0, level=2)
# Determine if it's a group chat
is_group_chat = bool(chat_stream.group_info)
# Use sender_name passed from caller for private chat, otherwise use a default for group
# Default sender_name for group chat isn't used in the group prompt template, but set for consistency
-effective_sender_name = sender_name if not is_group_chat else "某人"
+# effective_sender_name = sender_name if not is_group_chat else "某人"
message_list_before_now = get_raw_msg_before_timestamp_with_chat(
chat_id=chat_stream.stream_id,
@@ -377,7 +373,11 @@ class DefaultExpressor:
# --- Sender --- #
async def send_response_messages(
-self, anchor_message: Optional[MessageRecv], response_set: List[Tuple[str, str]], thinking_id: str = "", display_message: str = ""
+self,
+anchor_message: Optional[MessageRecv],
+response_set: List[Tuple[str, str]],
+thinking_id: str = "",
+display_message: str = "",
) -> Optional[MessageSending]:
"""Send the reply messages (trying to anchor to anchor_message), using HeartFCSender"""
chat = self.chat_stream
@@ -416,7 +416,6 @@ class DefaultExpressor:
if global_config.experimental.debug_show_chat_mode and type == "text":
data += ""
part_message_id = f"{thinking_id}_{i}"
message_segment = Seg(type=type, data=data)

View File

@@ -379,12 +379,14 @@ class HeartFChatting:
for processor in self.processors:
processor_name = processor.__class__.log_prefix
# Wrapped with a lambda/closure for convenient argument passing
async def run_with_timeout(proc=processor):
return await asyncio.wait_for(
proc.process_info(observations=observations, running_memorys=running_memorys),
-timeout=PROCESSOR_TIMEOUT
+timeout=PROCESSOR_TIMEOUT,
)
task = asyncio.create_task(run_with_timeout())
processor_tasks.append(task)
task_to_name_map[task] = processor_name
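A note on the pattern above: each processor call is wrapped in a small coroutine so that asyncio.wait_for can enforce a per-processor timeout, and the loop variable is bound through a default argument (proc=processor) to avoid Python's late-binding closure pitfall. Below is a minimal, self-contained sketch of the same idea; EchoProcessor, the observation values, and the PROCESSOR_TIMEOUT value are stand-ins rather than the project's real objects, and the gather(return_exceptions=True) call is only this sketch's choice for collecting results.

import asyncio

PROCESSOR_TIMEOUT = 5.0  # stand-in value

class EchoProcessor:
    log_prefix = "[echo]"

    async def process_info(self, observations=None, running_memorys=None):
        await asyncio.sleep(0.1)  # simulate work
        return observations

async def main():
    processors = [EchoProcessor(), EchoProcessor()]
    tasks, task_to_name_map = [], {}
    for processor in processors:
        processor_name = processor.__class__.log_prefix

        # The default argument binds the current processor; a bare closure would see only the last one.
        async def run_with_timeout(proc=processor):
            return await asyncio.wait_for(
                proc.process_info(observations=["obs"], running_memorys=[]),
                timeout=PROCESSOR_TIMEOUT,
            )

        task = asyncio.create_task(run_with_timeout())
        tasks.append(task)
        task_to_name_map[task] = processor_name

    # return_exceptions=True keeps one timed-out processor from failing the whole batch.
    results = await asyncio.gather(*tasks, return_exceptions=True)
    for task, result in zip(tasks, results):
        print(task_to_name_map[task], result)

asyncio.run(main())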

View File

@@ -1,4 +1,3 @@
import time
import traceback
from ..memory_system.Hippocampus import HippocampusManager
from ...config.config import global_config
@@ -220,11 +219,7 @@ class HeartFCMessageReceiver:
# 7. Logging
mes_name = chat.group_info.group_name if chat.group_info else "私聊"
# current_time = time.strftime("%H:%M:%S", time.localtime(message.message_info.time))
-logger.info(
-f"[{mes_name}]"
-f"{userinfo.user_nickname}:"
-f"{message.processed_plain_text}"
-)
+logger.info(f"[{mes_name}]{userinfo.user_nickname}:{message.processed_plain_text}")
# 8. Relationship handling
if global_config.relationship.give_name:

View File

@@ -8,8 +8,6 @@ from src.config.config import global_config
logger = get_logger("background_tasks")
# New: interval for the private-chat activation check
PRIVATE_CHAT_ACTIVATION_CHECK_INTERVAL_SECONDS = 5  # set to 5 seconds, similar to the interest evaluation
@@ -138,8 +136,6 @@ class BackgroundTaskManager:
# State transition handling
async def _perform_cleanup_work(self):
"""Run the subflow cleanup task
1. Get the list of inactive subflows that need to be cleaned up
@@ -165,13 +161,11 @@ class BackgroundTaskManager:
# Log the final cleanup result
logger.info(f"[清理任务] 清理完成, 共停止 {stopped_count}/{len(flows_to_stop)} 个子心流")
async def _run_cleanup_cycle(self):
await _run_periodic_loop(
task_name="Subflow Cleanup", interval=CLEANUP_INTERVAL_SECONDS, task_func=self._perform_cleanup_work
)
# New: private-chat activation task runner
async def _run_private_chat_activation_cycle(self, interval: int):
await _run_periodic_loop(
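Both cycle runners here hand off to a shared _run_periodic_loop helper whose body is not part of this diff. Purely as an assumption inferred from the call sites (task_name, interval and task_func keyword arguments), a helper of that shape might look roughly like the sketch below; the error handling, logging and demo driver are guesses for illustration, not the project's actual implementation.

import asyncio

async def _run_periodic_loop(task_name: str, interval: float, task_func, **kwargs):
    # Assumed shape: call task_func every `interval` seconds and survive failures.
    while True:
        try:
            await task_func(**kwargs)
        except asyncio.CancelledError:
            raise  # let cancellation propagate so the runner can be shut down
        except Exception as e:
            print(f"[{task_name}] periodic task failed: {e}")
        await asyncio.sleep(interval)

async def _demo_tick():
    print("tick")

async def main():
    # Run the loop briefly, then cancel it, mimicking a background task manager shutdown.
    task = asyncio.create_task(_run_periodic_loop("Demo", 0.2, _demo_tick))
    await asyncio.sleep(0.7)
    task.cancel()
    try:
        await task
    except asyncio.CancelledError:
        pass

asyncio.run(main())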

View File

@@ -15,7 +15,6 @@ class Heartflow:
"""
def __init__(self):
# Subflow management (current_state is passed in at initialization)
self.subheartflow_manager: SubHeartflowManager = SubHeartflowManager()

View File

@@ -18,6 +18,7 @@ logger = get_logger("sub_heartflow")
install(extra_lines=3)
class SubHeartflow:
def __init__(
self,

View File

@@ -56,7 +56,6 @@ class NormalChat:
self._disabled = False  # added a disabled flag
async def initialize(self):
"""Asynchronous initialization: fetch the chat type and target info."""
if self._initialized:
@@ -340,13 +339,11 @@ class NormalChat:
# Check whether a switch to focus mode is needed
await self._check_switch_to_focus()
info_catcher.done_catch()
with Timer("处理表情包", timing_results):
await self._handle_emoji(message, response_set[0])
with Timer("关系更新", timing_results):
await self._update_relationship(message, response_set)
@@ -480,7 +477,6 @@ class NormalChat:
except Exception as e:
logger.error(f"[{self.stream_name}] 触发切换到focus模式时出错: {e}\n{traceback.format_exc()}")
def adjust_reply_frequency(self, duration: int = 10):
"""
Adjust the reply frequency
@@ -504,4 +500,3 @@ class NormalChat:
else:
logger.info(f"[{self.stream_name}] 回复频率高于{global_config.normal_chat.talk_frequency},减少回复概率")
self.willing_amplifier -= 0.1

View File

@@ -64,8 +64,9 @@ class NormalChatGenerator:
async def _generate_response_with_model(self, message: MessageThinking, model: LLMRequest, thinking_id: str):
info_catcher = info_catcher_manager.get_info_catcher(thinking_id)
-person_id = person_info_manager.get_person_id(message.chat_stream.user_info.platform, message.chat_stream.user_info.user_id)
+person_id = person_info_manager.get_person_id(
+message.chat_stream.user_info.platform, message.chat_stream.user_info.user_id
+)
person_name = await person_info_manager.get_value(person_id, "person_name")
@@ -79,7 +80,6 @@ class NormalChatGenerator:
else:
sender_name = f"用户({message.chat_stream.user_info.user_id})"
# Build the prompt
with Timer() as t_build_prompt:
prompt = await prompt_builder.build_prompt(
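Timer appears both bare (with Timer() as t_build_prompt) and with a label plus a timing_results dict in the NormalChat hunk above, but its implementation is not shown in this commit. The context manager below is a rough, assumed reconstruction of that interface for illustration only; the real class may record and report timings differently.

import time
from typing import Optional

class Timer:
    # Assumed interface: Timer() used bare, or Timer("step name", results_dict).
    def __init__(self, name: str = "", storage: Optional[dict] = None):
        self.name = name
        self.storage = storage
        self.elapsed = 0.0

    def __enter__(self):
        self._start = time.perf_counter()
        return self

    def __exit__(self, exc_type, exc, tb):
        self.elapsed = time.perf_counter() - self._start
        if self.storage is not None and self.name:
            self.storage[self.name] = self.elapsed
        return False  # never swallow exceptions from the timed block

timing_results = {}
with Timer("build prompt", timing_results):
    sum(range(100_000))  # placeholder work
with Timer() as t_build_prompt:
    sum(range(100_000))
print(timing_results, t_build_prompt.elapsed)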

View File

@@ -27,7 +27,4 @@ def get_recent_message_stats(minutes: int = 30, chat_id: str = None) -> dict:
bot_filter["user_id"] = bot_id
bot_reply_count = count_messages(bot_filter)
-return {
-"bot_reply_count": bot_reply_count,
-"total_message_count": total_message_count
-}
+return {"bot_reply_count": bot_reply_count, "total_message_count": total_message_count}

View File

@@ -17,7 +17,6 @@ logger = get_logger("prompt")
def init_prompt():
Prompt("你正在qq群里聊天下面是群里在聊的内容", "chat_target_group1")
Prompt("你正在和{sender_name}聊天,这是你们之前聊的内容:", "chat_target_private1")
Prompt("在群里聊天", "chat_target_group2")

View File

@@ -10,6 +10,7 @@ from rich.traceback import install
install(extra_lines=3)
def get_raw_msg_by_timestamp(
timestamp_start: float, timestamp_end: float, limit: int = 0, limit_mode: str = "latest"
) -> List[Dict[str, Any]]:

View File

@@ -132,8 +132,6 @@ class NormalChatConfig(ConfigBase):
class FocusChatConfig(ConfigBase):
"""Focus chat configuration class"""
observation_context_size: int = 12
"""Maximum observable context length; context beyond this limit is compressed"""
@@ -346,7 +344,7 @@ class TelemetryConfig(ConfigBase):
class ExperimentalConfig(ConfigBase):
"""Experimental features configuration class"""
-debug_show_chat_mode: bool = True
+debug_show_chat_mode: bool = False
"""Whether to show the current chat mode after a reply"""
enable_friend_chat: bool = False
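This flips the default of debug_show_chat_mode from True to False, mirrored in bot_config.toml at the end of this commit; the DefaultExpressor hunk earlier gates a text suffix on the same flag (the suffix appended there is an empty string in this diff). The sketch below shows that kind of gate in isolation, with a plain dataclass standing in for the project's ConfigBase and a purely illustrative mode label.

from dataclasses import dataclass

@dataclass
class ExperimentalConfig:
    debug_show_chat_mode: bool = False  # new default: do not label replies with the chat mode
    enable_friend_chat: bool = False

def maybe_tag_reply(text: str, seg_type: str, cfg: ExperimentalConfig, mode_label: str = "[focus]") -> str:
    # Only text segments are tagged, and only when the debug flag is enabled.
    if cfg.debug_show_chat_mode and seg_type == "text":
        return text + mode_label
    return text

print(maybe_tag_reply("hello", "text", ExperimentalConfig()))                           # -> hello
print(maybe_tag_reply("hello", "text", ExperimentalConfig(debug_show_chat_mode=True)))  # -> hello[focus]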

View File

@@ -2,5 +2,6 @@
# Import all action modules so their decorators are executed
from . import test_action # noqa
# from . import online_action # noqa
from . import mute_action # noqa

View File

@@ -63,8 +63,9 @@ class MuteAction(PluginAction):
# Send the group mute command, following the new format
await self.send_message(
-type = "command", data = {"name": "GROUP_BAN", "args": {"qq_id": str(user_id), "duration": duration_str}},
-display_message = f"我 禁言了 {target} {duration_str}"
+type="command",
+data={"name": "GROUP_BAN", "args": {"qq_id": str(user_id), "duration": duration_str}},
+display_message=f"我 禁言了 {target} {duration_str}",
)
logger.info(f"{self.log_prefix} 成功发送禁言命令,用户 {target}({user_id}),时长 {duration}")

View File

@@ -335,7 +335,7 @@ key_file = "" # SSL密钥文件路径仅在use_wss=true时有效
enable = true
[experimental] # experimental features
-debug_show_chat_mode = true # whether to show the current chat mode after a reply
+debug_show_chat_mode = false # whether to show the current chat mode after a reply
enable_friend_chat = false # whether to enable friend (private) chat
pfc_chatting = false # has no effect for now