pass ruff

SengokuCola
2025-05-28 20:44:26 +08:00
parent bc489861d3
commit 218d0d4a5d
16 changed files with 45 additions and 63 deletions

View File

@@ -16,7 +16,6 @@ from src.chat.utils.info_catcher import info_catcher_manager
from src.chat.heart_flow.utils_chat import get_chat_type_and_target_info
from src.chat.message_receive.chat_stream import ChatStream
from src.chat.focus_chat.hfc_utils import parse_thinking_id_to_timestamp
from src.individuality.individuality import individuality
from src.chat.utils.prompt_builder import Prompt, global_prompt_manager
from src.chat.utils.chat_message_builder import build_readable_messages, get_raw_msg_before_timestamp_with_chat
import time
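The dropped import above is presumably the pattern ruff reports as F401 (name imported but unused), since the only use of individuality is commented out later in this commit. As a small illustration outside the project's code, ruff would flag the first import here, and deleting it clears the finding:

import os   # unused -> ruff F401: 'os' imported but unused; removing this line fixes it
import sys

print(sys.version)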
@@ -106,10 +105,7 @@ class DefaultExpressor:
user_nickname=global_config.bot.nickname,
platform=messageinfo.platform,
)
# logger.debug(f"创建思考消息:{anchor_message}")
# logger.debug(f"创建思考消息chat{chat}")
# logger.debug(f"创建思考消息bot_user_info{bot_user_info}")
# logger.debug(f"创建思考消息messageinfo{messageinfo}")
thinking_message = MessageThinking(
message_id=thinking_id,
chat_stream=chat,
@@ -281,14 +277,14 @@ class DefaultExpressor:
in_mind_reply,
target_message,
) -> str:
prompt_personality = individuality.get_prompt(x_person=0, level=2)
# prompt_personality = individuality.get_prompt(x_person=0, level=2)
# Determine if it's a group chat
is_group_chat = bool(chat_stream.group_info)
# Use sender_name passed from caller for private chat, otherwise use a default for group
# Default sender_name for group chat isn't used in the group prompt template, but set for consistency
effective_sender_name = sender_name if not is_group_chat else "某人"
# effective_sender_name = sender_name if not is_group_chat else "某人"
message_list_before_now = get_raw_msg_before_timestamp_with_chat(
chat_id=chat_stream.stream_id,
@@ -377,7 +373,11 @@ class DefaultExpressor:
# --- Sender --- #
async def send_response_messages(
self, anchor_message: Optional[MessageRecv], response_set: List[Tuple[str, str]], thinking_id: str = "", display_message: str = ""
self,
anchor_message: Optional[MessageRecv],
response_set: List[Tuple[str, str]],
thinking_id: str = "",
display_message: str = "",
) -> Optional[MessageSending]:
"""发送回复消息 (尝试锚定到 anchor_message),使用 HeartFCSender"""
chat = self.chat_stream
@@ -412,10 +412,9 @@ class DefaultExpressor:
# Generate a unique ID for each message fragment
type = msg_text[0]
data = msg_text[1]
if global_config.experimental.debug_show_chat_mode and type == "text":
data += ""
part_message_id = f"{thinking_id}_{i}"
message_segment = Seg(type=type, data=data)
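For context, each fragment in the hunk above gets an ID derived from thinking_id plus its position before being wrapped in a Seg. A minimal sketch of that numbering scheme, using a hypothetical stand-in for the project's Seg class:

from dataclasses import dataclass
from typing import List, Tuple

@dataclass
class Seg:  # hypothetical stand-in for the project's Seg segment type
    type: str
    data: str

def build_segments(thinking_id: str, response_set: List[Tuple[str, str]]) -> List[Tuple[str, Seg]]:
    segments = []
    for i, (seg_type, data) in enumerate(response_set):
        part_message_id = f"{thinking_id}_{i}"  # unique id per message fragment
        segments.append((part_message_id, Seg(type=seg_type, data=data)))
    return segments

print(build_segments("tid_123", [("text", "hello"), ("text", "world")]))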

View File

@@ -379,12 +379,14 @@ class HeartFChatting:
for processor in self.processors:
processor_name = processor.__class__.log_prefix
# Wrap in a lambda-style closure so arguments can be passed in easily
async def run_with_timeout(proc=processor):
return await asyncio.wait_for(
proc.process_info(observations=observations, running_memorys=running_memorys),
timeout=PROCESSOR_TIMEOUT
timeout=PROCESSOR_TIMEOUT,
)
task = asyncio.create_task(run_with_timeout())
processor_tasks.append(task)
task_to_name_map[task] = processor_name
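The hunk above binds the loop variable with a default argument (proc=processor) so each task awaits its own processor under asyncio.wait_for. A standalone sketch of the same pattern, with a made-up PROCESSOR_TIMEOUT and placeholder processors rather than the project's real classes:

import asyncio

PROCESSOR_TIMEOUT = 5.0  # placeholder; the real constant lives in the project

async def fake_processor(name: str) -> str:
    # stand-in for processor.process_info(observations=..., running_memorys=...)
    await asyncio.sleep(0.1)
    return f"{name} done"

async def main() -> None:
    task_to_name = {}
    for name in ["mood", "memory", "tool"]:
        async def run_with_timeout(n=name):  # default arg pins the current loop value
            return await asyncio.wait_for(fake_processor(n), timeout=PROCESSOR_TIMEOUT)
        task = asyncio.create_task(run_with_timeout())
        task_to_name[task] = name
    done, _ = await asyncio.wait(task_to_name.keys())
    for task in done:
        print(task_to_name[task], "->", task.result())

asyncio.run(main())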

View File

@@ -1,4 +1,3 @@
import time
import traceback
from ..memory_system.Hippocampus import HippocampusManager
from ...config.config import global_config
@@ -73,12 +72,12 @@ async def _calculate_interest(message: MessageRecv) -> Tuple[float, bool]:
text_len = len(message.processed_plain_text)
# Adjust interest by text length: longer text means higher interest, with diminishing growth (min 0.01, max 0.05)
# Use a logarithmic function for the diminishing growth
base_interest = 0.01 + (0.05 - 0.01) * (math.log10(text_len + 1) / math.log10(1000 + 1))
base_interest = min(max(base_interest, 0.01), 0.05)
interested_rate += base_interest
logger.trace(f"记忆激活率: {interested_rate:.2f}")
if is_mentioned:
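The base_interest lines above map message length onto a logarithmic curve and clamp the result to [0.01, 0.05]. A small standalone sketch of the same formula with a few illustrative lengths:

import math

def base_interest(text_len: int) -> float:
    # log-scaled growth: longer text -> higher interest, with diminishing returns
    raw = 0.01 + (0.05 - 0.01) * (math.log10(text_len + 1) / math.log10(1000 + 1))
    return min(max(raw, 0.01), 0.05)

for n in (1, 10, 100, 1000):
    print(n, round(base_interest(n), 4))  # 0.014, 0.0239, 0.0367, 0.05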
@@ -220,11 +219,7 @@ class HeartFCMessageReceiver:
# 7. Logging
mes_name = chat.group_info.group_name if chat.group_info else "私聊"
# current_time = time.strftime("%H:%M:%S", time.localtime(message.message_info.time))
logger.info(
f"[{mes_name}]"
f"{userinfo.user_nickname}:"
f"{message.processed_plain_text}"
)
logger.info(f"[{mes_name}]{userinfo.user_nickname}:{message.processed_plain_text}")
# 8. Relationship handling
if global_config.relationship.give_name: