🤖 Auto-format code [skip ci]

github-actions[bot]
2025-07-01 16:22:13 +00:00
parent bb2a95e388
commit b369d12c90
4 changed files with 36 additions and 33 deletions

View File

@@ -18,12 +18,12 @@ from src.mais4u.mais4u_chat.s4u_msg_processor import S4UMessageProcessor
 # 定义日志配置
 # 获取项目根目录假设本文件在src/chat/message_receive/下,根目录为上上上级目录)
-PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..'))
-ENABLE_S4U_CHAT = os.path.isfile(os.path.join(PROJECT_ROOT, 's4u.s4u'))
+PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../.."))
+ENABLE_S4U_CHAT = os.path.isfile(os.path.join(PROJECT_ROOT, "s4u.s4u"))
 if ENABLE_S4U_CHAT:
-    print('''\nS4U私聊模式已开启\n!!!!!!!!!!!!!!!!!\n''')
+    print("""\nS4U私聊模式已开启\n!!!!!!!!!!!!!!!!!\n""")
 # 仅内部开启
 # 配置主程序日志格式
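
The hunk above only normalizes string quoting; the sentinel-file toggle itself is unchanged. The commit does not say which formatter the bot runs, but the rewrite matches Black's default string normalization. A minimal sketch, assuming Black (and an assumed line length of 120, not read from this repository), that reproduces the same rewrite via Black's Python API:

```python
# Sketch under the assumption that the auto-format bot runs Black.
import black

src = "ENABLE_S4U_CHAT = os.path.isfile(os.path.join(PROJECT_ROOT, 's4u.s4u'))\n"

# format_str applies the normalizations visible above: single quotes become
# double quotes, and over-long calls are wrapped across lines.
formatted = black.format_str(src, mode=black.Mode(line_length=120))
print(formatted)
# ENABLE_S4U_CHAT = os.path.isfile(os.path.join(PROJECT_ROOT, "s4u.s4u"))
```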

View File

@@ -217,7 +217,9 @@ class S4UChat:
         if should_interrupt:
             if self.gpt.partial_response:
-                logger.warning(f"[{self.stream_name}] Interrupting reply. Already generated: '{self.gpt.partial_response}'")
+                logger.warning(
+                    f"[{self.stream_name}] Interrupting reply. Already generated: '{self.gpt.partial_response}'"
+                )
             self._current_generation_task.cancel()

         # 将消息放入对应的队列
@@ -248,7 +250,9 @@ class S4UChat:
                 priority, entry_count, timestamp, message = self._normal_queue.get_nowait()

                 # 检查普通消息是否超时
                 if time.time() - timestamp > self._MESSAGE_TIMEOUT_SECONDS:
-                    logger.info(f"[{self.stream_name}] Discarding stale normal message: {message.processed_plain_text[:20]}...")
+                    logger.info(
+                        f"[{self.stream_name}] Discarding stale normal message: {message.processed_plain_text[:20]}..."
+                    )
                     self._normal_queue.task_done()
                     continue  # 处理下一条
                 queue_name = "normal"
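
For context, this hunk sits in the loop that drains the normal-priority queue: each entry carries its enqueue timestamp, and anything older than the timeout is acknowledged with `task_done()` and skipped instead of answered. A small self-contained sketch of that pattern (names and the 30-second timeout are illustrative, not taken from the real class):

```python
# Illustrative sketch of the stale-message check; the real S4UChat keeps more state.
import asyncio
import time

MESSAGE_TIMEOUT_SECONDS = 30.0  # assumed value for the sketch


def pop_fresh_message(queue: asyncio.PriorityQueue):
    """Pop entries until one is fresh enough to answer; drop stale ones."""
    while True:
        try:
            priority, entry_count, timestamp, message = queue.get_nowait()
        except asyncio.QueueEmpty:
            return None
        if time.time() - timestamp > MESSAGE_TIMEOUT_SECONDS:
            queue.task_done()  # stale: acknowledge it and keep draining
            continue
        return priority, entry_count, timestamp, message
```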
@@ -261,7 +265,9 @@ class S4UChat:
             try:
                 await self._current_generation_task
             except asyncio.CancelledError:
-                logger.info(f"[{self.stream_name}] Reply generation was interrupted externally for {queue_name} message. The message will be discarded.")
+                logger.info(
+                    f"[{self.stream_name}] Reply generation was interrupted externally for {queue_name} message. The message will be discarded."
+                )
                 # 被中断的消息应该被丢弃,而不是重新排队,以响应最新的用户输入。
                 # 旧的重新入队逻辑会导致所有中断的消息最终都被回复。
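
This hunk is the consumer side of the interrupt flow: the awaited generation task may be cancelled by a newer message, and `CancelledError` is treated as "discard this message" rather than re-queue it, so only the latest input gets a reply. A runnable sketch of that lifecycle with stand-in names:

```python
# Illustrative sketch; generate_reply stands in for the real reply generation.
import asyncio


async def generate_reply() -> str:
    await asyncio.sleep(5)  # stand-in for slow LLM generation
    return "finished reply"


async def main() -> None:
    generation_task = asyncio.create_task(generate_reply())
    await asyncio.sleep(0.1)

    # A newer, higher-priority message arrives and interrupts the reply.
    generation_task.cancel()

    try:
        await generation_task
    except asyncio.CancelledError:
        # As in the hunk above: log and drop the interrupted message instead of
        # re-queueing it, so only the latest input is answered.
        print("reply interrupted; message discarded")


asyncio.run(main())
```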
@@ -271,7 +277,7 @@ class S4UChat:
             self._current_generation_task = None
             self._current_message_being_replied = None
             # 标记任务完成
-            if queue_name == 'vip':
+            if queue_name == "vip":
                 self._vip_queue.task_done()
             else:
                 self._normal_queue.task_done()

View File

@@ -1,6 +1,5 @@
 from src.config.config import global_config
 from src.common.logger import get_logger
-from src.individuality.individuality import get_individuality
 from src.chat.utils.prompt_builder import Prompt, global_prompt_manager
 from src.chat.utils.chat_message_builder import build_readable_messages, get_raw_msg_before_timestamp_with_chat
 import time
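
One change in this file goes beyond pure formatting: an unused import is deleted. Black by itself never removes imports, so the bot's pipeline presumably also runs a lint autofix step. A hedged sketch of such a two-step pipeline; Ruff, the F401 rule selection, and the `src/` path are assumptions about this repository, not facts from the commit:

```python
# Hypothetical formatting pipeline; tool choice and paths are assumptions.
import subprocess

# A lint autofix (e.g. Ruff's F401 rule) is what removes unused imports.
subprocess.run(["ruff", "check", "--select", "F401", "--fix", "src/"], check=True)
# A formatter such as Black handles quote normalization and line wrapping.
subprocess.run(["black", "src/"], check=True)
```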
@@ -105,7 +104,9 @@ class PromptBuilder:
             )
             relation_info = "".join(relation_info_list)
             if relation_info:
-                relation_prompt = await global_prompt_manager.format_prompt("relation_prompt", relation_info=relation_info)
+                relation_prompt = await global_prompt_manager.format_prompt(
+                    "relation_prompt", relation_info=relation_info
+                )
         return relation_prompt

     async def build_memory_block(self, text: str) -> str:
@@ -199,7 +200,6 @@ class PromptBuilder:
         return core_msg_str, background_dialogue_prompt

-
     async def build_prompt_normal(
         self,
         message,
@@ -207,11 +207,8 @@ class PromptBuilder:
         message_txt: str,
         sender_name: str = "某人",
     ) -> str:
         identity_block, relation_info_block, memory_block = await asyncio.gather(
-            self.build_identity_block(),
-            self.build_relation_info(chat_stream),
-            self.build_memory_block(message_txt)
+            self.build_identity_block(), self.build_relation_info(chat_stream), self.build_memory_block(message_txt)
         )

         core_dialogue_prompt, background_dialogue_prompt = self.build_chat_history_prompts(chat_stream, message)
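
The final hunk only re-wraps an `asyncio.gather(...)` call, but the call is worth reading: the three prompt blocks are built concurrently, and `gather` returns results in argument order, which is why the positional unpack stays correct after the reformat. A self-contained sketch with stand-in builders:

```python
# Illustrative builders; the real ones are methods on PromptBuilder.
import asyncio


async def build_identity_block() -> str:
    return "identity block"


async def build_relation_info() -> str:
    return "relation info block"


async def build_memory_block(text: str) -> str:
    return f"memory block for {text!r}"


async def build_prompt(message_txt: str) -> str:
    # gather runs all three builders concurrently and yields their results in
    # argument order, so positional unpacking matches the call site exactly.
    identity_block, relation_info_block, memory_block = await asyncio.gather(
        build_identity_block(), build_relation_info(), build_memory_block(message_txt)
    )
    return "\n".join([identity_block, relation_info_block, memory_block])


print(asyncio.run(build_prompt("hello")))
```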