feat: support parsing at and reply in reply_to

SengokuCola
2025-07-03 13:43:07 +08:00
parent fbe8f08862
commit 20645bb7e9
3 changed files with 37 additions and 70 deletions
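
The change teaches the reply_to lookup to understand the 回复<nickname:user_id> and @<nickname:user_id> markers that stored message text may contain, resolving each user_id to a display name before the similarity comparison. A rough, self-contained sketch of that normalization follows: the two regexes are the ones added in this commit, but normalize_markers and resolve_name are hypothetical names, and the real code resolves names asynchronously through get_person_info_manager().

import re

# Rough standalone sketch of the marker normalization added in this commit.
# normalize_markers and resolve_name are hypothetical; the real code resolves
# user ids asynchronously via get_person_info_manager().
def normalize_markers(text: str, resolve_name) -> str:
    # Rewrite a 回复<nickname:user_id> marker as "回复 <person_name>",
    # falling back to the embedded nickname when no person_name is known.
    reply_pattern = r"回复<([^:<>]+):([^:<>]+)>"
    match = re.search(reply_pattern, text)
    if match:
        nickname, user_id = match.group(1), match.group(2)
        person_name = resolve_name(user_id) or nickname
        text = re.sub(reply_pattern, f"回复 {person_name}", text, count=1)

    # Rewrite every @<nickname:user_id> marker as "@<person_name>".
    def replace_at(m: re.Match) -> str:
        nickname, user_id = m.group(1), m.group(2)
        return f"@{resolve_name(user_id) or nickname}"

    return re.sub(r"@<([^:<>]+):([^:<>]+)>", replace_at, text)

names = {"10001": "Alice", "10002": "Bob"}
raw = "回复<alice_nick:10001> @<bob_nick:10002> sounds good"
print(normalize_markers(raw, names.get))  # -> 回复 Alice @Bob sounds good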


@@ -108,74 +108,6 @@ class ChattingObservation(Observation):
     def get_observe_info(self, ids=None):
         return self.talking_message_str
 
-    def get_recv_message_by_text(self, sender: str, text: str) -> Optional[MessageRecv]:
-        """
-        Based on the plain text of the reply:
-        1. Find the latest, best-matching message in talking_message
-        2. If found, return that message
-        """
-        find_msg = None
-        reverse_talking_message = list(reversed(self.talking_message))
-        for message in reverse_talking_message:
-            user_id = message["user_id"]
-            platform = message["platform"]
-            person_id = get_person_info_manager().get_person_id(platform, user_id)
-            person_name = get_person_info_manager().get_value(person_id, "person_name")
-            if person_name == sender:
-                similarity = difflib.SequenceMatcher(None, text, message["processed_plain_text"]).ratio()
-                if similarity >= 0.9:
-                    find_msg = message
-                    break
-
-        if not find_msg:
-            return None
-
-        user_info = {
-            "platform": find_msg.get("user_platform", ""),
-            "user_id": find_msg.get("user_id", ""),
-            "user_nickname": find_msg.get("user_nickname", ""),
-            "user_cardname": find_msg.get("user_cardname", ""),
-        }
-
-        group_info = {}
-        if find_msg.get("chat_info_group_id"):
-            group_info = {
-                "platform": find_msg.get("chat_info_group_platform", ""),
-                "group_id": find_msg.get("chat_info_group_id", ""),
-                "group_name": find_msg.get("chat_info_group_name", ""),
-            }
-
-        content_format = ""
-        accept_format = ""
-        template_items = {}
-
-        format_info = {"content_format": content_format, "accept_format": accept_format}
-        template_info = {
-            "template_items": template_items,
-        }
-
-        message_info = {
-            "platform": self.platform,
-            "message_id": find_msg.get("message_id"),
-            "time": find_msg.get("time"),
-            "group_info": group_info,
-            "user_info": user_info,
-            "additional_config": find_msg.get("additional_config"),
-            "format_info": format_info,
-            "template_info": template_info,
-        }
-
-        message_dict = {
-            "message_info": message_info,
-            "raw_message": find_msg.get("processed_plain_text"),
-            "detailed_plain_text": find_msg.get("processed_plain_text"),
-            "processed_plain_text": find_msg.get("processed_plain_text"),
-        }
-
-        find_rec_msg = MessageRecv(message_dict)
-        find_rec_msg.update_chat_stream(get_chat_manager().get_or_create_stream(self.chat_id))
-        return find_rec_msg
 
     async def observe(self):
         # New messages since the last observation


@@ -257,7 +257,7 @@ class DefaultReplyer:
         # Pick one model config by weighted random selection
         selected_model_config = self._select_weighted_model_config()
         logger.info(
-            f"{self.log_prefix} 使用模型配置: {selected_model_config.get('model_name', 'N/A')} (权重: {selected_model_config.get('weight', 1.0)})"
+            f"{self.log_prefix} 使用模型配置: {selected_model_config.get('name', 'N/A')} (权重: {selected_model_config.get('weight', 1.0)})"
         )
 
         express_model = LLMRequest(

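The hunk above only corrects the log line to read the "name" key of the selected config. The selection itself is described by the comment as a weighted random pick; the diff does not show _select_weighted_model_config, so the sketch below (a hypothetical select_weighted_model_config helper, assuming each config carries an optional "weight" key) is only one plausible way that pick could work.

import random

# Hedged sketch: the commit does not show _select_weighted_model_config,
# so this is just one plausible weighted pick over the "weight" key.
def select_weighted_model_config(model_configs: list[dict]) -> dict:
    weights = [cfg.get("weight", 1.0) for cfg in model_configs]
    return random.choices(model_configs, weights=weights, k=1)[0]

configs = [
    {"name": "model-a", "weight": 3.0},
    {"name": "model-b", "weight": 1.0},
]
picked = select_weighted_model_config(configs)
# Mirrors the corrected log line: read "name", not "model_name".
print(f"使用模型配置: {picked.get('name', 'N/A')} (权重: {picked.get('weight', 1.0)})")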

@@ -22,6 +22,7 @@
 import traceback
 import time
 import difflib
+import re
 from typing import Optional, Union
 
 from src.common.logger import get_logger
@@ -171,7 +172,41 @@ async def _find_reply_message(target_stream, reply_to: str) -> Optional[MessageR
         person_id = get_person_info_manager().get_person_id(platform, user_id)
         person_name = await get_person_info_manager().get_value(person_id, "person_name")
         if person_name == sender:
-            similarity = difflib.SequenceMatcher(None, text, message["processed_plain_text"]).ratio()
+            translate_text = message["processed_plain_text"]
+
+            # Check for a 回复<aaa:bbb> field
+            reply_pattern = r"回复<([^:<>]+):([^:<>]+)>"
+            match = re.search(reply_pattern, translate_text)
+            if match:
+                aaa = match.group(1)
+                bbb = match.group(2)
+                reply_person_id = get_person_info_manager().get_person_id(platform, bbb)
+                reply_person_name = await get_person_info_manager().get_value(reply_person_id, "person_name")
+                if not reply_person_name:
+                    reply_person_name = aaa
+                # Put the reply info in front of the content
+                translate_text = re.sub(reply_pattern, f"回复 {reply_person_name}", translate_text, count=1)
+
+            # Check for @<aaa:bbb> fields
+            at_pattern = r"@<([^:<>]+):([^:<>]+)>"
+            at_matches = list(re.finditer(at_pattern, translate_text))
+            if at_matches:
+                new_content = ""
+                last_end = 0
+                for m in at_matches:
+                    new_content += translate_text[last_end : m.start()]
+                    aaa = m.group(1)
+                    bbb = m.group(2)
+                    at_person_id = get_person_info_manager().get_person_id(platform, bbb)
+                    at_person_name = await get_person_info_manager().get_value(at_person_id, "person_name")
+                    if not at_person_name:
+                        at_person_name = aaa
+                    new_content += f"@{at_person_name}"
+                    last_end = m.end()
+                new_content += translate_text[last_end:]
+                translate_text = new_content
+
+            similarity = difflib.SequenceMatcher(None, text, translate_text).ratio()
             if similarity >= 0.9:
                 find_msg = message
                 break
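
For reference, the 0.9 threshold above is applied to difflib.SequenceMatcher's ratio over the normalized text; a tiny sketch of that check (is_reply_match is a hypothetical helper name, example strings invented):

import difflib

# The lookup above keeps a candidate only when its normalized text is at
# least 90% similar to the reply_to text.
def is_reply_match(reply_text: str, candidate_text: str, threshold: float = 0.9) -> bool:
    ratio = difflib.SequenceMatcher(None, reply_text, candidate_text).ratio()
    return ratio >= threshold

print(is_reply_match("回复 Alice sounds good", "回复 Alice sounds good"))  # True, ratio 1.0
print(is_reply_match("回复 Alice sounds good", "@Bob a completely different message"))  # False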