Migration: 1f91967 (remove: remove the willing system, remove reply2, remove the energy value, replace reply_to with message)

Windpicker-owo
2025-09-03 21:27:28 +08:00
parent a63ca537d1
commit 53e72ecbdb
22 changed files with 151 additions and 947 deletions

View File

@@ -32,7 +32,6 @@ logger = get_logger("generator_api")
def get_replyer(
chat_stream: Optional[ChatStream] = None,
chat_id: Optional[str] = None,
model_set_with_weight: Optional[List[Tuple[TaskConfig, float]]] = None,
request_type: str = "replyer",
) -> Optional[DefaultReplyer]:
"""获取回复器对象
@@ -43,7 +42,6 @@ def get_replyer(
Args:
chat_stream: chat stream object (takes precedence)
chat_id: chat ID (in practice the same as stream_id)
model_set_with_weight: list of model configurations; each element is a (TaskConfig, weight) tuple
request_type: request type
Returns:
@@ -59,7 +57,6 @@ def get_replyer(
return replyer_manager.get_replyer(
chat_stream=chat_stream,
chat_id=chat_id,
model_set_with_weight=model_set_with_weight,
request_type=request_type,
)
except Exception as e:
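With model_set_with_weight removed, callers can no longer inject a per-call model list; only the chat target and request_type remain configurable. A minimal usage sketch, assuming this module is imported as generator_api (the import path is not shown in this diff):

# Hypothetical caller of the trimmed-down get_replyer
replyer = generator_api.get_replyer(chat_id="stream_123", request_type="replyer")
if replyer is None:
    # get_replyer returns None when the replyer manager cannot provide one
    raise RuntimeError("no replyer available for stream_123")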
@@ -78,13 +75,13 @@ async def generate_reply(
chat_id: Optional[str] = None,
action_data: Optional[Dict[str, Any]] = None,
reply_to: str = "",
reply_message: Optional[Dict[str, Any]] = None,
extra_info: str = "",
available_actions: Optional[Dict[str, ActionInfo]] = None,
enable_tool: bool = False,
enable_splitter: bool = True,
enable_chinese_typo: bool = True,
return_prompt: bool = False,
model_set_with_weight: Optional[List[Tuple[TaskConfig, float]]] = None,
request_type: str = "generator_api",
from_plugin: bool = True,
) -> Tuple[bool, List[Tuple[str, Any]], Optional[str]]:
@@ -95,6 +92,7 @@ async def generate_reply(
chat_id: chat ID (fallback)
action_data: action data (backward compatible; contains reply_to and extra_info)
reply_to: reply target, in the format "sender:message content"
reply_message: the original message being replied to
extra_info: extra information used to supplement the context
available_actions: available actions
enable_tool: whether to enable tool calls
@@ -110,7 +108,7 @@ async def generate_reply(
try:
# Get the replyer
replyer = get_replyer(
chat_stream, chat_id, model_set_with_weight=model_set_with_weight, request_type=request_type
chat_stream, chat_id, request_type=request_type
)
if not replyer:
logger.error("[GeneratorAPI] 无法获取回复器")
@@ -131,6 +129,7 @@ async def generate_reply(
enable_tool=enable_tool,
from_plugin=from_plugin,
stream_id=chat_stream.stream_id if chat_stream else chat_id,
reply_message=reply_message,
)
if not success:
logger.warning("[GeneratorAPI] 回复生成失败")
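Call sites migrate from the string-based reply_to lookup to passing the original message dict through reply_message. A hedged sketch, again assuming the module is imported as generator_api; message_dict stands for whatever dict the adapter stored for the incoming message (its exact schema is not shown in this diff):

async def reply_to_incoming(chat_stream, message_dict):
    # message_dict: hypothetical raw dict of the message being answered
    success, reply_set, _ = await generator_api.generate_reply(
        chat_stream=chat_stream,
        reply_message=message_dict,      # replaces reply_to="sender:content"
        request_type="generator_api",
    )
    return reply_set if success else []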
@@ -166,11 +165,11 @@ async def rewrite_reply(
chat_id: Optional[str] = None,
enable_splitter: bool = True,
enable_chinese_typo: bool = True,
model_set_with_weight: Optional[List[Tuple[TaskConfig, float]]] = None,
raw_reply: str = "",
reason: str = "",
reply_to: str = "",
return_prompt: bool = False,
request_type: str = "generator_api",
) -> Tuple[bool, List[Tuple[str, Any]], Optional[str]]:
"""重写回复
@@ -180,7 +179,6 @@ async def rewrite_reply(
chat_id: chat ID (fallback)
enable_splitter: whether to enable the message splitter
enable_chinese_typo: whether to enable the typo generator
model_set_with_weight: list of model configurations; each element is a (TaskConfig, weight) tuple
raw_reply: original reply content
reason: reason for the rewrite
reply_to: reply target
@@ -191,7 +189,7 @@ async def rewrite_reply(
"""
try:
# Get the replyer
replyer = get_replyer(chat_stream, chat_id, model_set_with_weight=model_set_with_weight)
replyer = get_replyer(chat_stream, chat_id, request_type=request_type)
if not replyer:
logger.error("[GeneratorAPI] 无法获取回复器")
return False, [], None
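rewrite_reply loses the same parameter; model selection now comes from the replyer itself, reached only through request_type. A short sketch under the same assumptions as above:

async def polish(chat_stream, draft: str):
    success, reply_set, _ = await generator_api.rewrite_reply(
        chat_stream=chat_stream,
        raw_reply=draft,
        reason="soften the tone of the draft",  # illustrative reason only
        request_type="generator_api",
    )
    return reply_set if success else []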
@@ -258,10 +256,10 @@ def process_human_text(content: str, enable_splitter: bool, enable_chinese_typo:
async def generate_response_custom(
chat_stream: Optional[ChatStream] = None,
chat_id: Optional[str] = None,
model_set_with_weight: Optional[List[Tuple[TaskConfig, float]]] = None,
request_type: str = "generator_api",
prompt: str = "",
) -> Optional[str]:
replyer = get_replyer(chat_stream, chat_id, model_set_with_weight=model_set_with_weight)
replyer = get_replyer(chat_stream, chat_id, request_type=request_type)
if not replyer:
logger.error("[GeneratorAPI] 无法获取回复器")
return None
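generate_response_custom follows the same pattern: the replyer is resolved from the chat target and request_type only, and a raw prompt is passed through. A hedged sketch (generator_api alias assumed as above):

async def summarize(chat_id: str):
    text = await generator_api.generate_response_custom(
        chat_id=chat_id,
        prompt="Summarize the last ten messages in one sentence.",
    )
    return text  # None if no replyer could be obtained or generation failed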

View File

@@ -32,7 +32,7 @@ import traceback
import time
import difflib
import asyncio
from typing import Optional, Union, Dict
from typing import Optional, Union, Dict, Any
from src.common.logger import get_logger
# Import dependencies
@@ -88,7 +88,8 @@ async def _send_to_target(
display_message: str = "",
typing: bool = False,
reply_to: str = "",
reply_to_platform_id: Optional[str] = None,
set_reply: bool = False,
reply_to_message: Optional[Dict[str, Any]] = None,
storage_message: bool = True,
show_log: bool = True,
) -> bool:
@@ -101,7 +102,6 @@ async def _send_to_target(
display_message: display message
typing: whether to simulate a typing delay.
reply_to: message being replied to, in the format "sender:message content"
reply_to_platform_id: reply target, in the format "platform:user ID"; looked up automatically if not provided (not for plugin developers!)
storage_message: whether to store the message in the database
show_log: whether to log the send
@@ -109,6 +109,9 @@ async def _send_to_target(
bool: whether the send succeeded
"""
try:
if reply_to:
logger.warning("[SendAPI] 在0.10.0, reply_to 参数已弃用,请使用 reply_to_message 参数")
if show_log:
logger.debug(f"[SendAPI] 发送{message_type}消息到 {stream_id}")
@@ -135,14 +138,14 @@ async def _send_to_target(
# Create the message segment
message_segment = Seg(type=message_type, data=content) # type: ignore
# Handle the reply message
anchor_message = None
if reply_to:
anchor_message = await _find_reply_message(target_stream, reply_to)
if anchor_message and anchor_message.message_info.user_info and not reply_to_platform_id:
reply_to_platform_id = (
f"{anchor_message.message_info.platform}:{anchor_message.message_info.user_info.user_id}"
)
if reply_to_message:
anchor_message = MessageRecv(message_dict=reply_to_message)
anchor_message.update_chat_stream(target_stream)
reply_to_platform_id = (
f"{anchor_message.message_info.platform}:{anchor_message.message_info.user_info.user_id}"
)
else:
anchor_message = None
# Build the outgoing message object
bot_message = MessageSending(
@@ -163,7 +166,7 @@ async def _send_to_target(
sent_msg = await heart_fc_sender.send_message(
bot_message,
typing=typing,
set_reply=(anchor_message is not None),
set_reply=set_reply,
storage_message=storage_message,
show_log=show_log,
)
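The net change in _send_to_target: the reply anchor is built from a caller-supplied message dict (wrapped in MessageRecv) instead of a text search driven by reply_to, and quoting is now opted into explicitly via set_reply rather than inferred from whether an anchor was found. A hedged sketch of a caller inside this package; the dict contents are whatever the adapter originally stored:

async def answer(stream_id: str, message_dict: dict) -> bool:
    # message_dict: hypothetical dict form of the message being answered
    return await _send_to_target(
        "text",
        "got it!",
        stream_id,
        reply_to_message=message_dict,  # used to build the MessageRecv anchor
        set_reply=True,                 # quote the anchored message explicitly
    )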
@@ -298,7 +301,8 @@ async def text_to_stream(
stream_id: str,
typing: bool = False,
reply_to: str = "",
reply_to_platform_id: str = "",
reply_to_message: Optional[Dict[str, Any]] = None,
set_reply: bool = False,
storage_message: bool = True,
) -> bool:
"""向指定流发送文本消息
@@ -308,7 +312,6 @@ async def text_to_stream(
stream_id: chat stream ID
typing: whether to show a typing indicator
reply_to: message being replied to, in the format "sender:message content"
reply_to_platform_id: reply target, in the format "platform:user ID"; looked up automatically if not provided (not for plugin developers!)
storage_message: whether to store the message in the database
Returns:
@@ -321,12 +324,13 @@ async def text_to_stream(
"",
typing,
reply_to,
reply_to_platform_id=reply_to_platform_id,
set_reply=set_reply,
reply_to_message=reply_to_message,
storage_message=storage_message,
)
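The public senders pass the new arguments straight through. An illustrative text_to_stream call, assuming this module is imported as send_api (the import path is not shown in the diff):

async def acknowledge(stream_id: str, message_dict: dict) -> bool:
    return await send_api.text_to_stream(
        "on my way",
        stream_id,
        reply_to_message=message_dict,  # hypothetical dict of the message being answered
        set_reply=True,                 # opt into quoting; defaults to False
    )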
async def emoji_to_stream(emoji_base64: str, stream_id: str, storage_message: bool = True) -> bool:
async def emoji_to_stream(emoji_base64: str, stream_id: str, storage_message: bool = True, set_reply: bool = False) -> bool:
"""向指定流发送表情包
Args:
@@ -337,10 +341,10 @@ async def emoji_to_stream(emoji_base64: str, stream_id: str, storage_message: bo
Returns:
bool: whether the send succeeded
"""
return await _send_to_target("emoji", emoji_base64, stream_id, "", typing=False, storage_message=storage_message)
return await _send_to_target("emoji", emoji_base64, stream_id, "", typing=False, storage_message=storage_message, set_reply=set_reply)
async def image_to_stream(image_base64: str, stream_id: str, storage_message: bool = True) -> bool:
async def image_to_stream(image_base64: str, stream_id: str, storage_message: bool = True, set_reply: bool = False) -> bool:
"""向指定流发送图片
Args:
@@ -351,11 +355,11 @@ async def image_to_stream(image_base64: str, stream_id: str, storage_message: bo
Returns:
bool: whether the send succeeded
"""
return await _send_to_target("image", image_base64, stream_id, "", typing=False, storage_message=storage_message)
return await _send_to_target("image", image_base64, stream_id, "", typing=False, storage_message=storage_message, set_reply=set_reply)
async def command_to_stream(
command: Union[str, dict], stream_id: str, storage_message: bool = True, display_message: str = ""
command: Union[str, dict], stream_id: str, storage_message: bool = True, display_message: str = "", set_reply: bool = False
) -> bool:
"""向指定流发送命令
@@ -379,6 +383,8 @@ async def custom_to_stream(
display_message: str = "",
typing: bool = False,
reply_to: str = "",
reply_to_message: Optional[Dict[str, Any]] = None,
set_reply: bool = False,
storage_message: bool = True,
show_log: bool = True,
) -> bool:
@@ -403,6 +409,8 @@ async def custom_to_stream(
display_message=display_message,
typing=typing,
reply_to=reply_to,
reply_to_message=reply_to_message,
set_reply=set_reply,
storage_message=storage_message,
show_log=show_log,
)
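custom_to_stream forwards reply_to_message and set_reply to _send_to_target unchanged, as the hunk above shows. A final hedged sketch; the leading parameters (message type, payload, stream id) are not visible in this diff and are assumed to be positional:

async def send_video(stream_id: str, video_base64: str, message_dict: dict) -> bool:
    return await send_api.custom_to_stream(
        "video",                        # assumed: message type
        video_base64,                   # assumed: payload
        stream_id,                      # assumed: target stream
        reply_to_message=message_dict,  # hypothetical dict of the anchored message
        set_reply=True,
        show_log=False,
    )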