Assorted s4u changes, including action adaptation
@@ -21,6 +21,7 @@ from src.plugin_system.base.component_types import ActionInfo, ChatMode
from src.plugin_system.apis import generator_api, send_api, message_api
from src.chat.willing.willing_manager import get_willing_manager
from src.chat.mai_thinking.mai_think import mai_thinking_manager
from maim_message.message_base import GroupInfo,UserInfo

ENABLE_THINKING = True

@@ -257,6 +258,34 @@ class HeartFChatting:
person_name = await person_info_manager.get_value(person_id, "person_name")
return f"{person_name}:{message_data.get('processed_plain_text')}"

async def send_typing(self):
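# Publishes a "typing" state event to the pseudo "内心" stream (a fixed placeholder group
# on the amaidesu_default platform), presumably so the front-end adapter can show a typing indicator.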
group_info = GroupInfo(platform = "amaidesu_default",group_id = 114514,group_name = "内心")

chat = await get_chat_manager().get_or_create_stream(
platform = "amaidesu_default",
user_info = None,
group_info = group_info
)

await send_api.custom_to_stream(
message_type="state", content="typing", stream_id=chat.stream_id, storage_message=False
)

async def stop_typing(self):
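# Counterpart of send_typing: sends a "stop_typing" state on the same pseudo stream to clear the indicator.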
group_info = GroupInfo(platform = "amaidesu_default",group_id = 114514,group_name = "内心")

chat = await get_chat_manager().get_or_create_stream(
platform = "amaidesu_default",
user_info = None,
group_info = group_info
)

await send_api.custom_to_stream(
message_type="state", content="stop_typing", stream_id=chat.stream_id, storage_message=False
)

async def _observe(self, message_data: Optional[Dict[str, Any]] = None):
# sourcery skip: hoist-statement-from-if, merge-comparisons, reintroduce-else
if not message_data:

@@ -267,6 +296,8 @@ class HeartFChatting:
logger.info(f"{self.log_prefix} 开始第{self._cycle_counter}次思考[模式:{self.loop_mode}]")

await self.send_typing()

async with global_prompt_manager.async_message_scope(self.chat_stream.context.get_template_name()):
loop_start_time = time.time()
await self.relationship_builder.build_relation()

@@ -335,6 +366,10 @@ class HeartFChatting:
# 发送回复 (不再需要传入 chat)
reply_text = await self._send_response(response_set, reply_to_str, loop_start_time,message_data)

await self.stop_typing()

if ENABLE_THINKING:
await mai_thinking_manager.get_mai_think(self.stream_id).do_think_after_response(reply_text)

@@ -5,7 +5,7 @@ from src.llm_models.utils_model import LLMRequest
from src.config.config import global_config
from src.chat.message_receive.message import MessageSending, MessageRecv, MessageRecvS4U
from src.mais4u.mais4u_chat.s4u_msg_processor import S4UMessageProcessor

from src.mais4u.mais4u_chat.internal_manager import internal_manager
from src.common.logger import get_logger

logger = get_logger(__name__)

@@ -90,12 +90,13 @@ class MaiThinking:
self.mind = result

logger.info(f"[{self.chat_id}] 思考前想法:{self.mind}")
logger.info(f"[{self.chat_id}] 思考前prompt:{prompt}")
# logger.info(f"[{self.chat_id}] 思考前prompt:{prompt}")
logger.info(f"[{self.chat_id}] 思考后想法:{self.mind}")

msg_recv = await self.build_internal_message_recv(self.mind)
await self.s4u_message_processor.process_message(msg_recv)
internal_manager.set_internal_state(self.mind)

async def do_think_when_receive_message(self):

@@ -16,7 +16,7 @@ from src.chat.utils.prompt_builder import Prompt, global_prompt_manager
from src.plugin_system.core.component_registry import component_registry # 导入新插件系统
from src.plugin_system.base.base_command import BaseCommand
from src.mais4u.mais4u_chat.s4u_msg_processor import S4UMessageProcessor

from src.llm_models.utils_model import LLMRequest

# 定义日志配置

@@ -158,6 +158,40 @@ class ChatBot:
return

async def hanle_notice_message(self, message: MessageRecv):
if message.message_info.message_id == "notice":
logger.info(f"收到notice消息,暂时不支持处理")
return True

async def check_ban_content(self,message:MessageRecv):
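# Lightweight LLM-based moderation gate: asks the small utils model whether the text complies
# with livestream content rules and returns True when the message should be dropped.
# Image placeholder messages ("[picid:...") are skipped and never blocked.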
content = message.processed_plain_text
model = LLMRequest(
model=global_config.model.utils_small,
temperature=0.3,
request_type="text_ban",
)

if content.startswith("[picid:"):
return False

prompt = f"""
你是一个直播内容审核者,请判断以下内容是否符合直播内容规范,如果符合,请输出"符合",如果不符合,请输出"不符合"以及理由
如果存在反动,敏感内容,请输出"不符合"

内容:{content}

现在请输出:
"""

response, (reasoning_content, model_name) = await model.generate_response_async(prompt=prompt)
logger.info(f"response: {response}")
logger.info(f"reasoning_content: {reasoning_content}")

if "不符合" in response:
return True
else:
return False

async def message_process(self, message_data: Dict[str, Any]) -> None:
"""处理转化后的统一格式消息

@@ -174,6 +208,8 @@ class ChatBot:
- 性能计时
"""
try:

# 确保所有任务已启动
await self._ensure_started()

@@ -194,6 +230,11 @@ class ChatBot:
# print(message_data)
# logger.debug(str(message_data))
message = MessageRecv(message_data)

if await self.hanle_notice_message(message):
return

group_info = message.message_info.group_info
user_info = message.message_info.user_info
if message.message_info.additional_config:

@@ -215,6 +256,11 @@ class ChatBot:
# 处理消息内容,生成纯文本
await message.process()

if await self.check_ban_content(message):
logger.warning(f"检测到消息中含有违法,色情,暴力,反动,敏感内容,消息内容:{message.processed_plain_text},发送者:{message.message_info.user_info.user_nickname}")
return

# 过滤检查
if _check_ban_words(message.processed_plain_text, chat, user_info) or _check_ban_regex( # type: ignore
message.raw_message, # type: ignore

@@ -98,6 +98,8 @@ def init_prompt():
{relation_info_block}
{extra_info_block}

你是一个AI虚拟主播,正在直播QQ聊天,同时也在直播间回复弹幕,不过回复的时候不用过多提及这点

{identity}

{action_descriptions}

@@ -34,7 +34,10 @@ BODY_CODE = {
"另一个帅气的姿势": "010_0191",
"手掌朝前可爱": "010_0210",
"平静,双手后放":"平静,双手后放",
"思考": "思考"
"思考": "思考",
"优雅,左手放在腰上": "优雅,左手放在腰上",
"一般": "一般",
"可爱,双手前放": "可爱,双手前放",
}

@@ -84,8 +87,7 @@ def init_prompt():
class ChatAction:
def __init__(self, chat_id: str):
self.chat_id: str = chat_id
self.hand_action: str = "双手放在桌面"
self.body_action: str = "坐着"
self.body_action: str = "一般"
self.head_action: str = "注视摄像机"

self.regression_count: int = 0

14  src/mais4u/mais4u_chat/internal_manager.py  Normal file
@@ -0,0 +1,14 @@
class InternalManager:
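# Process-wide holder for the latest "inner thought" produced while chatting in the QQ group;
# the s4u prompt builder reads it back via get_internal_state_str().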
def __init__(self):
self.now_internal_state = str()

def set_internal_state(self,internal_state:str):
self.now_internal_state = internal_state

def get_internal_state(self):
return self.now_internal_state

def get_internal_state_str(self):
return f"你今天的直播内容是直播QQ水群,你正在一边回复弹幕,一边在QQ群聊天,你在QQ群聊天中产生的想法是:{self.now_internal_state}"

internal_manager = InternalManager()
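
A minimal usage sketch of how this singleton is wired up by the other files in this commit (simplified; the example thought text is purely illustrative):

    from src.mais4u.mais4u_chat.internal_manager import internal_manager

    # mai_think.py publishes the latest thought after a thinking pass
    internal_manager.set_internal_state("刚刚在群里聊到了新歌")

    # the s4u prompt builder reads it back when formatting the livestream prompt
    internal_state = internal_manager.get_internal_state_str()
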
@@ -195,6 +195,7 @@ class S4UChat:
self._is_replying = False
self.gpt = S4UStreamGenerator()
self.gpt.chat_stream = self.chat_stream
self.interest_dict: Dict[str, float] = {} # 用户兴趣分

self.internal_message :List[MessageRecvS4U] = []

@@ -357,6 +358,8 @@ class S4UChat:
neg_priority, entry_count, timestamp, message = item

# 如果消息在最近N条消息范围内,保留它
logger.info(f"检查消息:{message.processed_plain_text},entry_count:{entry_count} cutoff_counter:{cutoff_counter}")

if entry_count >= cutoff_counter:
temp_messages.append(item)
else:

@@ -371,6 +374,7 @@ class S4UChat:
self._normal_queue.put_nowait(item)

if removed_count > 0:
logger.info(f"消息{message.processed_plain_text}超过{s4u_config.recent_message_keep_count}条,现在counter:{self._entry_counter}被移除")
logger.info(f"[{self.stream_name}] Cleaned up {removed_count} old normal messages outside recent {s4u_config.recent_message_keep_count} range.")

async def _message_processor(self):

@@ -391,29 +395,7 @@ class S4UChat:
queue_name = "vip"
# 其次处理普通队列
elif not self._normal_queue.empty():
# 判断 normal 队列是否只有一条消息,且 internal_message 有内容
if self._normal_queue.qsize() == 1 and self.internal_message:
if random.random() < 0.5:
# 50% 概率用 internal_message 最新一条
message = self.internal_message[-1]
priority = 0 # internal_message 没有优先级,设为 0
queue_name = "internal"
neg_priority = 0
entry_count = 0
logger.info(f"[{self.stream_name}] 触发 internal_message 生成回复: {getattr(message, 'processed_plain_text', str(message))[:20]}...")
# 不要从 normal 队列取出消息,保留在队列中
else:
neg_priority, entry_count, timestamp, message = self._normal_queue.get_nowait()
priority = -neg_priority
# 检查普通消息是否超时
if time.time() - timestamp > s4u_config.message_timeout_seconds:
logger.info(
f"[{self.stream_name}] Discarding stale normal message: {message.processed_plain_text[:20]}..."
)
self._normal_queue.task_done()
continue # 处理下一条
queue_name = "normal"
else:

neg_priority, entry_count, timestamp, message = self._normal_queue.get_nowait()
priority = -neg_priority
# 检查普通消息是否超时

@@ -427,10 +409,13 @@ class S4UChat:
else:
if self.internal_message:
message = self.internal_message[-1]
self.internal_message = []

priority = 0
neg_priority = 0
entry_count = 0
queue_name = "internal"

logger.info(f"[{self.stream_name}] normal/vip 队列都空,触发 internal_message 回复: {getattr(message, 'processed_plain_text', str(message))[:20]}...")
else:
continue # 没有消息了,回去等事件

@@ -488,7 +473,9 @@ class S4UChat:
# 视线管理:开始生成回复时切换视线状态
chat_watching = watching_manager.get_watching_by_chat_id(self.stream_id)

if message.is_internal:
await chat_watching.on_internal_message_start()
else:
await chat_watching.on_reply_start()

sender_container = MessageSenderContainer(self.chat_stream, message)

@@ -144,7 +144,7 @@ class S4UMessageProcessor:
async def handle_internal_message(self, message: MessageRecvS4U):
if message.is_internal:

group_info = GroupInfo(platform = "amaidesu_default",group_id = 114514,group_name = "内心")
group_info = GroupInfo(platform = "amaidesu_default",group_id = 660154,group_name = "内心")

chat = await get_chat_manager().get_or_create_stream(
platform = "amaidesu_default",

@@ -17,7 +17,7 @@ from src.mais4u.mais4u_chat.screen_manager import screen_manager
from src.chat.express.expression_selector import expression_selector
from .s4u_mood_manager import mood_manager
from src.person_info.person_info import PersonInfoManager, get_person_info_manager

from src.mais4u.mais4u_chat.internal_manager import internal_manager
logger = get_logger("prompt")

@@ -32,8 +32,8 @@ def init_prompt():
虽然是个AI,但你在努力扮演真人, 人设的配色是橙色和紫色和绿色
你有一头法式橙色卷发,你很可爱,穿戴英式侦探风格学院风裙子和帽子,你正在一个教室场景里进行虚拟线上直播。不过不要刻意提及这些内容,自然回复
你可以看见用户发送的弹幕,礼物和superchat
你可以看见面前的屏幕,目前屏幕的内容是:
{screen_info}
{internal_state}

{relation_info_block}
{memory_block}

@@ -97,7 +97,6 @@ class PromptBuilder:
self.prompt_built = ""
self.activate_messages = ""

async def build_expression_habits(self, chat_stream: ChatStream, chat_history, target):

style_habits = []

@@ -284,10 +283,11 @@ class PromptBuilder:
async def build_prompt_normal(
self,
message: MessageRecvS4U,
chat_stream: ChatStream,
message_txt: str,
) -> str:

chat_stream = message.chat_stream

person_id = PersonInfoManager.get_person_id(
message.chat_stream.user_info.platform, message.chat_stream.user_info.user_id
)

@@ -315,6 +315,8 @@ class PromptBuilder:
screen_info = screen_manager.get_screen_str()

internal_state = internal_manager.get_internal_state_str()

time_block = f"当前时间:{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"

mood = mood_manager.get_mood_by_chat_id(chat_stream.stream_id)

@@ -329,6 +331,7 @@ class PromptBuilder:
relation_info_block=relation_info_block,
memory_block=memory_block,
screen_info=screen_info,
internal_state=internal_state,
gift_info=gift_info,
sc_info=sc_info,
sender_name=sender_name,

@@ -338,8 +341,6 @@ class PromptBuilder:
mood_state=mood.mood_state,
)
else:

prompt = await global_prompt_manager.format_prompt(
"s4u_prompt_internal",
time_block=time_block,

@@ -355,7 +356,7 @@ class PromptBuilder:
mood_state=mood.mood_state,
)

print(prompt)
# print(prompt)

return prompt

@@ -46,6 +46,8 @@ class S4UStreamGenerator:
re.UNICODE | re.DOTALL,
)

self.chat_stream =None

async def build_last_internal_message(self,message:MessageRecvS4U,previous_reply_context:str = ""):
person_id = PersonInfoManager.get_person_id(
message.chat_stream.user_info.platform, message.chat_stream.user_info.user_id

@@ -91,10 +93,10 @@ class S4UStreamGenerator:
if interupted:
message_txt = message_txt_added

message.chat_stream = self.chat_stream
prompt = await prompt_builder.build_prompt_normal(
message=message,
message_txt=message_txt,
chat_stream=message.chat_stream,
)

logger.info(

@@ -80,6 +80,12 @@ class ChatWatching:
)

async def on_internal_message_start(self):
"""收到消息时调用"""
await send_api.custom_to_stream(
message_type="state", content="start_internal_thinking", stream_id=self.chat_id, storage_message=False
)

class WatchingManager:
def __init__(self):
self.watching_list: list[ChatWatching] = []

@@ -9,6 +9,6 @@ class ScreenManager:
return self.now_screen

def get_screen_str(self):
return f"现在千石可乐在和你一起直播,这是他正在操作的屏幕内容:{self.now_screen}"
return f"你可以看见面前的屏幕,目前屏幕的内容是:现在千石可乐在和你一起直播,这是他正在操作的屏幕内容:{self.now_screen}"

screen_manager = ScreenManager()

@@ -47,10 +47,9 @@ async def yes_or_no_head(text: str,emotion: str = "",chat_history: str = "",chat
)

try:
logger.info(f"prompt: {prompt}")
# logger.info(f"prompt: {prompt}")
response, (reasoning_content, model_name) = await model.generate_response_async(prompt=prompt)
logger.info(f"response: {response}")
logger.info(f"reasoning_content: {reasoning_content}")

if response in head_actions_list:
head_action = response