添加主动思考的执行器喵~(你是一只猫娘喵)

This commit is contained in:
ikun-11451
2025-10-02 14:48:40 +08:00
parent 1fa568196f
commit cabbdcc90a
2 changed files with 82 additions and 2 deletions

View File

@@ -13,6 +13,7 @@ from src.manager.async_task_manager import async_task_manager, AsyncTask
from src.plugin_system import EventType, BaseEventHandler
from src.plugin_system.apis import chat_api, person_api
from src.plugin_system.base.base_event import HandlerResult
from .proactive_thinker_executor import ProactiveThinkerExecutor

logger = get_logger(__name__)
@@ -26,6 +27,7 @@ class ColdStartTask(AsyncTask):
def __init__(self):
    super().__init__(task_name="ColdStartTask")
    self.chat_manager = get_chat_manager()
    self.executor = ProactiveThinkerExecutor()

async def run(self):
    """任务主循环,周期性地检查是否有需要“破冰”的新用户。"""
@@ -72,7 +74,7 @@ class ColdStartTask(AsyncTask):
# 创建后,该用户就进入了机器人的“好友列表”,后续将由 ProactiveThinkingTask 接管
await self.chat_manager.get_or_create_stream(platform, user_info)
await self.executor.execute_cold_start(user_info)

logger.info(f"【冷启动】已为新用户 {chat_id} (昵称: {user_nickname}) 创建聊天流并发送问候。")
except ValueError:
@@ -100,6 +102,7 @@ class ProactiveThinkingTask(AsyncTask):
def __init__(self):
    super().__init__(task_name="ProactiveThinkingTask")
    self.chat_manager = get_chat_manager()
    self.executor = ProactiveThinkerExecutor()

def _get_next_interval(self) -> float:
    """
@@ -174,7 +177,7 @@ class ProactiveThinkingTask(AsyncTask):
if time_since_last_active > next_interval:
    logger.info(f"【日常唤醒】聊天流 {stream.stream_id} 已冷却 {time_since_last_active:.2f} 秒,触发主动对话。")
    await self.executor.execute_wakeup(stream.stream_id)

    # 【关键步骤】在触发后,立刻更新活跃时间并保存。
    # 这可以防止在同一个检查周期内,对同一个目标因为意外的延迟而发送多条消息。

View File

@@ -0,0 +1,77 @@
from typing import Optional
from maim_message import UserInfo
from src.chat.memory_system.memory_manager import MemoryManager
from src.common.logger import get_logger
from src.plugin_system.apis import chat_api, person_api, schedule_api, send_api, llm_api
logger = get_logger(__name__)
class ProactiveThinkerExecutor:
    """Proactive-thinking executor: builds and sends proactive messages.

    Used by the cold-start ("ice-breaking") task and the periodic wake-up
    task.  It assembles an LLM prompt from the user's stored memories and
    today's schedule, asks ``llm_api`` to generate a message, and delivers
    it through ``send_api``.
    """

    def __init__(self):
        # Dedicated memory accessor for per-person memory lookups.
        self.memory_manager = MemoryManager()

    async def _generate_prompt(self, stream_id: str) -> Optional[str]:
        """Build a prompt containing the user's memories and schedule.

        Returns:
            The assembled prompt string, or ``None`` when the stream
            cannot be resolved.
        """
        # 1. Resolve the chat stream and the person behind it.
        stream = chat_api.get_stream_by_stream_id(stream_id)
        if not stream:
            logger.warning(f"无法找到 stream_id 为 {stream_id} 的聊天流")
            return None
        user_info = stream.user_info
        person_id = person_api.get_person_id(user_info.platform, int(user_info.user_id))

        # 2. Gather memories about this person.
        memories = await self.memory_manager.get_memories(person_id)
        memory_context = "\n".join(f"- {m.content}" for m in memories)

        # 3. Gather today's schedule entries.
        schedules = await schedule_api.get_today_schedule(person_id)
        schedule_context = "\n".join(
            f"- {s.title} ({s.start_time}-{s.end_time})" for s in schedules
        )

        # 4. Assemble the final prompt for the LLM.
        prompt = f"""
# Context
## Memory
{memory_context}
## Schedule
{schedule_context}
# Task
You are a proactive assistant. Based on the user's memory and schedule, initiate a conversation.
"""
        return prompt

    async def execute_cold_start(self, user_info: UserInfo):
        """Send an ice-breaking greeting to a newly discovered user."""
        logger.info(f"为新用户 {user_info.user_id} 执行“破冰”操作")
        prompt = f"You are a proactive assistant. Initiate a conversation with a new friend named {user_info.user_nickname}."
        response = await llm_api.generate(prompt)
        await send_api.send_message(user_info.platform, user_info.user_id, response)

    async def execute_wakeup(self, stream_id: str):
        """Send a context-aware greeting to a chat that has gone quiet."""
        logger.info(f"为聊天流 {stream_id} 执行“唤醒”操作")
        # Resolve the stream once, up front.  The original implementation
        # looked it up a second time *after* prompt generation and
        # dereferenced the result without a None check, which could raise
        # AttributeError if the stream vanished in between.
        stream = chat_api.get_stream_by_stream_id(stream_id)
        if not stream:
            logger.warning(f"无法找到 stream_id 为 {stream_id} 的聊天流")
            return
        prompt = await self._generate_prompt(stream_id)
        if not prompt:
            return
        response = await llm_api.generate(prompt)
        await send_api.send_message(stream.user_info.platform, stream.user_info.user_id, response)