diff --git a/src/heart_flow/heartflow.py b/src/heart_flow/heartflow.py
index de5d3db43..74250a708 100644
--- a/src/heart_flow/heartflow.py
+++ b/src/heart_flow/heartflow.py
@@ -155,7 +155,7 @@ class Heartflow:
         # prompt += f"你现在{mood_info}。"
         # prompt += "现在你接下去继续思考,产生新的想法,但是要基于原有的主要想法,不要分点输出,"
         # prompt += "输出连贯的内心独白,不要太长,但是记得结合上述的消息,关注新内容:"
-        prompt = global_prompt_manager.get_prompt("thinking_prompt").format(
+        prompt = (await global_prompt_manager.get_prompt_async("thinking_prompt")).format(
             schedule_info, personality_info, related_memory_info, current_thinking_info, sub_flows_info, mood_info
         )

@@ -212,7 +212,7 @@ class Heartflow:
         # prompt += f"你现在{mood_info}\n"
         # prompt += """现在请你总结这些聊天内容,注意关注聊天内容对原有的想法的影响,输出连贯的内心独白
         # 不要太长,但是记得结合上述的消息,要记得你的人设,关注新内容:"""
-        prompt = global_prompt_manager.get_prompt("mind_summary_prompt").format(
+        prompt = (await global_prompt_manager.get_prompt_async("mind_summary_prompt")).format(
             personality_info, global_config.BOT_NICKNAME, self.current_mind, minds_str, mood_info
         )

diff --git a/src/heart_flow/sub_heartflow.py b/src/heart_flow/sub_heartflow.py
index 9cf2e2ea2..18f256d1d 100644
--- a/src/heart_flow/sub_heartflow.py
+++ b/src/heart_flow/sub_heartflow.py
@@ -16,7 +16,8 @@ import random
 from src.plugins.chat.chat_stream import ChatStream
 from src.plugins.person_info.relationship_manager import relationship_manager
 from src.plugins.chat.utils import get_recent_group_speaker
-from src.do_tool.tool_use import ToolUser
+from src.do_tool.tool_use import ToolUser
+from ..plugins.utils.prompt_builder import Prompt,global_prompt_manager

 subheartflow_config = LogConfig(
     # 使用海马体专用样式
@@ -25,6 +26,35 @@
 )
 logger = get_module_logger("subheartflow", config=subheartflow_config)

+def init_prompt():
+    prompt = ""
+    # prompt += f"麦麦的总体想法是:{self.main_heartflow_info}\n\n"
+    prompt += "{collected_info}\n"
+    prompt += "{relation_prompt_all}\n"
+    prompt += "{prompt_personality}\n"
+    prompt += "刚刚你的想法是{current_thinking_info}。如果有新的内容,记得转换话题\n"
+    prompt += "-----------------------------------\n"
+    prompt += "现在你正在上网,和qq群里的网友们聊天,群里正在聊的话题是:{chat_observe_info}\n"
+    prompt += "你现在{mood_info}\n"
+    prompt += "你注意到{sender_name}刚刚说:{message_txt}\n"
+    prompt += "现在你接下去继续思考,产生新的想法,不要分点输出,输出连贯的内心独白"
+    prompt += "思考时可以想想如何对群聊内容进行回复。回复的要求是:平淡一些,简短一些,说中文,尽量不要说你说过的话\n"
+    prompt += "请注意不要输出多余内容(包括前后缀,冒号和引号,括号, 表情,等),不要带有括号和动作描写"
+    prompt += "记得结合上述的消息,生成内心想法,文字不要浮夸,注意你就是{bot_name},{bot_name}指的就是你。"
+    Prompt(prompt,"sub_heartflow_prompt_before")
+    prompt = ""
+    # prompt += f"你现在正在做的事情是:{schedule_info}\n"
+    prompt += "{prompt_personality}\n"
+    prompt += "现在你正在上网,和qq群里的网友们聊天,群里正在聊的话题是:{chat_observe_info}\n"
+    prompt += "刚刚你的想法是{current_thinking_info}。"
+    prompt += "你现在看到了网友们发的新消息:{message_new_info}\n"
+    prompt += "你刚刚回复了群友们:{reply_info}"
+    prompt += "你现在{mood_info}"
+    prompt += "现在你接下去继续思考,产生新的想法,记得保留你刚刚的想法,不要分点输出,输出连贯的内心独白"
+    prompt += "不要太长,但是记得结合上述的消息,要记得你的人设,关注聊天和新内容,关注你回复的内容,不要思考太多:"
+    Prompt(prompt,'sub_heartflow_prompt_after')
+
+

 class CurrentState:
     def __init__(self):
@@ -170,26 +200,34 @@ class SubHeartflow:
         for person in who_chat_in_group:
             relation_prompt += await relationship_manager.build_relationship_info(person)

-        relation_prompt_all = (
-            f"{relation_prompt}关系等级越大,关系越好,请分析聊天记录,"
-            f"根据你和说话者{sender_name}的关系和态度进行回复,明确你的立场和情感。"
-        )
+        # relation_prompt_all = (
+        #     f"{relation_prompt}关系等级越大,关系越好,请分析聊天记录,"
+        #     f"根据你和说话者{sender_name}的关系和态度进行回复,明确你的立场和情感。"
+        # )
+        relation_prompt_all = (await global_prompt_manager.get_prompt_async('relationship_prompt')).format(
+            relation_prompt,sender_name
+        )

-        prompt = ""
-        # prompt += f"麦麦的总体想法是:{self.main_heartflow_info}\n\n"
-        if tool_result.get("used_tools", False):
-            prompt += f"{collected_info}\n"
-        prompt += f"{relation_prompt_all}\n"
-        prompt += f"{prompt_personality}\n"
-        prompt += f"刚刚你的想法是{current_thinking_info}。如果有新的内容,记得转换话题\n"
-        prompt += "-----------------------------------\n"
-        prompt += f"现在你正在上网,和qq群里的网友们聊天,群里正在聊的话题是:{chat_observe_info}\n"
-        prompt += f"你现在{mood_info}\n"
-        prompt += f"你注意到{sender_name}刚刚说:{message_txt}\n"
-        prompt += "现在你接下去继续思考,产生新的想法,不要分点输出,输出连贯的内心独白"
-        prompt += "思考时可以想想如何对群聊内容进行回复。回复的要求是:平淡一些,简短一些,说中文,尽量不要说你说过的话\n"
-        prompt += "请注意不要输出多余内容(包括前后缀,冒号和引号,括号, 表情,等),不要带有括号和动作描写"
-        prompt += f"记得结合上述的消息,生成内心想法,文字不要浮夸,注意你就是{self.bot_name},{self.bot_name}指的就是你。"
+        # prompt = ""
+        # # prompt += f"麦麦的总体想法是:{self.main_heartflow_info}\n\n"
+        # if tool_result.get("used_tools", False):
+        #     prompt += f"{collected_info}\n"
+        # prompt += f"{relation_prompt_all}\n"
+        # prompt += f"{prompt_personality}\n"
+        # prompt += f"刚刚你的想法是{current_thinking_info}。如果有新的内容,记得转换话题\n"
+        # prompt += "-----------------------------------\n"
+        # prompt += f"现在你正在上网,和qq群里的网友们聊天,群里正在聊的话题是:{chat_observe_info}\n"
+        # prompt += f"你现在{mood_info}\n"
+        # prompt += f"你注意到{sender_name}刚刚说:{message_txt}\n"
+        # prompt += "现在你接下去继续思考,产生新的想法,不要分点输出,输出连贯的内心独白"
+        # prompt += "思考时可以想想如何对群聊内容进行回复。回复的要求是:平淡一些,简短一些,说中文,尽量不要说你说过的话\n"
+        # prompt += "请注意不要输出多余内容(包括前后缀,冒号和引号,括号, 表情,等),不要带有括号和动作描写"
+        # prompt += f"记得结合上述的消息,生成内心想法,文字不要浮夸,注意你就是{self.bot_name},{self.bot_name}指的就是你。"
+
+        prompt= (await global_prompt_manager.get_prompt_async("sub_heartflow_prompt_before")).format(
+            collected_info,relation_prompt_all,prompt_personality,current_thinking_info,chat_observe_info,mood_info,sender_name,
+            message_txt,self.bot_name
+        )

         try:
             response, reasoning_content = await self.llm_model.generate_response_async(prompt)
@@ -233,16 +271,20 @@ class SubHeartflow:
         reply_info = reply_content

         # schedule_info = bot_schedule.get_current_num_task(num=1, time_info=False)
-        prompt = ""
-        # prompt += f"你现在正在做的事情是:{schedule_info}\n"
-        prompt += f"{prompt_personality}\n"
-        prompt += f"现在你正在上网,和qq群里的网友们聊天,群里正在聊的话题是:{chat_observe_info}\n"
-        prompt += f"刚刚你的想法是{current_thinking_info}。"
-        prompt += f"你现在看到了网友们发的新消息:{message_new_info}\n"
-        prompt += f"你刚刚回复了群友们:{reply_info}"
-        prompt += f"你现在{mood_info}"
-        prompt += "现在你接下去继续思考,产生新的想法,记得保留你刚刚的想法,不要分点输出,输出连贯的内心独白"
-        prompt += "不要太长,但是记得结合上述的消息,要记得你的人设,关注聊天和新内容,关注你回复的内容,不要思考太多:"
+        # prompt = ""
+        # # prompt += f"你现在正在做的事情是:{schedule_info}\n"
+        # prompt += f"{prompt_personality}\n"
+        # prompt += f"现在你正在上网,和qq群里的网友们聊天,群里正在聊的话题是:{chat_observe_info}\n"
+        # prompt += f"刚刚你的想法是{current_thinking_info}。"
+        # prompt += f"你现在看到了网友们发的新消息:{message_new_info}\n"
+        # prompt += f"你刚刚回复了群友们:{reply_info}"
+        # prompt += f"你现在{mood_info}"
+        # prompt += "现在你接下去继续思考,产生新的想法,记得保留你刚刚的想法,不要分点输出,输出连贯的内心独白"
+        # prompt += "不要太长,但是记得结合上述的消息,要记得你的人设,关注聊天和新内容,关注你回复的内容,不要思考太多:"
+        prompt=(await global_prompt_manager.get_prompt_async('sub_heartflow_prompt_after')).format(
+            prompt_personality,chat_observe_info,current_thinking_info,message_new_info,reply_info,mood_info
+        )
+
         try:
             response, reasoning_content = await self.llm_model.generate_response_async(prompt)
         except Exception as e:
@@ -301,5 +343,5 @@ class SubHeartflow:
         self.past_mind.append(self.current_mind)
         self.current_mind = response

-
+init_prompt()
 # subheartflow = SubHeartflow()
diff --git a/src/plugins/chat/bot.py b/src/plugins/chat/bot.py
index b5a16c2ac..9000f4b24 100644
--- a/src/plugins/chat/bot.py
+++ b/src/plugins/chat/bot.py
@@ -8,6 +8,7 @@ from ..chat_module.only_process.only_message_process import MessageProcessor
 from src.common.logger import get_module_logger, CHAT_STYLE_CONFIG, LogConfig
 from ..chat_module.think_flow_chat.think_flow_chat import ThinkFlowChat
 from ..chat_module.reasoning_chat.reasoning_chat import ReasoningChat
+from ..utils.prompt_builder import Prompt,global_prompt_manager
 import traceback

 # 定义日志配置
@@ -88,53 +89,72 @@ class ChatBot:
             if userinfo.user_id in global_config.ban_user_id:
                 logger.debug(f"用户{userinfo.user_id}被禁止回复")
                 return
+
+            if message.message_info.template_info and not message.message_info.template_info.template_default:
+                template_group_name=message.message_info.template_info.template_name
+                template_items=message.message_info.template_info.template_items
+                async with global_prompt_manager.async_message_scope(template_group_name):
+                    if isinstance(template_items,dict):
+                        for k in template_items.keys():
+                            await Prompt.create_async(template_items[k],k)
+                            print(f"注册{template_items[k]},{k}")
+            else:
+                template_group_name=None

-            if global_config.enable_pfc_chatting:
-                try:
+            async def preprocess():
+                if global_config.enable_pfc_chatting:
+                    try:
+                        if groupinfo is None:
+                            if global_config.enable_friend_chat:
+                                userinfo = message.message_info.user_info
+                                messageinfo = message.message_info
+                                # 创建聊天流
+                                chat = await chat_manager.get_or_create_stream(
+                                    platform=messageinfo.platform,
+                                    user_info=userinfo,
+                                    group_info=groupinfo,
+                                )
+                                message.update_chat_stream(chat)
+                                await self.only_process_chat.process_message(message)
+                                await self._create_PFC_chat(message)
+                        else:
+                            if groupinfo.group_id in global_config.talk_allowed_groups:
+                                # logger.debug(f"开始群聊模式{str(message_data)[:50]}...")
+                                if global_config.response_mode == "heart_flow":
+                                    await self.think_flow_chat.process_message(message_data)
+                                elif global_config.response_mode == "reasoning":
+                                    # logger.debug(f"开始推理模式{str(message_data)[:50]}...")
+                                    await self.reasoning_chat.process_message(message_data)
+                                else:
+                                    logger.error(f"未知的回复模式,请检查配置文件!!: {global_config.response_mode}")
+                    except Exception as e:
+                        logger.error(f"处理PFC消息失败: {e}")
+                else:
                     if groupinfo is None:
                         if global_config.enable_friend_chat:
-                            userinfo = message.message_info.user_info
-                            messageinfo = message.message_info
-                            # 创建聊天流
-                            chat = await chat_manager.get_or_create_stream(
-                                platform=messageinfo.platform,
-                                user_info=userinfo,
-                                group_info=groupinfo,
-                            )
-                            message.update_chat_stream(chat)
-                            await self.only_process_chat.process_message(message)
-                            await self._create_PFC_chat(message)
-                    else:
-                        if groupinfo.group_id in global_config.talk_allowed_groups:
-                            # logger.debug(f"开始群聊模式{str(message_data)[:50]}...")
+                            # 私聊处理流程
+                            # await self._handle_private_chat(message)
                             if global_config.response_mode == "heart_flow":
                                 await self.think_flow_chat.process_message(message_data)
                             elif global_config.response_mode == "reasoning":
-                                # logger.debug(f"开始推理模式{str(message_data)[:50]}...")
                                 await self.reasoning_chat.process_message(message_data)
                             else:
                                 logger.error(f"未知的回复模式,请检查配置文件!!: {global_config.response_mode}")
-                except Exception as e:
-                    logger.error(f"处理PFC消息失败: {e}")
+                        else:  # 群聊处理
+                            if groupinfo.group_id in global_config.talk_allowed_groups:
+                                if global_config.response_mode == "heart_flow":
+                                    await self.think_flow_chat.process_message(message_data)
+                                elif global_config.response_mode == "reasoning":
+                                    await self.reasoning_chat.process_message(message_data)
+                                else:
+                                    logger.error(f"未知的回复模式,请检查配置文件!!: {global_config.response_mode}")
+
+            if template_group_name:
+                async with global_prompt_manager.async_message_scope(template_group_name):
+                    await preprocess()
             else:
-                if groupinfo is None:
-                    if global_config.enable_friend_chat:
-                        # 私聊处理流程
-                        # await self._handle_private_chat(message)
-                        if global_config.response_mode == "heart_flow":
-                            await self.think_flow_chat.process_message(message_data)
-                        elif global_config.response_mode == "reasoning":
-                            await self.reasoning_chat.process_message(message_data)
-                        else:
-                            logger.error(f"未知的回复模式,请检查配置文件!!: {global_config.response_mode}")
-                else:  # 群聊处理
-                    if groupinfo.group_id in global_config.talk_allowed_groups:
-                        if global_config.response_mode == "heart_flow":
-                            await self.think_flow_chat.process_message(message_data)
-                        elif global_config.response_mode == "reasoning":
-                            await self.reasoning_chat.process_message(message_data)
-                        else:
-                            logger.error(f"未知的回复模式,请检查配置文件!!: {global_config.response_mode}")
+                await preprocess()
+
         except Exception as e:
             logger.error(f"预处理消息失败: {e}")
             traceback.print_exc()
diff --git a/src/plugins/chat_module/reasoning_chat/reasoning_prompt_builder.py b/src/plugins/chat_module/reasoning_chat/reasoning_prompt_builder.py
index 2ce33dc29..2ab34db11 100644
--- a/src/plugins/chat_module/reasoning_chat/reasoning_prompt_builder.py
+++ b/src/plugins/chat_module/reasoning_chat/reasoning_prompt_builder.py
@@ -106,7 +106,7 @@ class PromptBuilder:
             for memory in related_memory:
                 related_memory_info += memory[1]
             # memory_prompt = f"你想起你之前见过的事情:{related_memory_info}。\n以上是你的回忆,不一定是目前聊天里的人说的,也不一定是现在发生的事情,请记住。\n"
-            memory_prompt = global_prompt_manager.format_prompt(
+            memory_prompt = await global_prompt_manager.format_prompt(
                 "memory_prompt", related_memory_info=related_memory_info
             )
         else:
@@ -168,7 +168,7 @@ class PromptBuilder:
         prompt_info = await self.get_prompt_info(message_txt, threshold=0.38)
         if prompt_info:
             # prompt_info = f"""\n你有以下这些**知识**:\n{prompt_info}\n请你**记住上面的知识**,之后可能会用到。\n"""
-            prompt_info = global_prompt_manager.format_prompt("knowledge_prompt", prompt_info=prompt_info)
+            prompt_info = await global_prompt_manager.format_prompt("knowledge_prompt", prompt_info=prompt_info)

         end_time = time.time()
         logger.debug(f"知识检索耗时: {(end_time - start_time):.3f}秒")
@@ -194,22 +194,22 @@ class PromptBuilder:
         # 请注意不要输出多余内容(包括前后缀,冒号和引号,括号,表情等),只输出回复内容。
         # {moderation_prompt}不要输出多余内容(包括前后缀,冒号和引号,括号,表情包,at或 @等 )。"""

-        prompt = global_prompt_manager.format_prompt(
+        prompt = await global_prompt_manager.format_prompt(
             "reasoning_prompt_main",
-            relation_prompt_all=global_prompt_manager.get_prompt("relationship_prompt"),
+            relation_prompt_all=await global_prompt_manager.get_prompt_async("relationship_prompt"),
             replation_prompt=relation_prompt,
             sender_name=sender_name,
             memory_prompt=memory_prompt,
             prompt_info=prompt_info,
-            schedule_prompt=global_prompt_manager.format_prompt(
+            schedule_prompt=await global_prompt_manager.format_prompt(
                 "schedule_prompt", schedule_info=bot_schedule.get_current_num_task(num=1, time_info=False)
             ),
-            chat_target=global_prompt_manager.get_prompt("chat_target_group1")
+            chat_target=await global_prompt_manager.get_prompt_async("chat_target_group1")
             if chat_in_group
-            else global_prompt_manager.get_prompt("chat_target_private1"),
-            chat_target_2=global_prompt_manager.get_prompt("chat_target_group2")
+            else await global_prompt_manager.get_prompt_async("chat_target_private1"),
+            chat_target_2=await global_prompt_manager.get_prompt_async("chat_target_group2")
             if chat_in_group
-            else global_prompt_manager.get_prompt("chat_target_private2"),
+            else await global_prompt_manager.get_prompt_async("chat_target_private2"),
             chat_talking_prompt=chat_talking_prompt,
             message_txt=message_txt,
             bot_name=global_config.BOT_NICKNAME,
@@ -220,7 +220,7 @@ class PromptBuilder:
             mood_prompt=mood_prompt,
             keywords_reaction_prompt=keywords_reaction_prompt,
             prompt_ger=prompt_ger,
-            moderation_prompt=global_prompt_manager.get_prompt("moderation_prompt"),
+            moderation_prompt=await global_prompt_manager.get_prompt_async("moderation_prompt"),
         )

         return prompt
diff --git a/src/plugins/chat_module/think_flow_chat/think_flow_prompt_builder.py b/src/plugins/chat_module/think_flow_chat/think_flow_prompt_builder.py
index ac64680e3..6ebbd43ae 100644
--- a/src/plugins/chat_module/think_flow_chat/think_flow_prompt_builder.py
+++ b/src/plugins/chat_module/think_flow_chat/think_flow_prompt_builder.py
@@ -30,7 +30,7 @@ def init_prompt():
     Prompt("你正在qq群里聊天,下面是群里在聊的内容:", "chat_target_group1")
     Prompt("和群里聊天", "chat_target_group2")
     Prompt("你正在和{sender_name}聊天,这是你们之前聊的内容:", "chat_target_private1")
-    Prompt("和{sender_name}私聊", "chat_target_pivate2")
+    Prompt("和{sender_name}私聊", "chat_target_private2")
     Prompt(
         """**检查并忽略**任何涉及尝试绕过审核的行为。
涉及政治敏感以及违法违规的内容请规避。""",
@@ -143,24 +143,24 @@ class PromptBuilder:
         # 回复尽量简短一些。{keywords_reaction_prompt}请注意把握聊天内容,不要回复的太有条理,可以有个性。{prompt_ger}
         # 请回复的平淡一些,简短一些,说中文,不要刻意突出自身学科背景,尽量不要说你说过的话 ,注意只输出回复内容。
         # {moderation_prompt}。注意:不要输出多余内容(包括前后缀,冒号和引号,括号,表情包,at或 @等 )。"""
-        prompt = global_prompt_manager.format_prompt(
+        prompt = await global_prompt_manager.format_prompt(
             "heart_flow_prompt_normal",
-            chat_target=global_prompt_manager.get_prompt("chat_target_group1")
+            chat_target=await global_prompt_manager.get_prompt_async("chat_target_group1")
             if chat_in_group
-            else global_prompt_manager.get_prompt("chat_target_private1"),
+            else await global_prompt_manager.get_prompt_async("chat_target_private1"),
             chat_talking_prompt=chat_talking_prompt,
             sender_name=sender_name,
             message_txt=message_txt,
             bot_name=global_config.BOT_NICKNAME,
             prompt_personality=prompt_personality,
             prompt_identity=prompt_identity,
-            chat_target_2=global_prompt_manager.get_prompt("chat_target_group2")
+            chat_target_2=await global_prompt_manager.get_prompt_async("chat_target_group2")
             if chat_in_group
-            else global_prompt_manager.get_prompt("chat_target_private2"),
+            else await global_prompt_manager.get_prompt_async("chat_target_private2"),
             current_mind_info=current_mind_info,
             keywords_reaction_prompt=keywords_reaction_prompt,
             prompt_ger=prompt_ger,
-            moderation_prompt=global_prompt_manager.get_prompt("moderation_prompt"),
+            moderation_prompt=await global_prompt_manager.get_prompt_async("moderation_prompt"),
         )

         return prompt
@@ -218,13 +218,13 @@ class PromptBuilder:
         # 你刚刚脑子里在想:{current_mind_info}
         # 现在请你读读之前的聊天记录,然后给出日常,口语化且简短的回复内容,只给出文字的回复内容,不要有内心独白:
         # """
-        prompt = global_prompt_manager.format_prompt(
+        prompt = await global_prompt_manager.format_prompt(
             "heart_flow_prompt_simple",
             bot_name=global_config.BOT_NICKNAME,
             prompt_personality=prompt_personality,
-            chat_target=global_prompt_manager.get_prompt("chat_target_group1")
+            chat_target=await global_prompt_manager.get_prompt_async("chat_target_group1")
             if chat_in_group
-            else global_prompt_manager.get_prompt("chat_target_private1"),
+            else await global_prompt_manager.get_prompt_async("chat_target_private1"),
             chat_talking_prompt=chat_talking_prompt,
             sender_name=sender_name,
             message_txt=message_txt,
@@ -266,14 +266,14 @@ class PromptBuilder:
         # {chat_target},你希望在群里回复:{content}。现在请你根据以下信息修改回复内容。将这个回复修改的更加日常且口语化的回复,平淡一些,回复尽量简短一些。不要回复的太有条理。
         # {prompt_ger},不要刻意突出自身学科背景,注意只输出回复内容。
         # {moderation_prompt}。注意:不要输出多余内容(包括前后缀,冒号和引号,括号,表情包,at或 @等 )。"""
-        prompt = global_prompt_manager.format_prompt(
+        prompt = await global_prompt_manager.format_prompt(
             "heart_flow_prompt_response",
             bot_name=global_config.BOT_NICKNAME,
             prompt_identity=prompt_identity,
-            chat_target=global_prompt_manager.get_prompt("chat_target_group1"),
+            chat_target=await global_prompt_manager.get_prompt_async("chat_target_group1"),
             content=content,
             prompt_ger=prompt_ger,
-            moderation_prompt=global_prompt_manager.get_prompt("moderation_prompt"),
+            moderation_prompt=await global_prompt_manager.get_prompt_async("moderation_prompt"),
         )

         return prompt
diff --git a/src/plugins/message/message_base.py b/src/plugins/message/message_base.py
index edaa9a033..2f1776702 100644
--- a/src/plugins/message/message_base.py
+++ b/src/plugins/message/message_base.py
@@ -137,7 +137,7 @@ class FormatInfo:
 class TemplateInfo:
     """模板信息类"""

-    template_items: Optional[List[Dict]] = None
+    template_items: Optional[Dict] = None
     template_name: Optional[str] = None
     template_default: bool = True

diff --git a/src/plugins/utils/prompt_builder.py b/src/plugins/utils/prompt_builder.py
index 7266f471d..b237cf0e9 100644
--- a/src/plugins/utils/prompt_builder.py
+++ b/src/plugins/utils/prompt_builder.py
@@ -2,16 +2,69 @@ import ast
 
 from typing import Dict, Any, Optional, List, Union
+from contextlib import asynccontextmanager
+from typing import Optional, Dict
+import asyncio
+
+class PromptContext:
+    def __init__(self):
+        self._context_prompts: Dict[str, Dict[str, "Prompt"]] = {}
+        self._current_context: Optional[str] = None
+        self._context_lock = asyncio.Lock()  # 添加异步锁
+
+    @asynccontextmanager
+    async def async_scope(self, context_id: str):
+        """创建一个异步的临时提示模板作用域"""
+        async with self._context_lock:
+            if context_id not in self._context_prompts:
+                self._context_prompts[context_id] = {}
+
+        previous_context = self._current_context
+        self._current_context = context_id
+        try:
+            yield self
+        finally:
+            async with self._context_lock:
+                self._current_context = previous_context
+
+    async def get_prompt_async(self, name: str) -> Optional["Prompt"]:
+        """异步获取当前作用域中的提示模板"""
+        async with self._context_lock:
+            if self._current_context and name in self._context_prompts[self._current_context]:
+                return self._context_prompts[self._current_context][name]
+            return None
+
+    async def register_async(self, prompt: "Prompt", context_id: Optional[str] = None) -> None:
+        """异步注册提示模板到指定作用域"""
+        async with self._context_lock:
+            target_context = context_id or self._current_context
+            if target_context:
+                self._context_prompts.setdefault(target_context, {})[prompt.name] = prompt
+

 class PromptManager:
-    _instance = None
+    def __init__(self):
+        self._prompts = {}
+        self._counter = 0
+        self._context = PromptContext()
+        self._lock = asyncio.Lock()

-    def __new__(cls):
-        if cls._instance is None:
-            cls._instance = super().__new__(cls)
-            cls._instance._prompts = {}
-            cls._instance._counter = 0
-        return cls._instance
+    @asynccontextmanager
+    async def async_message_scope(self, message_id: str):
+        """为消息处理创建异步临时作用域"""
+        async with self._context.async_scope(message_id):
+            yield self
+
+    async def get_prompt_async(self, name: str) -> "Prompt":
+        # 首先尝试从当前上下文获取
+        context_prompt = await self._context.get_prompt_async(name)
+        if context_prompt is not None:
+            return context_prompt
+        # 如果上下文中不存在,则使用全局提示模板
+        async with self._lock:
+            if name not in self._prompts:
+                raise KeyError(f"Prompt '{name}' not found")
+            return self._prompts[name]

     def generate_name(self, template: str) -> str:
         """为未命名的prompt生成名称"""
@@ -29,13 +82,8 @@ class PromptManager:
         self._prompts[prompt.name] = prompt
         return prompt

-    def get_prompt(self, name: str) -> "Prompt":
-        if name not in self._prompts:
-            raise KeyError(f"Prompt '{name}' not found")
-        return self._prompts[name]
-
-    def format_prompt(self, name: str, **kwargs) -> str:
-        prompt = self.get_prompt(name)
+    async def format_prompt(self, name: str, **kwargs) -> str:
+        prompt = await self.get_prompt_async(name)
         return prompt.format(**kwargs)


@@ -71,10 +119,24 @@ class Prompt(str):
         obj._args = args or []
         obj._kwargs = kwargs

-        # 自动注册到全局管理器
-        global_prompt_manager.register(obj)
+        # 修改自动注册逻辑
+        if global_prompt_manager._context._current_context:
+            # 如果存在当前上下文,则注册到上下文中
+            # asyncio.create_task(global_prompt_manager._context.register_async(obj))
+            pass
+        else:
+            # 否则注册到全局管理器
+            global_prompt_manager.register(obj)
         return obj

+    @classmethod
+    async def create_async(cls, fstr: str, name: Optional[str] = None, args: Union[List[Any], tuple[Any, ...]] = None, **kwargs):
+        """异步创建Prompt实例"""
+        prompt = cls(fstr, name, args, **kwargs)
+        if global_prompt_manager._context._current_context:
+            await global_prompt_manager._context.register_async(prompt)
+        return prompt
+
     @classmethod
     def _format_template(cls, template: str, args: List[Any] = None, kwargs: Dict[str, Any] = None) -> str:
         fmt_str = f"f'''{template}'''"
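For reference, below is a minimal usage sketch of the async prompt-scope API this patch introduces. It is not part of the diff: the `greeting_prompt` name, the template strings, and the `render_greeting` helper are illustrative assumptions; only `Prompt`, `Prompt.create_async`, `global_prompt_manager.async_message_scope`, `get_prompt_async`, and the now-async `format_prompt` come from the patched code.

```python
# Illustrative sketch only (not from the patch). Assumes the import path used in the
# diff; the prompt names and templates below are made up for demonstration.
import asyncio
from typing import Dict, Optional

from src.plugins.utils.prompt_builder import Prompt, global_prompt_manager

# Module-level prompts still self-register with the global manager at construction time.
Prompt("你好,{name}!", "greeting_prompt")


async def render_greeting(template_group: Optional[str], overrides: Dict[str, str]) -> str:
    """Render greeting_prompt, optionally shadowed by a per-message template group."""
    if template_group:
        # Inside this scope, prompts registered via create_async are looked up first;
        # names not found in the scope fall back to the global registry.
        async with global_prompt_manager.async_message_scope(template_group):
            for name, template in overrides.items():
                await Prompt.create_async(template, name)
            return await global_prompt_manager.format_prompt("greeting_prompt", name="麦麦")
    # No template group: use the globally registered prompt directly.
    return await global_prompt_manager.format_prompt("greeting_prompt", name="麦麦")


if __name__ == "__main__":
    # Uses the override inside the scope, the global template outside it.
    print(asyncio.run(render_greeting("demo_group", {"greeting_prompt": "嗨,{name}~"})))
    print(asyncio.run(render_greeting(None, {})))
```

One design note, hedged: the active scope is tracked as a single `_current_context` attribute guarded by an asyncio lock rather than per-task state, so the sketch runs its scopes sequentially; overlapping scopes opened by concurrent messages would share that field.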