typing change, use enum instead of string, fix typo
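The substance of the commit is the switch from bare strings such as "focus", "normal" and "priority" to the ChatMode enum from src.plugin_system.base.component_types, plus a first_replyed → first_replied spelling fix and a few type annotations. A minimal standalone sketch of the string-vs-enum trade-off (simplified; the project's real enum, including the new PRIORITY member, appears in the hunks below):

from enum import Enum

class ChatMode(Enum):
    FOCUS = "focus"
    NORMAL = "normal"
    PRIORITY = "priority"
    ALL = "all"

# Before: a misspelled string compares as False and the bug passes silently.
loop_mode = "nromal"
print(loop_mode == "normal")          # False

# After: a misspelled member name fails loudly the moment it is looked up.
loop_mode = ChatMode.NORMAL
print(loop_mode == ChatMode.NORMAL)   # True
# ChatMode.NROMAL -> AttributeError at the call site.

# Note that ChatMode.NORMAL == "normal" is also False, which is why the commit
# updates every comparison site together with the assignments.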
@@ -18,13 +18,12 @@ from src.chat.focus_chat.hfc_utils import CycleDetail
 from src.chat.focus_chat.hfc_utils import get_recent_message_stats
 from src.person_info.relationship_builder_manager import relationship_builder_manager
 from src.person_info.person_info import get_person_info_manager
-from src.plugin_system.base.component_types import ActionInfo
+from src.plugin_system.base.component_types import ActionInfo, ChatMode
 from src.plugin_system.apis import generator_api, send_api, message_api
 from src.chat.willing.willing_manager import get_willing_manager
 from ...mais4u.mais4u_chat.priority_manager import PriorityManager

-

 ERROR_LOOP_INFO = {
     "loop_plan_info": {
         "action_result": {
@@ -87,7 +86,7 @@ class HeartFChatting:

         self.relationship_builder = relationship_builder_manager.get_or_create_builder(self.stream_id)

-        self.loop_mode = "normal"
+        self.loop_mode = ChatMode.NORMAL  # 初始循环模式为普通模式

         # 新增:消息计数器和疲惫阈值
         self._message_count = 0  # 发送的消息计数
@@ -120,13 +119,11 @@ class HeartFChatting:
             self.priority_manager = PriorityManager(
                 normal_queue_max_size=5,
             )
-            self.loop_mode = "priority"
+            self.loop_mode = ChatMode.PRIORITY
         else:
             self.priority_manager = None

-        logger.info(
-            f"{self.log_prefix} HeartFChatting 初始化完成"
-        )
+        logger.info(f"{self.log_prefix} HeartFChatting 初始化完成")

         self.energy_value = 100

@@ -192,14 +189,14 @@ class HeartFChatting:
         )

     async def _loopbody(self):
-        if self.loop_mode == "focus":
+        if self.loop_mode == ChatMode.FOCUS:
             self.energy_value -= 5 * global_config.chat.focus_value
             if self.energy_value <= 0:
-                self.loop_mode = "normal"
+                self.loop_mode = ChatMode.NORMAL
                 return True

             return await self._observe()
-        elif self.loop_mode == "normal":
+        elif self.loop_mode == ChatMode.NORMAL:
             new_messages_data = get_raw_msg_by_timestamp_with_chat(
                 chat_id=self.stream_id,
                 timestamp_start=self.last_read_time,
@@ -210,7 +207,7 @@ class HeartFChatting:
             )

             if len(new_messages_data) > 4 * global_config.chat.focus_value:
-                self.loop_mode = "focus"
+                self.loop_mode = ChatMode.FOCUS
                 self.energy_value = 100
                 return True

@@ -255,14 +252,14 @@ class HeartFChatting:
             logger.error(f"{self.log_prefix} 动作修改失败: {e}")

         # 如果normal,开始一个回复生成进程,先准备好回复(其实是和planer同时进行的)
-        if self.loop_mode == "normal":
+        if self.loop_mode == ChatMode.NORMAL:
             reply_to_str = await self.build_reply_to_str(message_data)
             gen_task = asyncio.create_task(self._generate_response(message_data, available_actions, reply_to_str))

         with Timer("规划器", cycle_timers):
             plan_result = await self.action_planner.plan(mode=self.loop_mode)

-        action_result = plan_result.get("action_result", {})
+        action_result: dict = plan_result.get("action_result", {})  # type: ignore
         action_type, action_data, reasoning, is_parallel = (
             action_result.get("action_type", "error"),
             action_result.get("action_data", {}),
@@ -272,7 +269,7 @@ class HeartFChatting:

         action_data["loop_start_time"] = loop_start_time

-        if self.loop_mode == "normal":
+        if self.loop_mode == ChatMode.NORMAL:
             if action_type == "no_action":
                 logger.info(f"[{self.log_prefix}] {global_config.bot.nickname} 决定进行回复")
             elif is_parallel:
@@ -337,7 +334,7 @@ class HeartFChatting:
         self.end_cycle(loop_info, cycle_timers)
         self.print_cycle_info(cycle_timers)

-        if self.loop_mode == "normal":
+        if self.loop_mode == ChatMode.NORMAL:
             await self.willing_manager.after_generate_reply_handle(message_data.get("message_id", ""))

         return True
@@ -611,17 +608,17 @@ class HeartFChatting:
         )

         reply_text = ""
-        first_replyed = False
+        first_replied = False
         for reply_seg in reply_set:
             data = reply_seg[1]
-            if not first_replyed:
+            if not first_replied:
                 if need_reply:
                     await send_api.text_to_stream(
                         text=data, stream_id=self.chat_stream.stream_id, reply_to=reply_to, typing=False
                     )
                 else:
                     await send_api.text_to_stream(text=data, stream_id=self.chat_stream.stream_id, typing=False)
-                first_replyed = True
+                first_replied = True
             else:
                 await send_api.text_to_stream(text=data, stream_id=self.chat_stream.stream_id, typing=True)
             reply_text += data
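Taken together, the HeartFChatting hunks above form a small state machine over ChatMode: FOCUS drains energy_value every cycle and falls back to NORMAL when it reaches zero, while NORMAL promotes the chat back to FOCUS (with energy reset to 100) when the unread backlog exceeds 4 * focus_value. A standalone toy of those transition rules; focus_value stands in for global_config.chat.focus_value, and the real _loopbody also observes, plans and replies:

from enum import Enum

class ChatMode(Enum):
    FOCUS = "focus"
    NORMAL = "normal"
    PRIORITY = "priority"
    ALL = "all"

def step(mode: ChatMode, energy: float, unread: int, focus_value: float = 1.0) -> tuple[ChatMode, float]:
    """One loop iteration: return the next (mode, energy) pair."""
    if mode == ChatMode.FOCUS:
        energy -= 5 * focus_value              # focus mode costs energy each cycle
        if energy <= 0:
            return ChatMode.NORMAL, energy     # exhausted -> drop back to normal chat
        return ChatMode.FOCUS, energy
    if mode == ChatMode.NORMAL and unread > 4 * focus_value:
        return ChatMode.FOCUS, 100             # a message burst re-enters focus, energy refilled
    return mode, energy

mode, energy = ChatMode.NORMAL, 100
mode, energy = step(mode, energy, unread=7)    # -> (ChatMode.FOCUS, 100)
mode, energy = step(mode, energy, unread=0)    # -> (ChatMode.FOCUS, 95)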
@@ -2,7 +2,7 @@ import traceback
 import os
 import re

-from typing import Dict, Any
+from typing import Dict, Any, Optional
 from maim_message import UserInfo

 from src.common.logger import get_logger
@@ -210,7 +210,7 @@ class ChatBot:

         # 确认从接口发来的message是否有自定义的prompt模板信息
         if message.message_info.template_info and not message.message_info.template_info.template_default:
-            template_group_name = message.message_info.template_info.template_name
+            template_group_name: Optional[str] = message.message_info.template_info.template_name  # type: ignore
             template_items = message.message_info.template_info.template_items
             async with global_prompt_manager.async_message_scope(template_group_name):
                 if isinstance(template_items, dict):
@@ -479,7 +479,7 @@ def message_from_db_dict(db_dict: dict) -> MessageRecv:
     msg = MessageRecv(recv_dict)

     # 从数据库字典中填充其他可选字段
-    msg.interest_value = db_dict.get("interest_value")
+    msg.interest_value = db_dict.get("interest_value", 0.0)
     msg.is_mentioned = db_dict.get("is_mentioned")
     msg.priority_mode = db_dict.get("priority_mode", "interest")
     msg.priority_info = db_dict.get("priority_info")
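The two hunks above are defensive typing fixes rather than behaviour changes: Optional is imported so template_group_name can be annotated as Optional[str], and interest_value now defaults to 0.0 when the database row lacks the field instead of silently becoming None. A small illustration of why the explicit default matters (hypothetical row, not project data):

# dict.get without a default returns None for a missing key, and None later
# breaks numeric comparisons; the 0.0 default keeps the field a float.
db_dict = {"message_id": "abc123"}              # no "interest_value" stored

interest = db_dict.get("interest_value")        # None
# interest > 0.5  -> TypeError: '>' not supported between 'NoneType' and 'float'

interest = db_dict.get("interest_value", 0.0)   # 0.0
print(interest > 0.5)                           # False, and no crash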
@@ -7,7 +7,6 @@ from typing import List, Any, Dict, TYPE_CHECKING
|
|||||||
from src.common.logger import get_logger
|
from src.common.logger import get_logger
|
||||||
from src.config.config import global_config
|
from src.config.config import global_config
|
||||||
from src.llm_models.utils_model import LLMRequest
|
from src.llm_models.utils_model import LLMRequest
|
||||||
from src.chat.focus_chat.hfc_utils import CycleDetail
|
|
||||||
from src.chat.message_receive.chat_stream import get_chat_manager, ChatMessageContext
|
from src.chat.message_receive.chat_stream import get_chat_manager, ChatMessageContext
|
||||||
from src.chat.planner_actions.action_manager import ActionManager
|
from src.chat.planner_actions.action_manager import ActionManager
|
||||||
from src.chat.utils.chat_message_builder import get_raw_msg_before_timestamp_with_chat, build_readable_messages
|
from src.chat.utils.chat_message_builder import get_raw_msg_before_timestamp_with_chat, build_readable_messages
|
||||||
|
|||||||
@@ -19,7 +19,7 @@ from src.chat.utils.chat_message_builder import (
|
|||||||
from src.chat.utils.utils import get_chat_type_and_target_info
|
from src.chat.utils.utils import get_chat_type_and_target_info
|
||||||
from src.chat.planner_actions.action_manager import ActionManager
|
from src.chat.planner_actions.action_manager import ActionManager
|
||||||
from src.chat.message_receive.chat_stream import get_chat_manager
|
from src.chat.message_receive.chat_stream import get_chat_manager
|
||||||
from src.plugin_system.base.component_types import ActionInfo
|
from src.plugin_system.base.component_types import ActionInfo, ChatMode
|
||||||
|
|
||||||
|
|
||||||
logger = get_logger("planner")
|
logger = get_logger("planner")
|
||||||
@@ -79,7 +79,7 @@ class ActionPlanner:
|
|||||||
|
|
||||||
self.last_obs_time_mark = 0.0
|
self.last_obs_time_mark = 0.0
|
||||||
|
|
||||||
async def plan(self, mode: str = "focus") -> Dict[str, Dict[str, Any]]: # sourcery skip: dict-comprehension
|
async def plan(self, mode: ChatMode = ChatMode.FOCUS) -> Dict[str, Dict[str, Any] | str]: # sourcery skip: dict-comprehension
|
||||||
"""
|
"""
|
||||||
规划器 (Planner): 使用LLM根据上下文决定做出什么动作。
|
规划器 (Planner): 使用LLM根据上下文决定做出什么动作。
|
||||||
"""
|
"""
|
||||||
@@ -108,7 +108,7 @@ class ActionPlanner:
|
|||||||
|
|
||||||
# 如果没有可用动作或只有no_reply动作,直接返回no_reply
|
# 如果没有可用动作或只有no_reply动作,直接返回no_reply
|
||||||
if not current_available_actions:
|
if not current_available_actions:
|
||||||
if mode == "focus":
|
if mode == ChatMode.FOCUS:
|
||||||
action = "no_reply"
|
action = "no_reply"
|
||||||
else:
|
else:
|
||||||
action = "no_action"
|
action = "no_action"
|
||||||
@@ -217,7 +217,7 @@ class ActionPlanner:
|
|||||||
is_group_chat: bool, # Now passed as argument
|
is_group_chat: bool, # Now passed as argument
|
||||||
chat_target_info: Optional[dict], # Now passed as argument
|
chat_target_info: Optional[dict], # Now passed as argument
|
||||||
current_available_actions: Dict[str, ActionInfo],
|
current_available_actions: Dict[str, ActionInfo],
|
||||||
mode: str = "focus",
|
mode: ChatMode = ChatMode.FOCUS,
|
||||||
) -> str: # sourcery skip: use-join
|
) -> str: # sourcery skip: use-join
|
||||||
"""构建 Planner LLM 的提示词 (获取模板并填充数据)"""
|
"""构建 Planner LLM 的提示词 (获取模板并填充数据)"""
|
||||||
try:
|
try:
|
||||||
@@ -247,7 +247,7 @@ class ActionPlanner:
|
|||||||
|
|
||||||
self.last_obs_time_mark = time.time()
|
self.last_obs_time_mark = time.time()
|
||||||
|
|
||||||
if mode == "focus":
|
if mode == ChatMode.FOCUS:
|
||||||
by_what = "聊天内容"
|
by_what = "聊天内容"
|
||||||
no_action_block = """重要说明1:
|
no_action_block = """重要说明1:
|
||||||
- 'no_reply' 表示只进行不进行回复,等待合适的回复时机
|
- 'no_reply' 表示只进行不进行回复,等待合适的回复时机
|
||||||
|
|||||||
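After the planner hunks above, ActionPlanner.plan() accepts a ChatMode instead of a raw string and is annotated as returning Dict[str, Dict[str, Any] | str]. A hedged sketch of how HeartFChatting consumes that result, based only on the shapes visible in this diff; the payload values below are invented for illustration:

from typing import Any, Dict

# Assumed example payload; the real planner fills this from an LLM decision.
plan_result: Dict[str, Dict[str, Any] | str] = {
    "action_result": {
        "action_type": "no_action",   # e.g. "no_reply", "no_action", a plugin action, or "error"
        "action_data": {},
    },
}

action_result: dict = plan_result.get("action_result", {})  # type: ignore
action_type = action_result.get("action_type", "error")
action_data = action_result.get("action_data", {})
action_data["loop_start_time"] = 0.0  # placeholder for the real loop timestamp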
@@ -33,6 +33,7 @@ class ChatMode(Enum):

     FOCUS = "focus"  # Focus聊天模式
     NORMAL = "normal"  # Normal聊天模式
+    PRIORITY = "priority"  # 优先级聊天模式
     ALL = "all"  # 所有聊天模式

     def __str__(self):
@@ -110,16 +110,16 @@ class ReplyAction(BaseAction):

         # 构建回复文本
         reply_text = ""
-        first_replyed = False
+        first_replied = False
         for reply_seg in reply_set:
             data = reply_seg[1]
-            if not first_replyed:
+            if not first_replied:
                 if need_reply:
                     await self.send_text(content=data, reply_to=self.action_data.get("reply_to", ""), typing=False)
-                    first_replyed = True
+                    first_replied = True
                 else:
                     await self.send_text(content=data, typing=False)
-                    first_replyed = True
+                    first_replied = True
             else:
                 await self.send_text(content=data, typing=True)
             reply_text += data
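Beyond the first_replyed → first_replied rename, the two reply loops in this commit (HeartFChatting above and ReplyAction here) share one pattern: the first segment of a multi-part reply is sent immediately, quoting the triggering message only when need_reply is set, and every later segment goes out with typing=True. A minimal standalone sketch of that pattern; send_text below is a stand-in for send_api.text_to_stream / BaseAction.send_text, not the project's API:

import asyncio

async def send_text(content: str, reply_to: str = "", typing: bool = False) -> None:
    """Stand-in sender; the real project pushes to a chat stream."""
    print(f"send(reply_to={reply_to!r}, typing={typing}): {content}")

async def send_reply_set(reply_set, reply_to: str, need_reply: bool) -> str:
    reply_text = ""
    first_replied = False                     # the corrected flag name from this commit
    for _seg_type, data in reply_set:
        if not first_replied:
            # First segment: quote the original message only when a reply is required.
            await send_text(data, reply_to=reply_to if need_reply else "", typing=False)
            first_replied = True
        else:
            # Later segments simulate typing before they are sent.
            await send_text(data, typing=True)
        reply_text += data
    return reply_text

asyncio.run(send_reply_set([("text", "hello"), ("text", "world")], "msg-1", True))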