fix: lazy-load singletons to fix environment variable loading issue

commit 6767bd6715 (parent 6d1f5be4e1)
Author: 春河晴
Date: 2025-06-11 17:22:43 +09:00
45 changed files with 243 additions and 155 deletions
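
The hunks below swap module-level singleton imports (individuality, chat_manager) for lazy accessor calls (get_individuality(), get_chat_manager()), so each singleton is constructed on first use, after environment variables have been loaded, rather than at import time. The accessor implementations themselves are not part of this diff; a minimal sketch of the pattern they imply, with a hypothetical placeholder class body, might look like this:

# Minimal sketch of the lazy-singleton accessor pattern implied by this commit.
# The real get_individuality() in src/individuality is not shown in this diff;
# the Individuality body here is a hypothetical placeholder.
from typing import Optional


class Individuality:
    def __init__(self) -> None:
        # Assumed to read config/environment values; deferring construction to
        # the first call means .env loading has already happened by this point.
        self.persona = "..."

    def get_prompt(self, x_person: int, level: int) -> str:
        return f"{self.persona} (x_person={x_person}, level={level})"


_individuality: Optional[Individuality] = None


def get_individuality() -> Individuality:
    # Construct the singleton on first call instead of at module import time.
    global _individuality
    if _individuality is None:
        _individuality = Individuality()
    return _individuality

Call sites change accordingly: individuality.get_prompt(...) becomes get_individuality().get_prompt(...), and chat_manager.get_stream(...) becomes get_chat_manager().get_stream(...), as the diffs below show.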

View File

@@ -5,7 +5,7 @@ from src.llm_models.utils_model import LLMRequest
 from src.config.config import global_config
 from src.experimental.PFC.chat_observer import ChatObserver
 from src.experimental.PFC.pfc_utils import get_items_from_json
-from src.individuality.individuality import individuality
+from src.individuality.individuality import get_individuality
 from src.experimental.PFC.observation_info import ObservationInfo
 from src.experimental.PFC.conversation_info import ConversationInfo
 from src.chat.utils.chat_message_builder import build_readable_messages
@@ -112,7 +112,7 @@ class ActionPlanner:
             temperature=global_config.llm_PFC_action_planner["temp"],
             request_type="action_planning",
         )
-        self.personality_info = individuality.get_prompt(x_person=2, level=3)
+        self.personality_info = get_individuality().get_prompt(x_person=2, level=3)
         self.name = global_config.bot.nickname
         self.private_name = private_name
         self.chat_observer = ChatObserver.get_instance(stream_id, private_name)

View File

@@ -18,7 +18,7 @@ from .conversation_info import ConversationInfo # 确保导入 ConversationInfo
 from .reply_generator import ReplyGenerator
 from src.chat.message_receive.chat_stream import ChatStream
 from src.chat.message_receive.message import UserInfo
-from src.chat.message_receive.chat_stream import chat_manager
+from src.chat.message_receive.chat_stream import get_chat_manager
 from .pfc_KnowledgeFetcher import KnowledgeFetcher
 from .waiter import Waiter
@@ -60,7 +60,7 @@ class Conversation:
             self.direct_sender = DirectMessageSender(self.private_name)
             # 获取聊天流信息
-            self.chat_stream = chat_manager.get_stream(self.stream_id)
+            self.chat_stream = get_chat_manager().get_stream(self.stream_id)
             self.stop_action_planner = False
         except Exception as e:
@@ -248,14 +248,14 @@ class Conversation:
     def _convert_to_message(self, msg_dict: Dict[str, Any]) -> Message:
         """将消息字典转换为Message对象"""
         try:
-            # 尝试从 msg_dict 直接获取 chat_stream如果失败则从全局 chat_manager 获取
+            # 尝试从 msg_dict 直接获取 chat_stream如果失败则从全局 get_chat_manager 获取
             chat_info = msg_dict.get("chat_info")
             if chat_info and isinstance(chat_info, dict):
                 chat_stream = ChatStream.from_dict(chat_info)
             elif self.chat_stream:  # 使用实例变量中的 chat_stream
                 chat_stream = self.chat_stream
             else:  # Fallback: 尝试从 manager 获取 (可能需要 stream_id)
-                chat_stream = chat_manager.get_stream(self.stream_id)
+                chat_stream = get_chat_manager().get_stream(self.stream_id)
             if not chat_stream:
                 raise ValueError(f"无法确定 ChatStream for stream_id {self.stream_id}")

View File

@@ -4,7 +4,7 @@ from src.llm_models.utils_model import LLMRequest
 from src.config.config import global_config
 from src.experimental.PFC.chat_observer import ChatObserver
 from src.experimental.PFC.pfc_utils import get_items_from_json
-from src.individuality.individuality import individuality
+from src.individuality.individuality import get_individuality
 from src.experimental.PFC.conversation_info import ConversationInfo
 from src.experimental.PFC.observation_info import ObservationInfo
 from src.chat.utils.chat_message_builder import build_readable_messages
@@ -47,7 +47,7 @@ class GoalAnalyzer:
             model=global_config.model.utils, temperature=0.7, max_tokens=1000, request_type="conversation_goal"
         )
-        self.personality_info = individuality.get_prompt(x_person=2, level=3)
+        self.personality_info = get_individuality().get_prompt(x_person=2, level=3)
         self.name = global_config.bot.nickname
         self.nick_name = global_config.bot.alias_names
         self.private_name = private_name

View File

@@ -4,7 +4,7 @@ from src.llm_models.utils_model import LLMRequest
 from src.config.config import global_config
 from src.experimental.PFC.chat_observer import ChatObserver
 from src.experimental.PFC.reply_checker import ReplyChecker
-from src.individuality.individuality import individuality
+from src.individuality.individuality import get_individuality
 from .observation_info import ObservationInfo
 from .conversation_info import ConversationInfo
 from src.chat.utils.chat_message_builder import build_readable_messages
@@ -91,7 +91,7 @@ class ReplyGenerator:
             temperature=global_config.llm_PFC_chat["temp"],
             request_type="reply_generation",
         )
-        self.personality_info = individuality.get_prompt(x_person=2, level=3)
+        self.personality_info = get_individuality().get_prompt(x_person=2, level=3)
         self.name = global_config.bot.nickname
         self.private_name = private_name
         self.chat_observer = ChatObserver.get_instance(stream_id, private_name)

View File

@@ -2,7 +2,7 @@ from src.common.logger import get_logger
 from .chat_observer import ChatObserver
 from .conversation_info import ConversationInfo
-# from src.individuality.individuality import individuality,Individuality # 不再需要
+# from src.individuality.individuality get_individuality,Individuality # 不再需要
 from src.config.config import global_config
 import time
 import asyncio