better:重整配置,分离表达,聊天模式区分

重整配置文件路径,添加更多配置选项
分离了人设表达方式和学习到的表达方式
将聊天模式区分为 normal、focus 和 auto
This commit is contained in:
SengokuCola
2025-05-20 22:41:55 +08:00
parent 67569f1fa6
commit 25d9032e62
54 changed files with 387 additions and 482 deletions

View File

@@ -5,7 +5,7 @@ from src.chat.models.utils_model import LLMRequest
from src.config.config import global_config
from src.experimental.PFC.chat_observer import ChatObserver
from src.experimental.PFC.pfc_utils import get_items_from_json
from src.individuality.individuality import Individuality
from src.individuality.individuality import individuality
from src.experimental.PFC.observation_info import ObservationInfo
from src.experimental.PFC.conversation_info import ConversationInfo
from src.chat.utils.chat_message_builder import build_readable_messages
@@ -113,7 +113,7 @@ class ActionPlanner:
max_tokens=1500,
request_type="action_planning",
)
self.personality_info = Individuality.get_instance().get_prompt(x_person=2, level=3)
self.personality_info = individuality.get_prompt(x_person=2, level=3)
self.name = global_config.bot.nickname
self.private_name = private_name
self.chat_observer = ChatObserver.get_instance(stream_id, private_name)

View File

@@ -4,7 +4,7 @@ from src.chat.models.utils_model import LLMRequest
from src.config.config import global_config
from src.experimental.PFC.chat_observer import ChatObserver
from src.experimental.PFC.pfc_utils import get_items_from_json
from src.individuality.individuality import Individuality
from src.individuality.individuality import individuality
from src.experimental.PFC.conversation_info import ConversationInfo
from src.experimental.PFC.observation_info import ObservationInfo
from src.chat.utils.chat_message_builder import build_readable_messages
@@ -47,7 +47,7 @@ class GoalAnalyzer:
model=global_config.model.normal, temperature=0.7, max_tokens=1000, request_type="conversation_goal"
)
self.personality_info = Individuality.get_instance().get_prompt(x_person=2, level=3)
self.personality_info = individuality.get_prompt(x_person=2, level=3)
self.name = global_config.bot.nickname
self.nick_name = global_config.bot.alias_names
self.private_name = private_name

View File

@@ -4,7 +4,7 @@ from src.chat.models.utils_model import LLMRequest
from src.config.config import global_config
from src.experimental.PFC.chat_observer import ChatObserver
from src.experimental.PFC.reply_checker import ReplyChecker
from src.individuality.individuality import Individuality
from src.individuality.individuality import individuality
from .observation_info import ObservationInfo
from .conversation_info import ConversationInfo
from src.chat.utils.chat_message_builder import build_readable_messages
@@ -92,7 +92,7 @@ class ReplyGenerator:
max_tokens=300,
request_type="reply_generation",
)
self.personality_info = Individuality.get_instance().get_prompt(x_person=2, level=3)
self.personality_info = individuality.get_prompt(x_person=2, level=3)
self.name = global_config.bot.nickname
self.private_name = private_name
self.chat_observer = ChatObserver.get_instance(stream_id, private_name)

View File

@@ -2,7 +2,7 @@ from src.common.logger import get_module_logger
from .chat_observer import ChatObserver
from .conversation_info import ConversationInfo
# from src.individuality.individuality import Individuality # 不再需要
# from src.individuality.individuality import individuality,Individuality # 不再需要
from src.config.config import global_config
import time
import asyncio