diff --git a/src/chat/focus_chat/expressors/default_expressor.py b/src/chat/focus_chat/expressors/default_expressor.py
index d44d5a6c2..d8cc10648 100644
--- a/src/chat/focus_chat/expressors/default_expressor.py
+++ b/src/chat/focus_chat/expressors/default_expressor.py
@@ -79,7 +79,7 @@ class DefaultExpressor:
             model=global_config.model.focus_expressor,
             # temperature=global_config.model.focus_expressor["temp"],
             max_tokens=256,
-            request_type="focus_expressor",
+            request_type="focus.expressor",
         )
 
         self.heart_fc_sender = HeartFCSender()
diff --git a/src/chat/focus_chat/expressors/exprssion_learner.py b/src/chat/focus_chat/expressors/exprssion_learner.py
index 96d4e231b..817a188fe 100644
--- a/src/chat/focus_chat/expressors/exprssion_learner.py
+++ b/src/chat/focus_chat/expressors/exprssion_learner.py
@@ -64,7 +64,7 @@ class ExpressionLearner:
             model=global_config.model.focus_expressor,
             temperature=0.1,
             max_tokens=256,
-            request_type="learn_expression",
+            request_type="expressor.learner",
         )
 
     async def get_expression_by_chat_id(self, chat_id: str) -> Tuple[List[Dict[str, str]], List[Dict[str, str]]]:
diff --git a/src/chat/focus_chat/info_processors/chattinginfo_processor.py b/src/chat/focus_chat/info_processors/chattinginfo_processor.py
index a93ce35eb..5bbaee4f7 100644
--- a/src/chat/focus_chat/info_processors/chattinginfo_processor.py
+++ b/src/chat/focus_chat/info_processors/chattinginfo_processor.py
@@ -28,7 +28,7 @@ class ChattingInfoProcessor(BaseProcessor):
         super().__init__()
         # TODO: API-Adapter修改标记
         self.model_summary = LLMRequest(
-            model=global_config.model.utils_small, temperature=0.7, max_tokens=300, request_type="chat_observation"
+            model=global_config.model.utils_small, temperature=0.7, max_tokens=300, request_type="focus.observation.chat"
         )
 
     async def process_info(
diff --git a/src/chat/focus_chat/info_processors/mind_processor.py b/src/chat/focus_chat/info_processors/mind_processor.py
index 609e40e99..12671169f 100644
--- a/src/chat/focus_chat/info_processors/mind_processor.py
+++ b/src/chat/focus_chat/info_processors/mind_processor.py
@@ -79,7 +79,7 @@ class MindProcessor(BaseProcessor):
             model=global_config.model.focus_chat_mind,
             temperature=global_config.model.focus_chat_mind["temp"],
             max_tokens=800,
-            request_type="focus_chat_mind",
+            request_type="focus.processor.chat_mind",
         )
 
         self.current_mind = ""
diff --git a/src/chat/focus_chat/info_processors/self_processor.py b/src/chat/focus_chat/info_processors/self_processor.py
index cecaf3084..d489efa22 100644
--- a/src/chat/focus_chat/info_processors/self_processor.py
+++ b/src/chat/focus_chat/info_processors/self_processor.py
@@ -58,7 +58,7 @@ class SelfProcessor(BaseProcessor):
             model=global_config.model.focus_self_recognize,
             temperature=global_config.model.focus_self_recognize["temp"],
             max_tokens=800,
-            request_type="focus_self_identify",
+            request_type="focus.processor.self_identify",
         )
 
         name = chat_manager.get_stream_name(self.subheartflow_id)
diff --git a/src/chat/focus_chat/info_processors/tool_processor.py b/src/chat/focus_chat/info_processors/tool_processor.py
index 2d52a04a2..3c0dc116c 100644
--- a/src/chat/focus_chat/info_processors/tool_processor.py
+++ b/src/chat/focus_chat/info_processors/tool_processor.py
@@ -51,7 +51,7 @@ class ToolProcessor(BaseProcessor):
         self.llm_model = LLMRequest(
             model=global_config.model.focus_tool_use,
             max_tokens=500,
-            request_type="focus_tool",
+            request_type="focus.processor.tool",
         )
 
         self.structured_info = []
diff --git a/src/chat/focus_chat/info_processors/working_memory_processor.py b/src/chat/focus_chat/info_processors/working_memory_processor.py
index 27af13255..da7203989 100644
--- a/src/chat/focus_chat/info_processors/working_memory_processor.py
+++ b/src/chat/focus_chat/info_processors/working_memory_processor.py
@@ -64,7 +64,7 @@ class WorkingMemoryProcessor(BaseProcessor):
             model=global_config.model.focus_chat_mind,
             temperature=global_config.model.focus_chat_mind["temp"],
             max_tokens=800,
-            request_type="focus_working_memory",
+            request_type="focus.processor.working_memory",
         )
 
         name = chat_manager.get_stream_name(self.subheartflow_id)
diff --git a/src/chat/focus_chat/memory_activator.py b/src/chat/focus_chat/memory_activator.py
index a9f2c8ee4..1e84e3d42 100644
--- a/src/chat/focus_chat/memory_activator.py
+++ b/src/chat/focus_chat/memory_activator.py
@@ -70,7 +70,7 @@ class MemoryActivator:
     def __init__(self):
         # TODO: API-Adapter修改标记
         self.summary_model = LLMRequest(
-            model=global_config.model.memory_summary, temperature=0.7, max_tokens=50, request_type="chat_observation"
+            model=global_config.model.memory_summary, temperature=0.7, max_tokens=50, request_type="focus.memory_activator"
         )
         self.running_memory = []
         self.cached_keywords = set()  # 用于缓存历史关键词
diff --git a/src/chat/focus_chat/planners/planner.py b/src/chat/focus_chat/planners/planner.py
index ab1c9f429..1ece41fe4 100644
--- a/src/chat/focus_chat/planners/planner.py
+++ b/src/chat/focus_chat/planners/planner.py
@@ -79,7 +79,7 @@ class ActionPlanner:
         self.planner_llm = LLMRequest(
             model=global_config.model.focus_planner,
             max_tokens=1000,
-            request_type="focus_planner",  # 用于动作规划
+            request_type="focus.planner",  # 用于动作规划
         )
 
         self.action_manager = action_manager
diff --git a/src/chat/focus_chat/working_memory/memory_manager.py b/src/chat/focus_chat/working_memory/memory_manager.py
index 9ecbe6104..bdbb429e7 100644
--- a/src/chat/focus_chat/working_memory/memory_manager.py
+++ b/src/chat/focus_chat/working_memory/memory_manager.py
@@ -36,7 +36,7 @@ class MemoryManager:
             model=global_config.model.focus_working_memory,
             temperature=0.3,
             max_tokens=512,
-            request_type="memory_summarization",
+            request_type="focus.processor.working_memory",
         )
 
     @property
diff --git a/src/individuality/expression_style.py b/src/individuality/expression_style.py
index 841d44e57..f4eed60b5 100644
--- a/src/individuality/expression_style.py
+++ b/src/individuality/expression_style.py
@@ -36,7 +36,7 @@ class PersonalityExpression:
             model=global_config.model.focus_expressor,
             temperature=0.1,
             max_tokens=256,
-            request_type="learn_expression",
+            request_type="expressor.learner",
         )
         self.meta_file_path = os.path.join("data", "expression", "personality", "expression_style_meta.json")
         self.expressions_file_path = os.path.join("data", "expression", "personality", "expressions.json")
diff --git a/src/person_info/person_info.py b/src/person_info/person_info.py
index b06820786..11f8dd2bf 100644
--- a/src/person_info/person_info.py
+++ b/src/person_info/person_info.py
@@ -60,7 +60,7 @@ class PersonInfoManager:
         self.qv_name_llm = LLMRequest(
             model=global_config.model.utils,
             max_tokens=256,
-            request_type="qv_name",
+            request_type="relation.qv_name",
         )
         try:
             db.connect(reuse_if_open=True)
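The diff replaces the flat `request_type` strings passed to `LLMRequest` with dot-separated names (e.g. `focus.processor.tool`, `expressor.learner`, `relation.qv_name`). As a minimal sketch of what the dotted scheme makes possible, the snippet below groups the new names by their first segment; the list of values is taken verbatim from the diff, but the grouping helper and its use for per-module accounting are an illustrative assumption, not code from this PR or from `LLMRequest` itself.

```python
# Sketch only: request_type values copied from the diff above; the grouping
# logic is a hypothetical illustration of the dotted naming hierarchy.
from collections import defaultdict

NEW_REQUEST_TYPES = [
    "focus.expressor",
    "expressor.learner",
    "focus.observation.chat",
    "focus.processor.chat_mind",
    "focus.processor.self_identify",
    "focus.processor.tool",
    "focus.processor.working_memory",
    "focus.memory_activator",
    "focus.planner",
    "relation.qv_name",
]


def group_by_top_level(names: list[str]) -> dict[str, list[str]]:
    """Bucket dotted request_type names by their first segment."""
    groups: dict[str, list[str]] = defaultdict(list)
    for name in names:
        top, _, _rest = name.partition(".")
        groups[top].append(name)
    return dict(groups)


if __name__ == "__main__":
    for top, members in group_by_top_level(NEW_REQUEST_TYPES).items():
        print(f"{top}: {members}")
```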