From 0181c26a54598148b4f299a09fee123ad78b1af0 Mon Sep 17 00:00:00 2001
From: SengokuCola <1026294844@qq.com>
Date: Sun, 6 Jul 2025 23:34:32 +0800
Subject: [PATCH] =?UTF-8?q?fix=EF=BC=9A=E4=BF=AE=E5=A4=8D=E6=A8=A1?=
 =?UTF-8?q?=E5=9E=8B=E9=85=8D=E7=BD=AE=E5=BA=94=E7=94=A8=E9=94=99=E8=AF=AF?=
 =?UTF-8?q?=EF=BC=8C=E4=BF=AE=E5=A4=8Dno=5Faction=E6=89=A7=E8=A1=8C?=
 =?UTF-8?q?=E9=94=99=E8=AF=AF?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/chat/memory_system/memory_activator.py | 10 ++++++----
 src/chat/planner_actions/planner.py        |  3 +--
 src/person_info/relationship_fetcher.py    |  6 +++---
 3 files changed, 10 insertions(+), 9 deletions(-)

diff --git a/src/chat/memory_system/memory_activator.py b/src/chat/memory_system/memory_activator.py
index 104d0c88f..b9a6248ff 100644
--- a/src/chat/memory_system/memory_activator.py
+++ b/src/chat/memory_system/memory_activator.py
@@ -69,11 +69,13 @@ def init_prompt():
 
 class MemoryActivator:
     def __init__(self):
         # TODO: API-Adapter修改标记
-        self.summary_model = LLMRequest(
-            model=global_config.model.memory_summary,
-            temperature=0.7,
+
+        self.key_words_model = LLMRequest(
+            model=global_config.model.utils_small,
+            temperature=0.5,
             request_type="memory_activator",
         )
+
         self.running_memory = []
         self.cached_keywords = set()  # 用于缓存历史关键词
@@ -97,7 +99,7 @@ class MemoryActivator:
 
         # logger.debug(f"prompt: {prompt}")
 
-        response, (reasoning_content, model_name) = await self.summary_model.generate_response_async(prompt)
+        response, (reasoning_content, model_name) = await self.key_words_model.generate_response_async(prompt)
 
         keywords = list(get_keywords_from_json(response))
 
diff --git a/src/chat/planner_actions/planner.py b/src/chat/planner_actions/planner.py
index 02a504a43..8dd4ecdc3 100644
--- a/src/chat/planner_actions/planner.py
+++ b/src/chat/planner_actions/planner.py
@@ -154,8 +154,7 @@ class ActionPlanner:
                         action_data[key] = value
 
                 if action == "no_action":
-                    action = "no_reply"
-                    reasoning = "决定不使用额外动作"
+                    reasoning = "normal决定不使用额外动作"
                 elif action not in current_available_actions:
                     logger.warning(
                         f"{self.log_prefix}LLM 返回了当前不可用或无效的动作: '{action}' (可用: {list(current_available_actions.keys())}),将强制使用 'no_reply'"
diff --git a/src/person_info/relationship_fetcher.py b/src/person_info/relationship_fetcher.py
index ea220e46a..7f23cf031 100644
--- a/src/person_info/relationship_fetcher.py
+++ b/src/person_info/relationship_fetcher.py
@@ -70,14 +70,14 @@ class RelationshipFetcher:
 
         # LLM模型配置
         self.llm_model = LLMRequest(
-            model=global_config.model.relation,
-            request_type="relation",
+            model=global_config.model.utils_small,
+            request_type="relation.fetcher",
         )
 
         # 小模型用于即时信息提取
         self.instant_llm_model = LLMRequest(
             model=global_config.model.utils_small,
-            request_type="relation.instant",
+            request_type="relation.fetch",
         )
 
         name = get_chat_manager().get_stream_name(self.chat_id)