update:发布0.6.3前的准备工作,changelog,readme,配置文件修改

This commit is contained in:
SengokuCola
2025-04-30 17:50:20 +08:00
parent 4c51b6a09c
commit 7d19a6728f
11 changed files with 90 additions and 77 deletions

View File

@@ -29,7 +29,7 @@ class NormalChatGenerator:
)
self.model_sum = LLMRequest(
model=global_config.llm_summary_by_topic, temperature=0.7, max_tokens=3000, request_type="relation"
model=global_config.llm_summary, temperature=0.7, max_tokens=3000, request_type="relation"
)
self.current_model_type = "r1" # 默认使用 R1
self.current_model_name = "unknown model"

View File

@@ -189,7 +189,7 @@ class Hippocampus:
def __init__(self):
self.memory_graph = MemoryGraph()
self.llm_topic_judge = None
self.llm_summary_by_topic = None
self.llm_summary = None
self.entorhinal_cortex = None
self.parahippocampal_gyrus = None
self.config = None
@@ -203,7 +203,7 @@ class Hippocampus:
# 从数据库加载记忆图
self.entorhinal_cortex.sync_memory_from_db()
self.llm_topic_judge = LLMRequest(self.config.llm_topic_judge, request_type="memory")
self.llm_summary_by_topic = LLMRequest(self.config.llm_summary_by_topic, request_type="memory")
self.llm_summary = LLMRequest(self.config.llm_summary, request_type="memory")
def get_all_node_names(self) -> list:
"""获取记忆图中所有节点的名字列表"""
@@ -1169,7 +1169,7 @@ class ParahippocampalGyrus:
# 调用修改后的 topic_what不再需要 time_info
topic_what_prompt = self.hippocampus.topic_what(input_text, topic)
try:
task = self.hippocampus.llm_summary_by_topic.generate_response_async(topic_what_prompt)
task = self.hippocampus.llm_summary.generate_response_async(topic_what_prompt)
tasks.append((topic.strip(), task))
except Exception as e:
logger.error(f"生成话题 '{topic}' 的摘要时发生错误: {e}")

View File

@@ -24,7 +24,7 @@ class MemoryConfig:
consolidate_memory_interval: int # 记忆整合间隔
llm_topic_judge: str # 话题判断模型
llm_summary_by_topic: str # 话题总结模型
llm_summary: str # 话题总结模型
@classmethod
def from_global_config(cls, global_config):
@@ -44,7 +44,7 @@ class MemoryConfig:
consolidate_memory_percentage=getattr(global_config, "consolidate_memory_percentage", 0.01),
consolidate_memory_interval=getattr(global_config, "consolidate_memory_interval", 1000),
llm_topic_judge=getattr(global_config, "llm_topic_judge", "default_judge_model"), # 添加默认模型名
llm_summary_by_topic=getattr(
global_config, "llm_summary_by_topic", "default_summary_model"
llm_summary=getattr(
global_config, "llm_summary", "default_summary_model"
), # 添加默认模型名
)