fix: remove unused imports and comments

Author: 墨梓柒
Date:   2025-07-15 17:13:15 +08:00
parent 5148901f05
commit 1966b4eaf8

3 changed files with 9 additions and 8 deletions

View File

@@ -11,7 +11,7 @@ import pandas as pd
 import faiss
 # from .llm_client import LLMClient
-from .lpmmconfig import global_config
+# from .lpmmconfig import global_config
 from .utils.hash import get_sha256
 from .global_logger import logger
 from rich.traceback import install
@@ -27,15 +27,12 @@ from rich.progress import (
 )
 from src.manager.local_store_manager import local_storage
 from src.chat.utils.utils import get_embedding
+from src.config.config import global_config

 install(extra_lines=3)

 ROOT_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", ".."))
-EMBEDDING_DATA_DIR = (
-    os.path.join(ROOT_PATH, "data", "embedding")
-    if global_config["persistence"]["embedding_data_dir"] is None
-    else os.path.join(ROOT_PATH, global_config["persistence"]["embedding_data_dir"])
-)
+EMBEDDING_DATA_DIR = os.path.join(ROOT_PATH, "data", "embedding")
 EMBEDDING_DATA_DIR_STR = str(EMBEDDING_DATA_DIR).replace("\\", "/")
 TOTAL_EMBEDDING_TIMES = 3  # number of embedding passes to track
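For context, the surviving `EMBEDDING_DATA_DIR_STR` line converts backslashes so the persisted path string is identical on Windows and POSIX. A minimal sketch of that pattern (the example path is illustrative, not taken from the repo):

```python
import os

# On Windows, os.path.join produces backslash separators.
data_dir = os.path.join("C:\\bot", "data", "embedding")  # -> C:\bot\data\embedding

# Normalizing to forward slashes keeps stored keys and logs platform-stable.
data_dir_str = str(data_dir).replace("\\", "/")          # -> C:/bot/data/embedding
```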
@@ -260,7 +257,7 @@ class EmbeddingStore:
         # L2 normalization
         faiss.normalize_L2(embeddings)
         # Build the index
-        self.faiss_index = faiss.IndexFlatIP(global_config["embedding"]["dimension"])
+        self.faiss_index = faiss.IndexFlatIP(global_config.lpmm_knowledge.embedding_dimension)
         self.faiss_index.add(embeddings)

     def search_top_k(self, query: List[float], k: int) -> List[Tuple[str, float]]:
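For context, pairing `faiss.normalize_L2` with an `IndexFlatIP` index makes inner-product search equivalent to cosine similarity. A self-contained sketch of the pattern this hunk touches, using toy data (only the FAISS calls mirror the diff; everything else is illustrative):

```python
import faiss
import numpy as np

dim = 1024  # mirrors the new lpmm_knowledge.embedding_dimension default

# Toy float32 embeddings standing in for the store's real vectors.
embeddings = np.random.rand(100, dim).astype("float32")

# In-place L2 normalization: afterwards, inner product == cosine similarity.
faiss.normalize_L2(embeddings)

index = faiss.IndexFlatIP(dim)  # exact (flat) inner-product index
index.add(embeddings)

# Queries must be normalized the same way before searching.
query = np.random.rand(1, dim).astype("float32")
faiss.normalize_L2(query)
scores, ids = index.search(query, 5)  # top-5 cosine scores and row ids
```

Since the index dimension now comes from `lpmm_knowledge.embedding_dimension`, it has to match the embedding model's actual output size, which is exactly what the new docstring in the next file warns about.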

View File

@@ -589,6 +589,9 @@ class LPMMKnowledgeConfig(ConfigBase):
     qa_res_top_k: int = 10
     """Top K count for the final QA results"""

+    embedding_dimension: int = 1024
+    """Embedding vector dimension; should match the model's output dimension"""
+

 @dataclass
 class ModelConfig(ConfigBase):
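The new field is what the embedding-store hunk above reads via attribute access. A minimal stand-in showing that access pattern (the real `ConfigBase` and `global_config` live in src/config/config.py and carry many more fields; this is a sketch, not the actual class):

```python
from dataclasses import dataclass, field

@dataclass
class LPMMKnowledgeConfig:
    qa_res_top_k: int = 10
    embedding_dimension: int = 1024  # must match the embedding model's output size

@dataclass
class GlobalConfig:
    lpmm_knowledge: LPMMKnowledgeConfig = field(default_factory=LPMMKnowledgeConfig)

global_config = GlobalConfig()

# Old dict-style lookup (removed):  global_config["embedding"]["dimension"]
# New attribute access (added):     global_config.lpmm_knowledge.embedding_dimension
assert global_config.lpmm_knowledge.embedding_dimension == 1024
```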

View File

@@ -1,5 +1,5 @@
 [inner]
-version = "4.1.1"
+version = "4.2.0"

 #----The section below is for developers; if you have only deployed MaiMai (麦麦), you do not need to read it----
 #If you modify this config file, change the value of version after your edits
@@ -158,6 +158,7 @@ qa_paragraph_node_weight = 0.05 # paragraph node weight (in graph search & PPR computation)
 qa_ent_filter_top_k = 10 # entity-filter Top K
 qa_ppr_damping = 0.8 # PPR damping factor
 qa_res_top_k = 3 # Top K of passages finally returned
+embedding_dimension = 1024 # embedding vector dimension; should match the model's output dimension

 # keyword_rules sets extra reply knowledge triggered by keywords
 # To add a new rule: append an item to the keyword_rules array in the following format:
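The template default matches the dataclass default above. A quick sketch of reading and sanity-checking the value at startup (assuming the keys sit under a `[lpmm_knowledge]` table, which the hunk does not show, and using a placeholder filename):

```python
import tomllib  # Python 3.11+; use the third-party tomli package on older versions

# "bot_config.toml" is a placeholder; the template's real path may differ.
with open("bot_config.toml", "rb") as f:
    cfg = tomllib.load(f)

dim = cfg["lpmm_knowledge"]["embedding_dimension"]

def check_embedding(vector: list[float]) -> None:
    # Fail fast when the configured dimension disagrees with the model output,
    # since FAISS rejects vectors whose length differs from the index dimension.
    if len(vector) != dim:
        raise ValueError(f"expected {dim}-dim embedding, got {len(vector)}")
```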