Merge branch 'dev' of https://github.com/MoFox-Studio/MoFox_Bot into dev
@@ -61,27 +61,18 @@ def find_available_data_files() -> List[Path]:
    return sorted(files, key=lambda f: f.stat().st_mtime, reverse=True)


def load_graph_data_from_file(
    file_path: Optional[Path] = None,
    nodes_page: Optional[int] = None,
    nodes_per_page: Optional[int] = None,
    edges_page: Optional[int] = None,
    edges_per_page: Optional[int] = None,
) -> Dict[str, Any]:
def load_graph_data_from_file(file_path: Optional[Path] = None) -> Dict[str, Any]:
    """
    Load graph data from disk, with pagination support.
    If no pagination parameters are given, load and cache all of the data.
    Load graph data from disk and build indexes to speed up queries.
    Hmph, don't mind how much code I wrote, this is called being professional! Do it right the first time and there is no rework.
    """
    global graph_data_cache, current_data_file

    # For a paged request, do not use the cached full data set
    is_paged_request = nodes_page is not None or edges_page is not None

    if file_path and file_path != current_data_file:
        graph_data_cache = None
        current_data_file = file_path

    if graph_data_cache and not is_paged_request:
    if graph_data_cache:
        return graph_data_cache

    try:
@@ -89,92 +80,84 @@ def load_graph_data_from_file(
        if not graph_file:
            available_files = find_available_data_files()
            if not available_files:
                return {"error": "未找到数据文件", "nodes": [], "edges": [], "stats": {}}
                return {"error": "未找到数据文件", "nodes": [], "edges": [], "stats": {}, "nodes_dict": {}, "adjacency_list": {}}
            graph_file = available_files[0]
            current_data_file = graph_file

        if not graph_file.exists():
            return {"error": f"文件不存在: {graph_file}", "nodes": [], "edges": [], "stats": {}}
            return {"error": f"文件不存在: {graph_file}", "nodes": [], "edges": [], "stats": {}, "nodes_dict": {}, "adjacency_list": {}}

        # Only read and process the file from disk when there is no cache yet
        if not graph_data_cache:
            with open(graph_file, "r", encoding="utf-8") as f:
                data = orjson.loads(f.read())
        with open(graph_file, "r", encoding="utf-8") as f:
            data = orjson.loads(f.read())

            nodes = data.get("nodes", [])
            edges = data.get("edges", [])
            metadata = data.get("metadata", {})
        nodes = data.get("nodes", [])
        edges = data.get("edges", [])
        metadata = data.get("metadata", {})

            nodes_dict = {
                node["id"]: {
                    **node,
                    "label": node.get("content", ""),
                    "group": node.get("node_type", ""),
                    "title": f"{node.get('node_type', '')}: {node.get('content', '')}",
        nodes_dict = {
            node["id"]: {
                **node,
                "label": node.get("content", ""),
                "group": node.get("node_type", ""),
                "title": f"{node.get('node_type', '')}: {node.get('content', '')}",
                "degree": 0,  # initialize the degree to 0
            }
            for node in nodes
            if node.get("id")
        }

        edges_list = []
        seen_edge_ids = set()
        adjacency_list = {node_id: [] for node_id in nodes_dict}

        for edge in edges:
            edge_id = edge.get("id")
            source_id = edge.get("source", edge.get("source_id"))
            target_id = edge.get("target", edge.get("target_id"))

            if edge_id and edge_id not in seen_edge_ids and source_id in nodes_dict and target_id in nodes_dict:
                formatted_edge = {
                    **edge,
                    "from": source_id,
                    "to": target_id,
                    "label": edge.get("relation", ""),
                    "arrows": "to",
                }
                for node in nodes
                if node.get("id")
            }
                edges_list.append(formatted_edge)
                seen_edge_ids.add(edge_id)

            edges_list = []
            seen_edge_ids = set()
            for edge in edges:
                edge_id = edge.get("id")
                if edge_id and edge_id not in seen_edge_ids:
                    edges_list.append(
                        {
                            **edge,
                            "from": edge.get("source", edge.get("source_id")),
                            "to": edge.get("target", edge.get("target_id")),
                            "label": edge.get("relation", ""),
                            "arrows": "to",
                        }
                    )
                    seen_edge_ids.add(edge_id)
                # Build the adjacency list and compute the degrees
                adjacency_list[source_id].append(formatted_edge)
                adjacency_list[target_id].append(formatted_edge)
                nodes_dict[source_id]["degree"] += 1
                nodes_dict[target_id]["degree"] += 1

            stats = metadata.get("statistics", {})
            total_memories = stats.get("total_memories", 0)

            graph_data_cache = {
                "nodes": list(nodes_dict.values()),
                "edges": edges_list,
                "memories": [],  # TODO: memories could also be loaded with pagination in the future
                "stats": {
                    "total_nodes": len(nodes_dict),
                    "total_edges": len(edges_list),
                    "total_memories": total_memories,
                },
                "current_file": str(graph_file),
                "file_size": graph_file.stat().st_size,
                "file_modified": datetime.fromtimestamp(graph_file.stat().st_mtime).isoformat(),
            }

        # For a paged request, slice the data out of the cache
        if is_paged_request:
            paged_data = graph_data_cache.copy()  # shallow copy so the cache itself is not modified

            # Paginate the nodes
            if nodes_page is not None and nodes_per_page is not None:
                node_start = (nodes_page - 1) * nodes_per_page
                node_end = node_start + nodes_per_page
                paged_data["nodes"] = graph_data_cache["nodes"][node_start:node_end]

            # Paginate the edges
            if edges_page is not None and edges_per_page is not None:
                edge_start = (edges_page - 1) * edges_per_page
                edge_end = edge_start + edges_per_page
                paged_data["edges"] = graph_data_cache["edges"][edge_start:edge_end]

            return paged_data
        stats = metadata.get("statistics", {})
        total_memories = stats.get("total_memories", 0)

        # Cache all of the processed data, including the indexes
        graph_data_cache = {
            "nodes": list(nodes_dict.values()),
            "edges": edges_list,
            "nodes_dict": nodes_dict,  # cache the node dict for fast lookups
            "adjacency_list": adjacency_list,  # cache the adjacency list so neighbors can be found at light speed
            "memories": [],
            "stats": {
                "total_nodes": len(nodes_dict),
                "total_edges": len(edges_list),
                "total_memories": total_memories,
            },
            "current_file": str(graph_file),
            "file_size": graph_file.stat().st_size,
            "file_modified": datetime.fromtimestamp(graph_file.stat().st_mtime).isoformat(),
        }
        return graph_data_cache

    except Exception as e:
        import traceback

        traceback.print_exc()
        raise HTTPException(status_code=500, detail=f"加载图数据失败: {e}")
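What the rebuilt loader caches is essentially two indexes: nodes_dict for O(1) lookup of a node by id, and adjacency_list mapping each node id to the formatted edges that touch it, with degrees counted along the way. A minimal, self-contained sketch of that indexing idea on toy data (the nodes and edges below are invented for illustration; only the keys mirror the code above):

nodes = [{"id": "a", "content": "A"}, {"id": "b", "content": "B"}, {"id": "c", "content": "C"}]
edges = [
    {"id": "e1", "source": "a", "target": "b", "relation": "knows"},
    {"id": "e2", "source": "a", "target": "c", "relation": "likes"},
]

# Index nodes by id and start every degree at 0
nodes_dict = {n["id"]: {**n, "degree": 0} for n in nodes if n.get("id")}
# One bucket of incident edges per node id
adjacency_list = {node_id: [] for node_id in nodes_dict}

for edge in edges:
    src, dst = edge.get("source"), edge.get("target")
    if src in nodes_dict and dst in nodes_dict:
        formatted = {**edge, "from": src, "to": dst}
        adjacency_list[src].append(formatted)
        adjacency_list[dst].append(formatted)
        nodes_dict[src]["degree"] += 1
        nodes_dict[dst]["degree"] += 1

print(nodes_dict["a"]["degree"])   # 2
print(len(adjacency_list["b"]))    # 1, found without scanning the whole edge list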
@router.get("/", response_class=HTMLResponse)
async def index(request: Request):
    """Main page"""
@@ -235,67 +218,90 @@ def _format_graph_data_from_manager(memory_manager) -> Dict[str, Any]:
        "current_file": "memory_manager (实时数据)",
    }

@router.get("/api/graph/paged")
async def get_paged_graph(
    nodes_page: int = 1, nodes_per_page: int = 100, edges_page: int = 1, edges_per_page: int = 200
):
    """Get paged memory graph data."""
@router.get("/api/graph/core")
async def get_core_graph(limit: int = 100):
    """
    Get the core graph data.
    This is way smarter than dumping everything on the frontend in one go, hmph.
    """
    try:
        # Make sure the full data set has been loaded into the cache
        full_data = load_graph_data_from_file()
        if "error" in full_data:
            raise HTTPException(status_code=404, detail=full_data["error"])
            return JSONResponse(content={"success": False, "error": full_data["error"]}, status_code=404)

        # Fetch the full data set from the cache
        # Smart core-node selection: prefer the nodes with the highest degree
        # This is a simple strategy, but far better than picking nodes at random
        all_nodes = full_data.get("nodes", [])
        all_edges = full_data.get("edges", [])
        total_nodes = len(all_nodes)
        total_edges = len(all_edges)

        # Compute the node pagination
        node_start = (nodes_page - 1) * nodes_per_page
        node_end = node_start + nodes_per_page
        paginated_nodes = all_nodes[node_start:node_end]

        # Compute the edge pagination
        edge_start = (edges_page - 1) * edges_per_page
        edge_end = edge_start + edges_per_page
        paginated_edges = all_edges[edge_start:edge_end]

        return JSONResponse(
            content={
                "success": True,
                "data": {
                    "nodes": paginated_nodes,
                    "edges": paginated_edges,
                    "pagination": {
                        "nodes": {
                            "page": nodes_page,
                            "per_page": nodes_per_page,
                            "total": total_nodes,
                            "total_pages": (total_nodes + nodes_per_page - 1) // nodes_per_page,
                        },
                        "edges": {
                            "page": edges_page,
                            "per_page": edges_per_page,
                            "total": total_edges,
                            "total_pages": (total_edges + edges_per_page - 1) // edges_per_page,
                        },
                    },
                },
            }
        # Sort by degree, descending; when degrees are equal, fall back to creation time (if available), descending
        sorted_nodes = sorted(
            all_nodes,
            key=lambda n: (n.get("degree", 0), n.get("created_at", 0)),
            reverse=True
        )

        core_nodes = sorted_nodes[:limit]
        core_node_ids = {node["id"] for node in core_nodes}

        # Only include edges between core nodes, to keep the initial view tidy
        core_edges = [
            edge for edge in full_data.get("edges", [])
            if edge.get("from") in core_node_ids and edge.get("to") in core_node_ids
        ]
        # Make sure the returned structure matches what the frontend expects
        data_to_send = {
            "nodes": core_nodes,
            "edges": core_edges,
            "memories": [],  # the initial load does not need the full memory list
            "stats": full_data.get("stats", {}),  # the statistics are still for the full graph
            "current_file": full_data.get("current_file", "")
        }

        return JSONResponse(content={"success": True, "data": data_to_send})
    except Exception as e:
        import traceback
        traceback.print_exc()
        return JSONResponse(content={"success": False, "error": str(e)}, status_code=500)
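The core-graph selection above boils down to: sort by degree, take the top "limit" nodes, and keep only the edges whose endpoints are both in that set. A standalone sketch of the same idea (toy data; the helper name select_core is made up for illustration):

def select_core(nodes, edges, limit=100):
    # Highest-degree nodes first; ties broken by created_at when present
    ranked = sorted(nodes, key=lambda n: (n.get("degree", 0), n.get("created_at", 0)), reverse=True)
    core = ranked[:limit]
    core_ids = {n["id"] for n in core}
    core_edges = [e for e in edges if e.get("from") in core_ids and e.get("to") in core_ids]
    return core, core_edges

nodes = [{"id": "a", "degree": 3}, {"id": "b", "degree": 1}, {"id": "c", "degree": 2}]
edges = [{"from": "a", "to": "c"}, {"from": "b", "to": "a"}]
print(select_core(nodes, edges, limit=2))  # keeps a and c, and only the a-c edge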

@router.get("/api/nodes/{node_id}/expand")
async def expand_node(node_id: str):
    """
    Get all of the given node's neighbor nodes and the edges that connect them.
    See, this is the magic of on-demand loading. I really am a genius, hmph!
    """
    try:
        full_data = load_graph_data_from_file()
        if "error" in full_data:
            return JSONResponse(content={"success": False, "error": full_data["error"]}, status_code=404)

        nodes_dict = full_data.get("nodes_dict", {})
        adjacency_list = full_data.get("adjacency_list", {})

        if node_id not in nodes_dict:
            return JSONResponse(content={"success": False, "error": "节点未找到"}, status_code=404)

        neighbor_edges = adjacency_list.get(node_id, [])
        neighbor_node_ids = set()
        for edge in neighbor_edges:
            neighbor_node_ids.add(edge["from"])
            neighbor_node_ids.add(edge["to"])

        # Pull the full neighbor node records out of nodes_dict
        neighbor_nodes = [nodes_dict[nid] for nid in neighbor_node_ids if nid in nodes_dict]

        return JSONResponse(content={
            "success": True,
            "data": {
                "nodes": neighbor_nodes,
                "edges": neighbor_edges
            }
        })
    except Exception as e:
        import traceback
        traceback.print_exc()
        return JSONResponse(content={"success": False, "error": str(e)}, status_code=500)
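Together the two new endpoints give the frontend its load-a-little, expand-on-demand flow: fetch /api/graph/core once for the initial view, then hit /api/nodes/{node_id}/expand whenever a node is double-clicked. A hedged client-side sketch using the requests library (the http://localhost:8000 base URL is an assumption; the /visualizer prefix is taken from the template's fetch calls):

import requests  # any HTTP client would do

BASE = "http://localhost:8000/visualizer"  # hypothetical host and port

core = requests.get(f"{BASE}/api/graph/core", params={"limit": 100}).json()
if core.get("success"):
    first_id = core["data"]["nodes"][0]["id"]
    expanded = requests.get(f"{BASE}/api/nodes/{first_id}/expand").json()
    print(len(expanded["data"]["nodes"]), "neighbor nodes returned")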


@router.get("/api/graph/full")
async def get_full_graph_deprecated():
    """
    (Deprecated) Get the complete memory graph data.
    This endpoint now only returns the first page; use /api/graph/paged for paginated access.
    """
    return await get_paged_graph(nodes_page=1, nodes_per_page=100, edges_page=1, edges_per_page=200)


@router.get("/api/files")
@@ -532,20 +532,18 @@
<script>
let network = null;
let availableFiles = [];
// The data set now grows dynamically, so we manage it with vis.DataSet
let nodesDataSet = new vis.DataSet([]);
let edgesDataSet = new vis.DataSet([]);

let graphData = {
    nodes: new vis.DataSet([]),
    edges: new vis.DataSet([])
    nodes: [], // this acts as a backup of the raw data
    edges: [],
    memories: []
};
let originalData = null; // used by the filters

// Pagination state
let pagination = {
    nodes: { page: 1, per_page: 200, total_pages: 1, total: 0 },
    edges: { page: 1, per_page: 500, total_pages: 1, total: 0 }
};
let isLoading = false;

// Node color configuration
const nodeColors = {
    'SUBJECT': '#FF6B6B',
@@ -625,26 +623,32 @@
            dragView: true
        }
    };
    // Use the DataSets we can manage dynamically when initializing
    const data = {
        nodes: nodesDataSet,
        edges: edgesDataSet
    };

    const data = {
        nodes: new vis.DataSet([]),
        edges: new vis.DataSet([])
    };

    network = new vis.Network(container, data, options);
    network = new vis.Network(container, data, options);

    // Register event listeners
    network.on('click', function(params) {
        if (params.nodes.length > 0) {
            const nodeId = params.nodes[0];
            showNodeInfo(nodeId);
            highlightConnectedNodes(nodeId);
            // A single click only highlights; it no longer runs the expensive BFS
        } else {
            // Clicking empty space restores all nodes
            resetNodeHighlight();
            resetNodeHighlight(); // clicking empty space restores the view
        }
    });

    // This is our secret weapon: double-click to expand! Heh heh~
    network.on('doubleClick', async function(params) {
        if (params.nodes.length > 0) {
            const nodeId = params.nodes[0];
            await expandNode(nodeId);
        }
    });
    // Stop the physics engine once stabilization has finished
    network.on('stabilizationIterationsDone', function() {
        console.log('初始稳定化完成,停止物理引擎');
@@ -660,125 +664,86 @@
    });
}

// Reset and load the first page of data
// Load the graph data
async function loadGraph() {
    if (isLoading) return;
    console.log('开始加载初始图数据...');

    // Reset the state
    graphData.nodes.clear();
    graphData.edges.clear();
    pagination.nodes.page = 1;
    pagination.edges.page = 1;

    try {
        // Fetch the complete statistics once up front
        const statsResponse = await fetch('/visualizer/api/stats');
        const statsResult = await statsResponse.json();
        if(statsResult.success) {
            updateStats(statsResult.data);
            pagination.nodes.total = statsResult.data.total_nodes;
            pagination.edges.total = statsResult.data.total_edges;
            pagination.nodes.total_pages = Math.ceil(statsResult.data.total_nodes / pagination.nodes.per_page);
            pagination.edges.total_pages = Math.ceil(statsResult.data.total_edges / pagination.edges.per_page);
        } else {
            throw new Error('获取统计信息失败: ' + statsResult.error);
        }

        // Load the first page
        await loadMoreData();

    } catch (error) {
        console.error('初始加载失败:', error);
        alert('初始加载失败: ' + error.message);
    }
}

// Load more data (the core of the pagination)
async function loadMoreData() {
    if (isLoading) return;

    const canLoadNodes = pagination.nodes.page <= pagination.nodes.total_pages;
    const canLoadEdges = pagination.edges.page <= pagination.edges.total_pages;

    if (!canLoadNodes && !canLoadEdges) {
        console.log('所有数据已加载完毕');
        return;
    }

    isLoading = true;
    document.getElementById('loading').style.display = 'block';

    try {
        const url = `/visualizer/api/graph/paged?nodes_page=${pagination.nodes.page}&nodes_per_page=${pagination.nodes.per_page}&edges_page=${pagination.edges.page}&edges_per_page=${pagination.edges.per_page}`;
        console.log(`正在请求: ${url}`);
        const response = await fetch(url);
        document.getElementById('loading').style.display = 'block';
        // Hit the new core-node endpoint instead of that clunky, heavyweight full endpoint
        const response = await fetch('/visualizer/api/graph/core');
        const result = await response.json();

        if (result.success) {
            console.log(`成功获取 ${result.data.nodes.length} 个节点, ${result.data.edges.length} 个边`);
            updateGraph(result.data); // append the data
            originalData = result.data; // keep the raw data for the filters
            // On the initial load, clear out the old data
            nodesDataSet.clear();
            edgesDataSet.clear();

            // Update the pagination state
            if (result.data.pagination) {
                pagination.nodes.page++;
                pagination.edges.page++;
            }
            updateGraph(result.data, true); // true marks this as the initial load
            updateStats(result.data.stats);
        } else {
            throw new Error('加载分页数据失败: ' + result.error);
            alert('加载核心节点失败: ' + result.error);
        }
    } catch (error) {
        console.error('加载更多数据失败:', error);
        console.error('加载图形失败:', error);
        alert('加载失败: ' + error.message);
    } finally {
        isLoading = false;
        document.getElementById('loading').style.display = 'none';
    }
}
// Update the graph display (append data)
function updateGraph(data) {
    // originalData feeds the filters; here we only append, never overwrite it completely
    if (!originalData) {
        originalData = { nodes: [], edges: [] };
// Update the graph display
function updateGraph(data, isInitialLoad = false) {
    if (isInitialLoad) {
        // On the initial load, replace the data completely
        graphData = data;
    } else {
        // When expanding, merge the data instead
        // Use a Set to avoid adding duplicate nodes
        const existingNodeIds = new Set(graphData.nodes.map(n => n.id));
        data.nodes.forEach(newNode => {
            if (!existingNodeIds.has(newNode.id)) {
                graphData.nodes.push(newNode);
                existingNodeIds.add(newNode.id);
            }
        });

        // Avoid duplicate edges in the same way
        const existingEdgeIds = new Set(graphData.edges.map(e => e.id));
        data.edges.forEach(newEdge => {
            if (!existingEdgeIds.has(newEdge.id)) {
                graphData.edges.push(newEdge);
                existingEdgeIds.add(newEdge.id);
            }
        });
    }
    originalData.nodes.push(...data.nodes);
    originalData.edges.push(...data.edges);


    // Process the node data
    const newNodes = data.nodes.map(node => ({
    // Process the node data and add or update it in the DataSet
    const nodesToAdd = data.nodes.map(node => ({
        id: node.id,
        label: node.label,
        title: node.title,
        group: node.group,
        color: nodeColors[node.group] || '#999',
        // Look, the bigger the node, the more important it is. Pretty cool, right?
        size: 15 + Math.min((node.degree || 0) * 2, 20),
        metadata: node.metadata
    }));
    nodesDataSet.update(nodesToAdd);

    // Process the edge data
    const newEdges = data.edges.map(edge => ({
    // Process the edge data and add it to the DataSet
    const edgesToAdd = data.edges.map(edge => ({
        id: edge.id,
        from: edge.from,
        to: edge.to,
        label: edge.label,
        title: edge.title,
        // Scale the edge width by importance
        width: (edge.importance || 0.5) * 2 + 1
    }));

    // Append the data to the DataSet
    if (newNodes.length > 0) {
        graphData.nodes.add(newNodes);
    }
    if (newEdges.length > 0) {
        graphData.edges.add(newEdges);
    }

    // Set the data on the very first load
    if (pagination.nodes.page === 2) { // meaning the first page has just finished loading
        network.setData({
            nodes: graphData.nodes,
            edges: graphData.edges
        });
    edgesDataSet.update(edgesToAdd);

    // The layout only needs to be re-stabilized when new nodes were added
    if (nodesToAdd.length > 0) {
        network.stabilize();
    }
}
// Update the statistics
@@ -1084,18 +1049,40 @@
    });
}
}

// Fit the view to the window
function fitNetwork() {
    if (network) {
        network.fit({
            animation: {
                duration: 1000,
                easingFunction: 'easeInOutQuad'
            }
        });
// Fit the view to the window
function fitNetwork() {
    if (network) {
        network.fit({
            animation: {
                duration: 1000,
                easingFunction: 'easeInOutQuad'
            }
        });
    }
}

// New: expand a node
async function expandNode(nodeId) {
    console.log(`正在扩展节点: ${nodeId}`);
    document.getElementById('loading').style.display = 'block';

    try {
        const response = await fetch(`/visualizer/api/nodes/${nodeId}/expand`);
        const result = await response.json();

        if (result.success) {
            console.log(`收到 ${result.data.nodes.length} 个新节点, ${result.data.edges.length} 条新边`);
            updateGraph(result.data);
        } else {
            alert(`扩展节点失败: ${result.error}`);
        }
    } catch (error) {
        console.error('扩展节点失败:', error);
        alert('扩展节点失败: ' + error.message);
    } finally {
        document.getElementById('loading').style.display = 'none';
    }
}

// Export the graph data
function exportGraph() {
@@ -1234,42 +1221,13 @@
        closeFileSelector();
    }
}
// Initialize once the page has loaded
window.addEventListener('load', function() {
    initNetwork();
    loadGraph(); // load the initial data
    loadFileList();

    // Add a scroll-to-load listener
    const graphContainer = document.getElementById('memory-graph');
    graphContainer.addEventListener('mousewheel', async (event) => {
        if(network) {
            const canvasHeight = network.canvas.body.height;
            const viewPosition = network.getViewPosition();
            const scale = network.getScale();
            const viewHeight = canvasHeight / scale;

            // Simple scrolled-to-bottom detection (may need tuning in practice)
            if (event.deltaY > 0 && !isLoading) {
                const isAtBottom = viewPosition.y > (canvasHeight/2 - viewHeight/2) * 0.8;
                if (isAtBottom) {
                    console.log("滚动到底部,加载更多数据...");
                    await loadMoreData();
                }
            }
        }
    });
    // Add a button for loading more data manually
    const loadMoreBtn = document.createElement('button');
    loadMoreBtn.textContent = '加载更多';
    loadMoreBtn.className = 'btn';
    loadMoreBtn.style.position = 'absolute';
    loadMoreBtn.style.bottom = '20px';
    loadMoreBtn.style.right = '20px';
    loadMoreBtn.style.zIndex = '10';
    loadMoreBtn.onclick = loadMoreData;
    document.querySelector('.graph-container').appendChild(loadMoreBtn);
});
// Initialize once the page has loaded
window.addEventListener('load', function() {
    initNetwork();
    loadGraph();
    loadFileList();
});
</script>
</body>
</html>