Compare commits

3 Commits

Author  SHA1        Message                               Date
Oxy8    030e60bce2  Add README / Set current objectives   2026-02-23 15:31:56 -03:00
Oxy8    343055b7dd  Start integrating Anzograph           2026-02-13 16:39:41 -03:00
Oxy8    f3db6af958  Primary and Secondary                 2026-02-10 17:38:54 -03:00
15 changed files with 944 additions and 200311 deletions

.gitignore vendored

@@ -1,2 +1,4 @@
 .direnv/
 .envrc
+.env
+data/

Dockerfile

@@ -15,4 +15,4 @@ COPY . .
 EXPOSE 5173
 # Compute layout, then start the dev server with --host for external access
-CMD ["sh", "-c", "npm run layout && npm run dev -- --host"]
+CMD ["sh", "-c", "npm run dev -- --host"]

README.md Normal file

@@ -0,0 +1,67 @@
# Large Instanced Ontology Visualizer
An experimental visualizer designed to render and explore massive instanced ontologies (millions of nodes) with interactive performance.
## 🚀 The Core Challenge
Ontologies with millions of instances present a significant rendering challenge for traditional graph visualization tools. This project addresses the problem by:
1. **Selective Rendering:** Only rendering up to a set limit of nodes (e.g., 2 million) at any given time.
2. **Adaptive Sampling:** When zoomed out, it provides a representative spatial sample of the nodes. When zoomed in, the number of nodes within the viewport naturally falls below the rendering limit, allowing for 100% detail with zero performance degradation.
3. **Spatial Indexing:** Using a custom Quadtree to manage millions of points in memory and efficiently determine visibility.
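
The sampling idea in points 1–2 can be sketched roughly as follows (a minimal illustration, not the project's actual API; `NODE_BUDGET` and `LeafRange` are hypothetical names): when the spatial index reports more visible nodes than the budget allows, nodes are drawn with a uniform stride; once zooming reduces the visible count below the budget, the stride becomes 1 and every node is drawn.

```ts
// Sketch only — NODE_BUDGET and LeafRange are illustrative, not the real renderer's names.
const NODE_BUDGET = 2_000_000;

interface LeafRange { start: number; count: number } // index range into the sorted position buffer

function samplingPlan(visibleLeaves: LeafRange[]): { stride: number; drawn: number } {
  const visible = visibleLeaves.reduce((sum, leaf) => sum + leaf.count, 0);
  // Zoomed out: visible > budget, so nodes are skipped uniformly.
  // Zoomed in: visible <= budget, stride is 1 and every node is rendered.
  const stride = Math.max(1, Math.ceil(visible / NODE_BUDGET));
  return { stride, drawn: Math.ceil(visible / stride) };
}
```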
## 🛠 Technical Architecture
### 1. Data Pipeline & AnzoGraph Integration
The project features an automated pipeline to extract and prepare data from an **AnzoGraph** DB:
- **SPARQL Extraction:** `scripts/fetch_from_db.ts` connects to AnzoGraph via its SPARQL endpoint. It fetches a seed set of subjects and their related triples, identifying "primary" nodes (objects of `rdf:type`).
- **Graph Classification:** Nodes and edges are classified as primary (classes, i.e. objects of `rdf:type`, plus the edges that touch them) or secondary, distinguishing classes from plain instance relationships.
- **Force-Directed Layout:** `scripts/compute_layout.ts` calculates 2D positions for the nodes using a **Barnes-Hut** optimized force-directed simulation, ensuring scalability for large graphs.
### 2. Quadtree Spatial Index
To handle millions of nodes without per-frame object allocation:
- **In-place Sorting:** The Quadtree (`src/quadtree.ts`) spatially sorts the raw `Float32Array` of positions at build-time.
- **Index-Based Access:** Leaves store only the index ranges into the sorted array, pointing directly to the data sent to the GPU.
- **Fast Lookups:** Used for both frustum culling and efficient "find node under cursor" calculations.
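
A rough sketch of the index-range idea (the actual `src/quadtree.ts` class likely differs; `QuadNode` here is illustrative): each node stores only a bounding box and a contiguous range into the spatially sorted position array, so a viewport query returns index ranges rather than allocating per-node objects.

```ts
// Illustrative sketch, not the real src/quadtree.ts API.
interface QuadNode {
  minX: number; minY: number; maxX: number; maxY: number;
  start: number; count: number;      // range into the spatially sorted Float32Array
  children?: QuadNode[];             // undefined for leaves
}

// Frustum culling: collect index ranges of leaves that intersect the viewport.
function collectVisibleRanges(
  node: QuadNode,
  view: { minX: number; minY: number; maxX: number; maxY: number },
  out: Array<[number, number]>,
): void {
  const outside =
    node.maxX < view.minX || node.minX > view.maxX ||
    node.maxY < view.minY || node.minY > view.maxY;
  if (outside) return;                    // skip the whole subtree
  if (!node.children) {
    out.push([node.start, node.count]);   // leaf: emit its range, no per-node objects
    return;
  }
  for (const child of node.children) collectVisibleRanges(child, view, out);
}
```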
### 3. WebGL 2 High-Performance Renderer
The renderer (`src/renderer.ts`) is built for maximum throughput:
- **`WEBGL_multi_draw` Extension:** Batches multiple leaf nodes into single draw calls, minimizing CPU overhead.
- **Zero-Allocation Render Loop:** The frame loop uses pre-allocated typed arrays to prevent GC pauses.
- **Dynamic Level of Detail (LOD):**
- **Points:** Always visible, with adaptive density based on zoom.
- **Lines:** Automatically rendered when zoomed in deep enough to see individual relationships (< 20k visible nodes).
- **Selection:** Interactive selection of nodes highlights immediate neighbors (incoming/outgoing edges).
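
A hedged sketch of the `WEBGL_multi_draw` batching mentioned above (the renderer's internals may differ; `ranges`, `firsts`, and `counts` are illustrative): visible leaf ranges are copied into pre-allocated arrays and submitted in a single call instead of one `gl.drawArrays` per leaf.

```ts
// Sketch: one multi-draw call for all visible leaf ranges (assumes the extension is available).
function drawVisibleLeaves(
  gl: WebGL2RenderingContext,
  ext: WEBGL_multi_draw,
  ranges: Array<[number, number]>, // [startIndex, count] per visible quadtree leaf
  firsts: Int32Array,              // pre-allocated and reused every frame (zero-allocation loop)
  counts: Int32Array,
): void {
  const n = Math.min(ranges.length, firsts.length);
  for (let i = 0; i < n; i++) {
    firsts[i] = ranges[i][0];
    counts[i] = ranges[i][1];
  }
  // Replaces n separate gl.drawArrays(gl.POINTS, first, count) calls with one submission.
  ext.multiDrawArraysWEBGL(gl.POINTS, firsts, 0, counts, 0, n);
}
```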
## 🚦 Getting Started
### Prerequisites
- Docker and Docker Compose
- Node.js (for local development)
### Deployment
The project includes a `docker-compose.yml` that spins up both the **AnzoGraph** database and the visualizer app.
```bash
# Start the services
docker-compose up -d
# Inside the app container, the following will run automatically:
# 1. Fetch data from AnzoGraph (fetch_from_db.ts)
# 2. Compute the 2D layout (compute_layout.ts)
# 3. Start the Vite development server
```
The app will be available at `http://localhost:5173`.
## 🖱 Interactions
- **Drag:** Pan the view.
- **Scroll:** Zoom in/out at the cursor position.
- **Click:** Select a node to see its URI/Label and highlight its neighbors.
- **HUD:** Real-time stats on FPS, nodes drawn, and current sampling ratio.
## TODO
- **Positioning:** Use a better algorithm to position nodes, minimizing edge crossings while keeping the graph compact.
- **Positioning:** Decide how to handle nodes that are both instances and classes.
- **Functionality:** Find every piece of equipment with a specific property or that participates in a specific process.
- **Functionality:** Find every piece of equipment that is connected to a well.
- **Functionality:** Show every connection within a specified depth.
- **Functionality:** Show every element of a specific class.

docker-compose.yml

@@ -1,9 +1,23 @@
 services:
   app:
     build: .
+    depends_on:
+      - anzograph
     ports:
       - "5173:5173"
-    command: sh -c "npx tsx scripts/compute_layout.ts && npm run dev -- --host"
+    env_file:
+      - .env
+    command: sh -c "npm run layout && npm run dev -- --host"
     volumes:
-      - .:/app
-      - /app/node_modules # Prevents local node_modules from overwriting the container's
+      - .:/app:Z
+      - /app/node_modules
+  anzograph:
+    image: cambridgesemantics/anzograph:latest
+    container_name: anzograph
+    ports:
+      - "8080:8080"
+      - "8443:8443"
+    volumes:
+      - ./data:/opt/shared-files:Z

package.json

@@ -7,7 +7,7 @@
"dev": "vite", "dev": "vite",
"build": "vite build", "build": "vite build",
"preview": "vite preview", "preview": "vite preview",
"layout": "tsx scripts/compute_layout.ts" "layout": "npx tsx scripts/fetch_from_db.ts && npx tsx scripts/compute_layout.ts"
}, },
"dependencies": { "dependencies": {
"@webgpu/types": "^0.1.69", "@webgpu/types": "^0.1.69",

File diff suppressed because it is too large

File diff suppressed because it is too large

public/primary_edges.csv Normal file

@@ -0,0 +1 @@
source,target

public/secondary_edges.csv Normal file

@@ -0,0 +1 @@
source,target

public/uri_map.csv Normal file

@@ -0,0 +1 @@
id,uri,label,isPrimary

scripts/compute_layout.ts

@@ -1,17 +1,22 @@
 #!/usr/bin/env npx tsx
 /**
- * Tree-Aware Force Layout
+ * Graph Layout
  *
- * Generates a random tree (via generate_tree), computes a radial tree layout,
- * then applies gentle force refinement and writes node_positions.csv.
+ * Computes a 2D layout for a general graph (not necessarily a tree).
  *
- * Usage: npm run layout
+ * - Primary nodes (from primary_edges.csv) are placed first in a radial layout
+ * - Remaining nodes are placed near their connected primary neighbors
+ * - Barnes-Hut force simulation relaxes the layout
+ *
+ * Reads: primary_edges.csv, secondary_edges.csv
+ * Writes: node_positions.csv
+ *
+ * Usage: npx tsx scripts/compute_layout.ts
  */
-import { writeFileSync } from "fs";
+import { writeFileSync, readFileSync, existsSync } from "fs";
 import { join, dirname } from "path";
 import { fileURLToPath } from "url";
-import { generateTree } from "./generate_tree.js";
 
 const __dirname = dirname(fileURLToPath(import.meta.url));
 const PUBLIC_DIR = join(__dirname, "..", "public");
@@ -20,320 +25,218 @@ const PUBLIC_DIR = join(__dirname, "..", "public");
 // ══════════════════════════════════════════════════════════
 // Configuration
 // ══════════════════════════════════════════════════════════
-const ENABLE_FORCE_SIM = true; // Set to false to skip force simulation
-const ITERATIONS = 100;        // Force iterations (gentle)
-const REPULSION_K = 80;        // Repulsion strength (1% of original 8000)
-const EDGE_LENGTH = 120;       // Desired edge rest length
-const ATTRACTION_K = 0.0002;   // Spring stiffness for edges (1% of original 0.02)
-const THETA = 0.7;             // Barnes-Hut accuracy
-const INITIAL_MAX_DISP = 15;   // Starting max displacement
-const COOLING = 0.998;         // Very slow cooling per iteration
+const ITERATIONS = 200;        // Force iterations
+const REPULSION_K = 200;       // Repulsion strength
+const EDGE_LENGTH = 80;        // Desired edge rest length
+const ATTRACTION_K = 0.005;    // Spring stiffness for edges
+const INITIAL_MAX_DISP = 20;   // Starting max displacement
+const COOLING = 0.995;         // Cooling per iteration
 const MIN_DIST = 0.5;
-const PRINT_EVERY = 10;        // Print progress every N iterations
+const PRINT_EVERY = 20;        // Print progress every N iterations
+const BH_THETA = 0.8;          // Barnes-Hut opening angle
 
-// Scale radius so the tree is nicely spread
-const RADIUS_PER_DEPTH = EDGE_LENGTH * 1.2;
+// Primary node radial placement
+const PRIMARY_RADIUS = 300;    // Radius for primary node ring
 
-// ── Special nodes with longer parent-edges ──
-// Add vertex IDs here to give them longer edges to their parent.
-// These nodes (and all their descendants) will be pushed further out.
-const LONG_EDGE_NODES = new Set<number>([
-  // e.g. 42, 99, 150
-]);
-const LONG_EDGE_MULTIPLIER = 3.0; // How many times longer than normal
 // ══════════════════════════════════════════════════════════
-// Generate tree (in-memory)
+// Read edge data from CSVs
 // ══════════════════════════════════════════════════════════
-const { root, nodeCount: N, childrenOf, parentOf } = generateTree();
-
-const nodeIds: number[] = [];
-for (let i = 0; i < N; i++) nodeIds.push(i);
-
-// Dense index mapping (identity since IDs are 0..N-1)
-const idToIdx = new Map<number, number>();
-for (let i = 0; i < N; i++) idToIdx.set(i, i);
-
-// Edge list as index pairs (child, parent)
-const edges: Array<[number, number]> = [];
-for (const [child, parent] of parentOf) {
-  edges.push([child, parent]);
-}
-
-// Per-node neighbor list (for edge traversal)
-const neighbors: number[][] = Array.from({ length: N }, () => []);
-for (const [a, b] of edges) {
-  neighbors[a].push(b);
-  neighbors[b].push(a);
-}
-
-console.log(`Tree: ${N} nodes, ${edges.length} edges, root=${root}`);
+const primaryPath = join(PUBLIC_DIR, "primary_edges.csv");
+const secondaryPath = join(PUBLIC_DIR, "secondary_edges.csv");
+
+if (!existsSync(primaryPath) || !existsSync(secondaryPath)) {
+  console.error(`Error: Missing input files!`);
+  console.error(`  Expected: ${primaryPath}`);
+  console.error(`  Expected: ${secondaryPath}`);
+  console.error(`  Run 'npx tsx scripts/fetch_from_db.ts' first.`);
+  process.exit(1);
+}
+
+function parseEdges(path: string): Array<[number, number]> {
+  const content = readFileSync(path, "utf-8");
+  const lines = content.trim().split("\n");
+  const edges: Array<[number, number]> = [];
+  for (let i = 1; i < lines.length; i++) {
+    const line = lines[i].trim();
+    if (!line) continue;
+    const [src, tgt] = line.split(",").map(Number);
+    if (!isNaN(src) && !isNaN(tgt)) {
+      edges.push([src, tgt]);
+    }
+  }
+  return edges;
+}
+
+const primaryEdges = parseEdges(primaryPath);
+const secondaryEdges = parseEdges(secondaryPath);
+const allEdges = [...primaryEdges, ...secondaryEdges];
+
+// ══════════════════════════════════════════════════════════
+// Build adjacency
+// ══════════════════════════════════════════════════════════
+const allNodes = new Set<number>();
+const primaryNodes = new Set<number>();
+const neighbors = new Map<number, Set<number>>();
+
+function addNeighbor(a: number, b: number) {
+  if (!neighbors.has(a)) neighbors.set(a, new Set());
+  neighbors.get(a)!.add(b);
+  if (!neighbors.has(b)) neighbors.set(b, new Set());
+  neighbors.get(b)!.add(a);
+}
+
+for (const [src, dst] of primaryEdges) {
+  allNodes.add(src);
+  allNodes.add(dst);
+  primaryNodes.add(src);
+  primaryNodes.add(dst);
+  addNeighbor(src, dst);
+}
+for (const [src, dst] of secondaryEdges) {
+  allNodes.add(src);
+  allNodes.add(dst);
+  addNeighbor(src, dst);
+}
+
+const N = allNodes.size;
+const nodeIds = Array.from(allNodes).sort((a, b) => a - b);
+const idToIdx = new Map<number, number>();
+nodeIds.forEach((id, idx) => idToIdx.set(id, idx));
+
+console.log(
+  `Read graph: ${N} nodes, ${allEdges.length} edges (P=${primaryEdges.length}, S=${secondaryEdges.length})`
+);
+console.log(`Primary nodes: ${primaryNodes.size}`);
 // ══════════════════════════════════════════════════════════
-// Step 1: Radial tree layout (generous spacing, no crossings)
+// Initial placement
 // ══════════════════════════════════════════════════════════
 const x = new Float64Array(N);
 const y = new Float64Array(N);
-const depth = new Uint32Array(N);
-const nodeRadius = new Float64Array(N); // cumulative radius from root
-
-// Compute subtree sizes
-const subtreeSize = new Uint32Array(N).fill(1);
-{
-  const rootIdx = idToIdx.get(root)!;
-  const stack: Array<{ idx: number; phase: "enter" | "exit" }> = [
-    { idx: rootIdx, phase: "enter" },
-  ];
-  while (stack.length > 0) {
-    const { idx, phase } = stack.pop()!;
-    if (phase === "enter") {
-      stack.push({ idx, phase: "exit" });
-      const kids = childrenOf.get(nodeIds[idx]);
-      if (kids) {
-        for (const kid of kids) {
-          stack.push({ idx: idToIdx.get(kid)!, phase: "enter" });
-        }
-      }
-    } else {
-      const kids = childrenOf.get(nodeIds[idx]);
-      if (kids) {
-        for (const kid of kids) {
-          subtreeSize[idx] += subtreeSize[idToIdx.get(kid)!];
-        }
-      }
-    }
-  }
-}
-
-// Compute depths & max depth
-let maxDepth = 0;
-{
-  const rootIdx = idToIdx.get(root)!;
-  const stack: Array<{ idx: number; d: number }> = [{ idx: rootIdx, d: 0 }];
-  while (stack.length > 0) {
-    const { idx, d } = stack.pop()!;
-    depth[idx] = d;
-    if (d > maxDepth) maxDepth = d;
-    const kids = childrenOf.get(nodeIds[idx]);
-    if (kids) {
-      for (const kid of kids) {
-        stack.push({ idx: idToIdx.get(kid)!, d: d + 1 });
-      }
-    }
-  }
-}
-
-// BFS radial assignment (cumulative radii to support per-edge lengths)
-{
-  const rootIdx = idToIdx.get(root)!;
-  x[rootIdx] = 0;
-  y[rootIdx] = 0;
-  nodeRadius[rootIdx] = 0;
-  interface Entry {
-    idx: number;
-    d: number;
-    aStart: number;
-    aEnd: number;
-  }
-  const queue: Entry[] = [{ idx: rootIdx, d: 0, aStart: 0, aEnd: 2 * Math.PI }];
-  let head = 0;
-  while (head < queue.length) {
-    const { idx, d, aStart, aEnd } = queue[head++];
-    const kids = childrenOf.get(nodeIds[idx]);
-    if (!kids || kids.length === 0) continue;
-    // Sort children by subtree size (largest sectors together for balance)
-    const sortedKids = [...kids].sort(
-      (a, b) => (subtreeSize[idToIdx.get(b)!]) - (subtreeSize[idToIdx.get(a)!])
-    );
-    const totalWeight = sortedKids.reduce(
-      (s, k) => s + subtreeSize[idToIdx.get(k)!], 0
-    );
-    let angle = aStart;
-    for (const kid of sortedKids) {
-      const kidIdx = idToIdx.get(kid)!;
-      const w = subtreeSize[kidIdx];
-      const sector = (w / totalWeight) * (aEnd - aStart);
-      const mid = angle + sector / 2;
-      // Cumulative radius: parent's radius + edge step (longer for special nodes)
-      const step = LONG_EDGE_NODES.has(kid)
-        ? RADIUS_PER_DEPTH * LONG_EDGE_MULTIPLIER
-        : RADIUS_PER_DEPTH;
-      const r = nodeRadius[idx] + step;
-      nodeRadius[kidIdx] = r;
-      x[kidIdx] = r * Math.cos(mid);
-      y[kidIdx] = r * Math.sin(mid);
-      queue.push({ idx: kidIdx, d: d + 1, aStart: angle, aEnd: angle + sector });
-      angle += sector;
-    }
-  }
-}
-console.log(`Radial layout done (depth=${maxDepth}, radius_step=${RADIUS_PER_DEPTH})`);
+
+// Step 1: Place primary nodes in a radial layout
+const primaryArr = Array.from(primaryNodes).sort((a, b) => a - b);
+const angleStep = (2 * Math.PI) / Math.max(1, primaryArr.length);
+const radius = PRIMARY_RADIUS * Math.max(1, Math.sqrt(primaryArr.length / 10));
+
+for (let i = 0; i < primaryArr.length; i++) {
+  const idx = idToIdx.get(primaryArr[i])!;
+  const angle = i * angleStep;
+  x[idx] = radius * Math.cos(angle);
+  y[idx] = radius * Math.sin(angle);
+}
+
+console.log(`Placed ${primaryArr.length} primary nodes in radial layout (r=${radius.toFixed(0)})`);
+
+// Step 2: Place remaining nodes near their connected neighbors
+// BFS from already-placed nodes
+const placed = new Set<number>(primaryNodes);
+const queue: number[] = [...primaryArr];
+let head = 0;
+
+while (head < queue.length) {
+  const nodeId = queue[head++];
+  const nodeNeighbors = neighbors.get(nodeId);
+  if (!nodeNeighbors) continue;
+
+  for (const nbId of nodeNeighbors) {
+    if (placed.has(nbId)) continue;
+    placed.add(nbId);
+    // Place near this neighbor with some jitter
+    const parentIdx = idToIdx.get(nodeId)!;
+    const childIdx = idToIdx.get(nbId)!;
+    const jitterAngle = Math.random() * 2 * Math.PI;
+    const jitterDist = EDGE_LENGTH * (0.5 + Math.random() * 0.5);
+    x[childIdx] = x[parentIdx] + jitterDist * Math.cos(jitterAngle);
+    y[childIdx] = y[parentIdx] + jitterDist * Math.sin(jitterAngle);
+    queue.push(nbId);
+  }
+}
+
+// Handle disconnected nodes (place randomly)
+for (const id of nodeIds) {
+  if (!placed.has(id)) {
+    const idx = idToIdx.get(id)!;
+    const angle = Math.random() * 2 * Math.PI;
+    const r = radius * (1 + Math.random());
+    x[idx] = r * Math.cos(angle);
+    y[idx] = r * Math.sin(angle);
+    placed.add(id);
+  }
+}
+
+console.log(`Initial placement complete: ${placed.size} nodes`);
 // ══════════════════════════════════════════════════════════
-// Step 2: Gentle force refinement (preserves non-crossing)
+// Force-directed layout with Barnes-Hut
 // ══════════════════════════════════════════════════════════
-// Barnes-Hut quadtree for repulsion
-interface BHNode {
-  cx: number; cy: number;
-  mass: number;
-  size: number;
-  children: (BHNode | null)[];
-  bodyIdx: number;
-}
-
-function buildBHTree(): BHNode {
-  let minX = Infinity, maxX = -Infinity, minY = Infinity, maxY = -Infinity;
-  for (let i = 0; i < N; i++) {
-    if (x[i] < minX) minX = x[i];
-    if (x[i] > maxX) maxX = x[i];
-    if (y[i] < minY) minY = y[i];
-    if (y[i] > maxY) maxY = y[i];
-  }
-  const size = Math.max(maxX - minX, maxY - minY, 1) * 1.01;
-  const cx = (minX + maxX) / 2;
-  const cy = (minY + maxY) / 2;
-  const root: BHNode = {
-    cx: 0, cy: 0, mass: 0, size,
-    children: [null, null, null, null], bodyIdx: -1,
-  };
-  for (let i = 0; i < N; i++) {
-    insert(root, i, cx, cy, size);
-  }
-  return root;
-}
-
-function insert(node: BHNode, idx: number, ncx: number, ncy: number, ns: number): void {
-  if (node.mass === 0) {
-    node.bodyIdx = idx;
-    node.cx = x[idx]; node.cy = y[idx];
-    node.mass = 1;
-    return;
-  }
-  if (node.bodyIdx >= 0) {
-    const old = node.bodyIdx;
-    node.bodyIdx = -1;
-    putInQuadrant(node, old, ncx, ncy, ns);
-  }
-  putInQuadrant(node, idx, ncx, ncy, ns);
-  const tm = node.mass + 1;
-  node.cx = (node.cx * node.mass + x[idx]) / tm;
-  node.cy = (node.cy * node.mass + y[idx]) / tm;
-  node.mass = tm;
-}
-
-function putInQuadrant(node: BHNode, idx: number, ncx: number, ncy: number, ns: number): void {
-  const hs = ns / 2;
-  const qx = x[idx] >= ncx ? 1 : 0;
-  const qy = y[idx] >= ncy ? 1 : 0;
-  const q = qy * 2 + qx;
-  const ccx = ncx + (qx ? hs / 2 : -hs / 2);
-  const ccy = ncy + (qy ? hs / 2 : -hs / 2);
-  if (!node.children[q]) {
-    node.children[q] = {
-      cx: 0, cy: 0, mass: 0, size: hs,
-      children: [null, null, null, null], bodyIdx: -1,
-    };
-  }
-  insert(node.children[q]!, idx, ccx, ccy, hs);
-}
-
-function repulse(node: BHNode, idx: number, fx: Float64Array, fy: Float64Array): void {
-  if (node.mass === 0 || node.bodyIdx === idx) return;
-  const dx = x[idx] - node.cx;
-  const dy = y[idx] - node.cy;
-  const d2 = dx * dx + dy * dy;
-  const d = Math.sqrt(d2) || MIN_DIST;
-  if (node.bodyIdx >= 0 || (node.size / d) < THETA) {
-    const f = REPULSION_K * node.mass / (d2 + MIN_DIST);
-    fx[idx] += (dx / d) * f;
-    fy[idx] += (dy / d) * f;
-    return;
-  }
-  for (const c of node.children) {
-    if (c) repulse(c, idx, fx, fy);
-  }
-}
-
-// ── Force simulation ──
-if (ENABLE_FORCE_SIM) {
-  console.log(`Applying gentle forces (${ITERATIONS} steps, 1% strength)...`);
-  const t0 = performance.now();
-  let maxDisp = INITIAL_MAX_DISP;
-
-  for (let iter = 0; iter < ITERATIONS; iter++) {
-    const fx = new Float64Array(N);
-    const fy = new Float64Array(N);
-
-    // 1. Repulsion
-    const tree = buildBHTree();
-    for (let i = 0; i < N; i++) {
-      repulse(tree, i, fx, fy);
-    }
-
-    // 2. Edge attraction (spring toward per-edge rest length)
-    for (const [a, b] of edges) {
-      const dx = x[b] - x[a];
-      const dy = y[b] - y[a];
-      const d = Math.sqrt(dx * dx + dy * dy) || MIN_DIST;
-      const aId = nodeIds[a], bId = nodeIds[b];
-      const isLong = LONG_EDGE_NODES.has(aId) || LONG_EDGE_NODES.has(bId);
-      const restLen = isLong ? EDGE_LENGTH * LONG_EDGE_MULTIPLIER : EDGE_LENGTH;
-      const displacement = d - restLen;
-      const f = ATTRACTION_K * displacement;
-      const ux = dx / d, uy = dy / d;
-      fx[a] += ux * f;
-      fy[a] += uy * f;
-      fx[b] -= ux * f;
-      fy[b] -= uy * f;
-    }
-
-    // 3. Apply forces with displacement cap (cooling reduces it over time)
-    for (let i = 0; i < N; i++) {
-      const mag = Math.sqrt(fx[i] * fx[i] + fy[i] * fy[i]);
-      if (mag > 0) {
-        const cap = Math.min(maxDisp, mag) / mag;
-        x[i] += fx[i] * cap;
-        y[i] += fy[i] * cap;
-      }
-    }
-
-    // 4. Cool down
-    maxDisp *= COOLING;
-
-    if ((iter + 1) % PRINT_EVERY === 0) {
-      let totalForce = 0;
-      for (let i = 0; i < N; i++) totalForce += Math.sqrt(fx[i] * fx[i] + fy[i] * fy[i]);
-      console.log(`  iter ${iter + 1}/${ITERATIONS}  max_disp=${maxDisp.toFixed(2)}  avg_force=${(totalForce / N).toFixed(2)}`);
-    }
-  }
-  const elapsed = performance.now() - t0;
-  console.log(`Force simulation done in ${(elapsed / 1000).toFixed(1)}s`);
-} else {
-  console.log("Force simulation SKIPPED (ENABLE_FORCE_SIM = false)");
-}
+console.log(`Running force simulation (${ITERATIONS} iterations, ${N} nodes, ${allEdges.length} edges)...`);
+
+const t0 = performance.now();
+let maxDisp = INITIAL_MAX_DISP;
+
+for (let iter = 0; iter < ITERATIONS; iter++) {
+  const bhRoot = buildBHTree(x, y, N);
+  const fx = new Float64Array(N);
+  const fy = new Float64Array(N);
+
+  // 1. Repulsion via Barnes-Hut
+  for (let i = 0; i < N; i++) {
+    calcBHForce(bhRoot, x[i], y[i], fx, fy, i, BH_THETA, x, y);
+  }
+
+  // 2. Edge attraction (spring force)
+  for (const [aId, bId] of allEdges) {
+    const a = idToIdx.get(aId)!;
+    const b = idToIdx.get(bId)!;
+    const dx = x[b] - x[a];
+    const dy = y[b] - y[a];
+    const d = Math.sqrt(dx * dx + dy * dy) || MIN_DIST;
+    const displacement = d - EDGE_LENGTH;
+    const f = ATTRACTION_K * displacement;
+    const ux = dx / d;
+    const uy = dy / d;
+    fx[a] += ux * f;
+    fy[a] += uy * f;
+    fx[b] -= ux * f;
+    fy[b] -= uy * f;
+  }
+
+  // 3. Apply forces with displacement capping
+  let totalForce = 0;
+  for (let i = 0; i < N; i++) {
+    const mag = Math.sqrt(fx[i] * fx[i] + fy[i] * fy[i]);
+    totalForce += mag;
+    if (mag > 0) {
+      const cap = Math.min(maxDisp, mag) / mag;
+      x[i] += fx[i] * cap;
+      y[i] += fy[i] * cap;
+    }
+  }
+
+  maxDisp *= COOLING;
+
+  if ((iter + 1) % PRINT_EVERY === 0 || iter === 0) {
+    console.log(
+      `  iter ${iter + 1}/${ITERATIONS}  max_disp=${maxDisp.toFixed(2)}  avg_force=${(totalForce / N).toFixed(2)}`
+    );
+  }
+}
+
+const elapsed = performance.now() - t0;
+console.log(`Force simulation done in ${(elapsed / 1000).toFixed(1)}s`);
 
 // ══════════════════════════════════════════════════════════
 // Write output
 // ══════════════════════════════════════════════════════════
-// Write node positions
 const outLines: string[] = ["vertex,x,y"];
 for (let i = 0; i < N; i++) {
   outLines.push(`${nodeIds[i]},${x[i]},${y[i]}`);
@@ -342,13 +245,132 @@ for (let i = 0; i < N; i++) {
 const outPath = join(PUBLIC_DIR, "node_positions.csv");
 writeFileSync(outPath, outLines.join("\n") + "\n");
 console.log(`Wrote ${N} positions to ${outPath}`);
+console.log(`Layout complete.`);
 
-// Write edges (so the renderer can draw them)
-const edgeLines: string[] = ["source,target"];
-for (const [child, parent] of parentOf) {
-  edgeLines.push(`${child},${parent}`);
-}
-const edgesPath = join(PUBLIC_DIR, "edges.csv");
-writeFileSync(edgesPath, edgeLines.join("\n") + "\n");
-console.log(`Wrote ${edges.length} edges to ${edgesPath}`);
+// ══════════════════════════════════════════════════════════
+// Barnes-Hut Helpers
+// ══════════════════════════════════════════════════════════
+interface BHNode {
+  mass: number;
+  cx: number;
+  cy: number;
+  minX: number;
+  maxX: number;
+  minY: number;
+  maxY: number;
+  children?: BHNode[];
+  pointIdx?: number;
+}
+
+function buildBHTree(x: Float64Array, y: Float64Array, n: number): BHNode {
+  let minX = Infinity, maxX = -Infinity, minY = Infinity, maxY = -Infinity;
+  for (let i = 0; i < n; i++) {
+    if (x[i] < minX) minX = x[i];
+    if (x[i] > maxX) maxX = x[i];
+    if (y[i] < minY) minY = y[i];
+    if (y[i] > maxY) maxY = y[i];
+  }
+  const cx = (minX + maxX) / 2;
+  const cy = (minY + maxY) / 2;
+  const halfDim = Math.max(maxX - minX, maxY - minY) / 2 + 0.01;
+
+  const root: BHNode = {
+    mass: 0, cx: 0, cy: 0,
+    minX: cx - halfDim, maxX: cx + halfDim,
+    minY: cy - halfDim, maxY: cy + halfDim,
+  };
+
+  for (let i = 0; i < n; i++) {
+    insertBH(root, i, x[i], y[i], x, y);
+  }
+  calcBHMass(root, x, y);
+  return root;
+}
+
+function insertBH(node: BHNode, idx: number, px: number, py: number, x: Float64Array, y: Float64Array) {
+  if (!node.children && node.pointIdx === undefined) {
+    node.pointIdx = idx;
+    return;
+  }
+  if (!node.children && node.pointIdx !== undefined) {
+    const oldIdx = node.pointIdx;
+    node.pointIdx = undefined;
+    subdivideBH(node);
+    insertBH(node, oldIdx, x[oldIdx], y[oldIdx], x, y);
+  }
+  if (node.children) {
+    const mx = (node.minX + node.maxX) / 2;
+    const my = (node.minY + node.maxY) / 2;
+    let q = 0;
+    if (px > mx) q += 1;
+    if (py > my) q += 2;
+    insertBH(node.children[q], idx, px, py, x, y);
+  }
+}
+
+function subdivideBH(node: BHNode) {
+  const mx = (node.minX + node.maxX) / 2;
+  const my = (node.minY + node.maxY) / 2;
+  node.children = [
+    { mass: 0, cx: 0, cy: 0, minX: node.minX, maxX: mx, minY: node.minY, maxY: my },
+    { mass: 0, cx: 0, cy: 0, minX: mx, maxX: node.maxX, minY: node.minY, maxY: my },
+    { mass: 0, cx: 0, cy: 0, minX: node.minX, maxX: mx, minY: my, maxY: node.maxY },
+    { mass: 0, cx: 0, cy: 0, minX: mx, maxX: node.maxX, minY: my, maxY: node.maxY },
+  ];
+}
+
+function calcBHMass(node: BHNode, x: Float64Array, y: Float64Array) {
+  if (node.pointIdx !== undefined) {
+    node.mass = 1;
+    node.cx = x[node.pointIdx];
+    node.cy = y[node.pointIdx];
+    return;
+  }
+  if (node.children) {
+    let m = 0, sx = 0, sy = 0;
+    for (const c of node.children) {
+      calcBHMass(c, x, y);
+      m += c.mass;
+      sx += c.cx * c.mass;
+      sy += c.cy * c.mass;
+    }
+    node.mass = m;
+    if (m > 0) {
+      node.cx = sx / m;
+      node.cy = sy / m;
+    } else {
+      node.cx = (node.minX + node.maxX) / 2;
+      node.cy = (node.minY + node.maxY) / 2;
+    }
+  }
+}
+
+function calcBHForce(
+  node: BHNode,
+  px: number, py: number,
+  fx: Float64Array, fy: Float64Array,
+  idx: number, theta: number,
+  x: Float64Array, y: Float64Array,
+) {
+  const dx = px - node.cx;
+  const dy = py - node.cy;
+  const d2 = dx * dx + dy * dy;
+  const dist = Math.sqrt(d2);
+  const width = node.maxX - node.minX;
+
+  if (width / dist < theta || !node.children) {
+    if (node.mass > 0 && node.pointIdx !== idx) {
+      const dEff = Math.max(dist, MIN_DIST);
+      const f = (REPULSION_K * node.mass) / (dEff * dEff);
+      fx[idx] += (dx / dEff) * f;
+      fy[idx] += (dy / dEff) * f;
+    }
+  } else {
+    for (const c of node.children) {
+      calcBHForce(c, px, py, fx, fy, idx, theta, x, y);
+    }
+  }
+}

scripts/fetch_from_db.ts Normal file

@@ -0,0 +1,390 @@
#!/usr/bin/env npx tsx
/**
* Fetch RDF Data from AnzoGraph DB
*
* 1. Query the first 1000 distinct subject URIs
* 2. Fetch all triples where those URIs appear as subject or object
* 3. Identify primary nodes (objects of rdf:type)
* 4. Write primary_edges.csv, secondary_edges.csv, and uri_map.csv
*
* Usage: npx tsx scripts/fetch_from_db.ts [--host http://localhost:8080]
*/
import { writeFileSync } from "fs";
import { join, dirname } from "path";
import { fileURLToPath } from "url";
const __dirname = dirname(fileURLToPath(import.meta.url));
const PUBLIC_DIR = join(__dirname, "..", "public");
// ══════════════════════════════════════════════════════════
// Configuration
// ══════════════════════════════════════════════════════════
const RDF_TYPE = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type";
const BATCH_SIZE = 100; // URIs per VALUES batch query
const MAX_RETRIES = 30; // Wait up to ~120s for AnzoGraph to start
const RETRY_DELAY_MS = 4000;
// Path to TTL file inside the AnzoGraph container (mapped via docker-compose volume)
const DATA_FILE = process.env.SPARQL_DATA_FILE || "file:///opt/shared-files/vkg-materialized.ttl";
// Parse --host flag, default to http://localhost:8080
function getEndpoint(): string {
const hostIdx = process.argv.indexOf("--host");
if (hostIdx !== -1 && process.argv[hostIdx + 1]) {
return process.argv[hostIdx + 1];
}
// Inside Docker, use service name; otherwise localhost
return process.env.SPARQL_HOST || "http://localhost:8080";
}
const SPARQL_ENDPOINT = `${getEndpoint()}/sparql`;
// Auth credentials (AnzoGraph defaults)
const SPARQL_USER = process.env.SPARQL_USER || "admin";
const SPARQL_PASS = process.env.SPARQL_PASS || "Passw0rd1";
const AUTH_HEADER = "Basic " + Buffer.from(`${SPARQL_USER}:${SPARQL_PASS}`).toString("base64");
// ══════════════════════════════════════════════════════════
// SPARQL helpers
// ══════════════════════════════════════════════════════════
interface SparqlBinding {
[key: string]: { type: string; value: string };
}
function sleep(ms: number): Promise<void> {
return new Promise((resolve) => setTimeout(resolve, ms));
}
async function sparqlQuery(query: string, retries = 5): Promise<SparqlBinding[]> {
for (let attempt = 1; attempt <= retries; attempt++) {
const controller = new AbortController();
const timeout = setTimeout(() => controller.abort(), 300_000); // 5 min timeout
try {
const t0 = performance.now();
const response = await fetch(SPARQL_ENDPOINT, {
method: "POST",
headers: {
"Content-Type": "application/x-www-form-urlencoded",
"Accept": "application/sparql-results+json",
"Authorization": AUTH_HEADER,
},
body: "query=" + encodeURIComponent(query),
signal: controller.signal,
});
const t1 = performance.now();
console.log(` [sparql] response status=${response.status} in ${((t1 - t0) / 1000).toFixed(1)}s`);
if (!response.ok) {
const text = await response.text();
throw new Error(`SPARQL query failed (${response.status}): ${text}`);
}
const text = await response.text();
const t2 = performance.now();
console.log(` [sparql] body read (${(text.length / 1024).toFixed(0)} KB) in ${((t2 - t1) / 1000).toFixed(1)}s`);
const json = JSON.parse(text);
return json.results.bindings;
} catch (err: any) {
clearTimeout(timeout);
const msg = err instanceof Error ? err.message : String(err);
const isTransient = msg.includes("fetch failed") || msg.includes("Timeout") || msg.includes("ABORT") || msg.includes("abort");
if (isTransient && attempt < retries) {
console.log(` [sparql] transient error (attempt ${attempt}/${retries}): ${msg.substring(0, 100)}`);
console.log(` [sparql] retrying in 10s (AnzoGraph may still be indexing after LOAD)...`);
await sleep(10_000);
continue;
}
throw err;
} finally {
clearTimeout(timeout);
}
}
throw new Error("sparqlQuery: should not reach here");
}
async function waitForAnzoGraph(): Promise<void> {
console.log(`Waiting for AnzoGraph at ${SPARQL_ENDPOINT}...`);
for (let attempt = 1; attempt <= MAX_RETRIES; attempt++) {
try {
const response = await fetch(SPARQL_ENDPOINT, {
method: "POST",
headers: {
"Content-Type": "application/x-www-form-urlencoded",
"Accept": "application/sparql-results+json",
"Authorization": AUTH_HEADER,
},
body: "query=" + encodeURIComponent("ASK WHERE { ?s ?p ?o }"),
});
const text = await response.text();
// Verify it's actual JSON (not a plain-text error from a half-started engine)
JSON.parse(text);
console.log(` AnzoGraph is ready (attempt ${attempt})`);
return;
} catch (err: any) {
const msg = err instanceof Error ? err.message : String(err);
console.log(` Attempt ${attempt}/${MAX_RETRIES}: ${msg.substring(0, 100)}`);
if (attempt < MAX_RETRIES) {
await sleep(RETRY_DELAY_MS);
}
}
}
throw new Error(`AnzoGraph not available after ${MAX_RETRIES} attempts`);
}
async function sparqlUpdate(update: string): Promise<string> {
const response = await fetch(SPARQL_ENDPOINT, {
method: "POST",
headers: {
"Content-Type": "application/sparql-update",
"Accept": "application/json",
"Authorization": AUTH_HEADER,
},
body: update,
});
const text = await response.text();
if (!response.ok) {
throw new Error(`SPARQL update failed (${response.status}): ${text}`);
}
return text;
}
async function loadData(): Promise<void> {
console.log(`Loading data from ${DATA_FILE}...`);
const t0 = performance.now();
const result = await sparqlUpdate(`LOAD <${DATA_FILE}>`);
const elapsed = ((performance.now() - t0) / 1000).toFixed(1);
console.log(` Load complete in ${elapsed}s: ${result.substring(0, 200)}`);
}
// ══════════════════════════════════════════════════════════
// Step 1: Fetch seed URIs
// ══════════════════════════════════════════════════════════
async function fetchSeedURIs(): Promise<string[]> {
console.log("Querying first 1000 distinct subject URIs...");
const t0 = performance.now();
const query = `
SELECT DISTINCT ?s
WHERE { ?s ?p ?o }
LIMIT 1000
`;
const bindings = await sparqlQuery(query);
const elapsed = ((performance.now() - t0) / 1000).toFixed(1);
const uris = bindings.map((b) => b.s.value);
console.log(` Got ${uris.length} seed URIs in ${elapsed}s`);
return uris;
}
// ══════════════════════════════════════════════════════════
// Step 2: Fetch all triples involving seed URIs
// ══════════════════════════════════════════════════════════
interface Triple {
s: string;
p: string;
o: string;
oType: string; // "uri" or "literal"
}
async function fetchTriples(seedURIs: string[]): Promise<Triple[]> {
console.log(`Fetching triples for ${seedURIs.length} seed URIs (batch size: ${BATCH_SIZE})...`);
const allTriples: Triple[] = [];
for (let i = 0; i < seedURIs.length; i += BATCH_SIZE) {
const batch = seedURIs.slice(i, i + BATCH_SIZE);
const valuesClause = batch.map((u) => `<${u}>`).join(" ");
const query = `
SELECT ?s ?p ?o
WHERE {
VALUES ?uri { ${valuesClause} }
{
?uri ?p ?o .
BIND(?uri AS ?s)
}
UNION
{
?s ?p ?uri .
BIND(?uri AS ?o)
}
}
`;
const bindings = await sparqlQuery(query);
for (const b of bindings) {
allTriples.push({
s: b.s.value,
p: b.p.value,
o: b.o.value,
oType: b.o.type,
});
}
const progress = Math.min(i + BATCH_SIZE, seedURIs.length);
process.stdout.write(`\r Fetched triples: batch ${Math.ceil(progress / BATCH_SIZE)}/${Math.ceil(seedURIs.length / BATCH_SIZE)} (${allTriples.length} triples so far)`);
}
console.log(`\n Total triples: ${allTriples.length}`);
return allTriples;
}
// ══════════════════════════════════════════════════════════
// Step 3: Build graph data
// ══════════════════════════════════════════════════════════
interface GraphData {
nodeURIs: string[]; // All unique URIs (subjects & objects that are URIs)
uriToId: Map<string, number>;
primaryNodeIds: Set<number>; // Nodes that are objects of rdf:type
edges: Array<[number, number]>; // [source, target] as numeric IDs
primaryEdges: Array<[number, number]>;
secondaryEdges: Array<[number, number]>;
}
function buildGraphData(triples: Triple[]): GraphData {
console.log("Building graph data...");
// Collect all unique URI nodes (skip literal objects)
const uriSet = new Set<string>();
for (const t of triples) {
uriSet.add(t.s);
if (t.oType === "uri") {
uriSet.add(t.o);
}
}
// Assign numeric IDs
const nodeURIs = Array.from(uriSet).sort();
const uriToId = new Map<string, number>();
nodeURIs.forEach((uri, idx) => uriToId.set(uri, idx));
// Identify primary nodes: objects of rdf:type triples
const primaryNodeIds = new Set<number>();
for (const t of triples) {
if (t.p === RDF_TYPE && t.oType === "uri") {
const objId = uriToId.get(t.o);
if (objId !== undefined) {
primaryNodeIds.add(objId);
}
}
}
// Build edges (only between URI nodes, skip literal objects)
const edgeSet = new Set<string>();
const edges: Array<[number, number]> = [];
for (const t of triples) {
if (t.oType !== "uri") continue;
const srcId = uriToId.get(t.s);
const dstId = uriToId.get(t.o);
if (srcId === undefined || dstId === undefined) continue;
if (srcId === dstId) continue; // Skip self-loops
const key = `${srcId},${dstId}`;
if (edgeSet.has(key)) continue; // Deduplicate
edgeSet.add(key);
edges.push([srcId, dstId]);
}
// Classify edges into primary (touches a primary node) and secondary
const primaryEdges: Array<[number, number]> = [];
const secondaryEdges: Array<[number, number]> = [];
for (const [src, dst] of edges) {
if (primaryNodeIds.has(src) || primaryNodeIds.has(dst)) {
primaryEdges.push([src, dst]);
} else {
secondaryEdges.push([src, dst]);
}
}
console.log(` Nodes: ${nodeURIs.length}`);
console.log(` Primary nodes (rdf:type objects): ${primaryNodeIds.size}`);
console.log(` Edges: ${edges.length} (primary: ${primaryEdges.length}, secondary: ${secondaryEdges.length})`);
return { nodeURIs, uriToId, primaryNodeIds, edges, primaryEdges, secondaryEdges };
}
// ══════════════════════════════════════════════════════════
// Step 4: Write CSV files
// ══════════════════════════════════════════════════════════
function extractLabel(uri: string): string {
// Extract local name: after # or last /
const hashIdx = uri.lastIndexOf("#");
if (hashIdx !== -1) return uri.substring(hashIdx + 1);
const slashIdx = uri.lastIndexOf("/");
if (slashIdx !== -1) return uri.substring(slashIdx + 1);
return uri;
}
function writeCSVs(data: GraphData): void {
// Write primary_edges.csv
const pLines = ["source,target"];
for (const [src, dst] of data.primaryEdges) {
pLines.push(`${src},${dst}`);
}
const pPath = join(PUBLIC_DIR, "primary_edges.csv");
writeFileSync(pPath, pLines.join("\n") + "\n");
console.log(`Wrote ${data.primaryEdges.length} primary edges to ${pPath}`);
// Write secondary_edges.csv
const sLines = ["source,target"];
for (const [src, dst] of data.secondaryEdges) {
sLines.push(`${src},${dst}`);
}
const sPath = join(PUBLIC_DIR, "secondary_edges.csv");
writeFileSync(sPath, sLines.join("\n") + "\n");
console.log(`Wrote ${data.secondaryEdges.length} secondary edges to ${sPath}`);
// Write uri_map.csv (id,uri,label,isPrimary)
const uLines = ["id,uri,label,isPrimary"];
for (let i = 0; i < data.nodeURIs.length; i++) {
const uri = data.nodeURIs[i];
const label = extractLabel(uri);
const isPrimary = data.primaryNodeIds.has(i) ? "1" : "0";
// Escape commas in URIs by quoting
const safeUri = uri.includes(",") ? `"${uri}"` : uri;
const safeLabel = label.includes(",") ? `"${label}"` : label;
uLines.push(`${i},${safeUri},${safeLabel},${isPrimary}`);
}
const uPath = join(PUBLIC_DIR, "uri_map.csv");
writeFileSync(uPath, uLines.join("\n") + "\n");
console.log(`Wrote ${data.nodeURIs.length} URI mappings to ${uPath}`);
}
// ══════════════════════════════════════════════════════════
// Main
// ══════════════════════════════════════════════════════════
async function main() {
console.log(`SPARQL endpoint: ${SPARQL_ENDPOINT}`);
const t0 = performance.now();
await waitForAnzoGraph();
await loadData();
// Smoke test: simplest possible query to verify connectivity
console.log("Smoke test: SELECT ?s ?p ?o LIMIT 3...");
const smokeT0 = performance.now();
const smokeResult = await sparqlQuery("SELECT ?s ?p ?o WHERE { ?s ?p ?o } LIMIT 3");
const smokeElapsed = ((performance.now() - smokeT0) / 1000).toFixed(1);
console.log(` Smoke test OK: ${smokeResult.length} results in ${smokeElapsed}s`);
if (smokeResult.length > 0) {
console.log(` First triple: ${smokeResult[0].s.value} ${smokeResult[0].p.value} ${smokeResult[0].o.value}`);
}
const seedURIs = await fetchSeedURIs();
const triples = await fetchTriples(seedURIs);
const graphData = buildGraphData(triples);
writeCSVs(graphData);
const elapsed = ((performance.now() - t0) / 1000).toFixed(1);
console.log(`\nDone in ${elapsed}s`);
}
main().catch((err) => {
console.error("Fatal error:", err);
process.exit(1);
});

scripts/generate_tree.ts

@@ -2,15 +2,25 @@
  * Random Tree Generator
  *
  * Generates a random tree with 1..MAX_CHILDREN children per node.
- * Exports a function that returns the tree data in memory.
+ * Splits edges into primary (depth ≤ PRIMARY_DEPTH) and secondary.
+ *
+ * Usage: npx tsx scripts/generate_tree.ts
  */
+import { writeFileSync } from "fs";
+import { join, dirname } from "path";
+import { fileURLToPath } from "url";
+
+const __dirname = dirname(fileURLToPath(import.meta.url));
+const PUBLIC_DIR = join(__dirname, "..", "public");
 
 // ══════════════════════════════════════════════════════════
 // Configuration
 // ══════════════════════════════════════════════════════════
-const TARGET_NODES = 100000;  // Approximate number of nodes to generate
-const MAX_CHILDREN = 3;       // Each node gets 1..MAX_CHILDREN children
+const TARGET_NODES = 10000;   // Approximate number of nodes to generate
+const MAX_CHILDREN = 4;       // Each node gets 1..MAX_CHILDREN children
+const PRIMARY_DEPTH = 4;      // Nodes at depth ≤ this form the primary skeleton
 
 // ══════════════════════════════════════════════════════════
 // Tree data types
@@ -21,6 +31,10 @@ export interface TreeData {
   nodeCount: number;
   childrenOf: Map<number, number[]>;
   parentOf: Map<number, number>;
+  depthOf: Map<number, number>;
+  primaryNodes: Set<number>;               // all nodes at depth ≤ PRIMARY_DEPTH
+  primaryEdges: Array<[number, number]>;   // [child, parent] edges within primary
+  secondaryEdges: Array<[number, number]>; // remaining edges
 }
 
 // ══════════════════════════════════════════════════════════
@@ -30,14 +44,17 @@ export interface TreeData {
 export function generateTree(): TreeData {
   const childrenOf = new Map<number, number[]>();
   const parentOf = new Map<number, number>();
+  const depthOf = new Map<number, number>();
   const root = 0;
+  depthOf.set(root, 0);
   let nextId = 1;
 
   const queue: number[] = [root];
   let head = 0;
 
   while (head < queue.length && nextId < TARGET_NODES) {
     const parent = queue[head++];
+    const parentDepth = depthOf.get(parent)!;
     const nKids = 1 + Math.floor(Math.random() * MAX_CHILDREN); // 1..MAX_CHILDREN
     const kids: number[] = [];
@@ -45,17 +62,71 @@ export function generateTree(): TreeData {
       const child = nextId++;
       kids.push(child);
       parentOf.set(child, parent);
+      depthOf.set(child, parentDepth + 1);
       queue.push(child);
     }
     childrenOf.set(parent, kids);
   }
 
-  console.log(`Generated tree: ${nextId} nodes, ${parentOf.size} edges, root=${root}`);
+  // Classify edges and nodes by depth
+  const primaryNodes = new Set<number>();
+  const primaryEdges: Array<[number, number]> = [];
+  const secondaryEdges: Array<[number, number]> = [];
+
+  // Root is always primary
+  primaryNodes.add(root);
+
+  for (const [child, parent] of parentOf) {
+    const childDepth = depthOf.get(child)!;
+    if (childDepth <= PRIMARY_DEPTH) {
+      primaryNodes.add(child);
+      primaryNodes.add(parent);
+      primaryEdges.push([child, parent]);
+    } else {
+      secondaryEdges.push([child, parent]);
+    }
+  }
+
+  console.log(
+    `Generated tree: ${nextId} nodes, ` +
+    `${primaryEdges.length} primary edges (depth ≤ ${PRIMARY_DEPTH}), ` +
+    `${secondaryEdges.length} secondary edges`
+  );
 
   return {
     root,
     nodeCount: nextId,
     childrenOf,
     parentOf,
+    depthOf,
+    primaryNodes,
+    primaryEdges,
+    secondaryEdges,
   };
 }
+
+// ══════════════════════════════════════════════════════════
+// Run if executed directly
+// ══════════════════════════════════════════════════════════
+if (import.meta.url === `file://${process.argv[1]}`) {
+  const data = generateTree();
+
+  // Write primary_edges.csv
+  const pLines: string[] = ["source,target"];
+  for (const [child, parent] of data.primaryEdges) {
+    pLines.push(`${child},${parent}`);
+  }
+  const pPath = join(PUBLIC_DIR, "primary_edges.csv");
+  writeFileSync(pPath, pLines.join("\n") + "\n");
+  console.log(`Wrote ${data.primaryEdges.length} edges to ${pPath}`);
+
+  // Write secondary_edges.csv
+  const sLines: string[] = ["source,target"];
+  for (const [child, parent] of data.secondaryEdges) {
+    sLines.push(`${child},${parent}`);
+  }
+  const sPath = join(PUBLIC_DIR, "secondary_edges.csv");
+  writeFileSync(sPath, sLines.join("\n") + "\n");
+  console.log(`Wrote ${data.secondaryEdges.length} edges to ${sPath}`);
+}

src/App.tsx

@@ -6,6 +6,7 @@ export default function App() {
   const rendererRef = useRef<Renderer | null>(null);
   const [status, setStatus] = useState("Loading node positions…");
   const [nodeCount, setNodeCount] = useState(0);
+  const uriMapRef = useRef<Map<number, { uri: string; label: string; isPrimary: boolean }>>(new Map());
   const [stats, setStats] = useState({
     fps: 0,
     drawn: 0,
@@ -14,7 +15,7 @@ export default function App() {
     ptSize: 0,
   });
   const [error, setError] = useState("");
-  const [hoveredNode, setHoveredNode] = useState<{ x: number; y: number; screenX: number; screenY: number } | null>(null);
+  const [hoveredNode, setHoveredNode] = useState<{ x: number; y: number; screenX: number; screenY: number; index?: number } | null>(null);
   const [selectedNodes, setSelectedNodes] = useState<Set<number>>(new Set());
 
   // Store mouse position in a ref so it can be accessed in render loop without re-renders
@@ -39,16 +40,21 @@ export default function App() {
     (async () => {
       try {
         setStatus("Fetching data files…");
-        const [nodesResponse, edgesResponse] = await Promise.all([
+        const [nodesResponse, primaryEdgesResponse, secondaryEdgesResponse, uriMapResponse] = await Promise.all([
           fetch("/node_positions.csv"),
-          fetch("/edges.csv"),
+          fetch("/primary_edges.csv"),
+          fetch("/secondary_edges.csv"),
+          fetch("/uri_map.csv"),
         ]);
         if (!nodesResponse.ok) throw new Error(`Failed to fetch nodes: ${nodesResponse.status}`);
-        if (!edgesResponse.ok) throw new Error(`Failed to fetch edges: ${edgesResponse.status}`);
+        if (!primaryEdgesResponse.ok) throw new Error(`Failed to fetch primary edges: ${primaryEdgesResponse.status}`);
+        if (!secondaryEdgesResponse.ok) throw new Error(`Failed to fetch secondary edges: ${secondaryEdgesResponse.status}`);
 
-        const [nodesText, edgesText] = await Promise.all([
+        const [nodesText, primaryEdgesText, secondaryEdgesText, uriMapText] = await Promise.all([
           nodesResponse.text(),
-          edgesResponse.text(),
+          primaryEdgesResponse.text(),
+          secondaryEdgesResponse.text(),
+          uriMapResponse.ok ? uriMapResponse.text() : Promise.resolve(""),
         ]);
 
         if (cancelled) return;
@@ -67,12 +73,39 @@ export default function App() {
         }
 
         setStatus("Parsing edges…");
-        const edgeLines = edgesText.split("\n").slice(1).filter(l => l.trim().length > 0);
-        const edgeData = new Uint32Array(edgeLines.length * 2);
-        for (let i = 0; i < edgeLines.length; i++) {
-          const parts = edgeLines[i].split(",");
-          edgeData[i * 2] = parseInt(parts[0], 10);
-          edgeData[i * 2 + 1] = parseInt(parts[1], 10);
-        }
+        const pLines = primaryEdgesText.split("\n").slice(1).filter(l => l.trim().length > 0);
+        const sLines = secondaryEdgesText.split("\n").slice(1).filter(l => l.trim().length > 0);
+
+        const totalEdges = pLines.length + sLines.length;
+        const edgeData = new Uint32Array(totalEdges * 2);
+
+        let idx = 0;
+        // Parse primary
+        for (let i = 0; i < pLines.length; i++) {
+          const parts = pLines[i].split(",");
+          edgeData[idx++] = parseInt(parts[0], 10);
+          edgeData[idx++] = parseInt(parts[1], 10);
+        }
+        // Parse secondary
+        for (let i = 0; i < sLines.length; i++) {
+          const parts = sLines[i].split(",");
+          edgeData[idx++] = parseInt(parts[0], 10);
+          edgeData[idx++] = parseInt(parts[1], 10);
+        }
+
+        // Parse URI map if available
+        if (uriMapText) {
+          const uriLines = uriMapText.split("\n").slice(1).filter(l => l.trim().length > 0);
+          for (const line of uriLines) {
+            const parts = line.split(",");
+            if (parts.length >= 4) {
+              const id = parseInt(parts[0], 10);
+              const uri = parts[1];
+              const label = parts[2];
+              const isPrimary = parts[3].trim() === "1";
+              uriMapRef.current.set(id, { uri, label, isPrimary });
+            }
+          }
+        }
 
         if (cancelled) return;
@@ -83,7 +116,7 @@ export default function App() {
         const buildMs = renderer.init(xs, ys, vertexIds, edgeData);
         setNodeCount(renderer.getNodeCount());
         setStatus("");
-        console.log(`Init complete: ${count.toLocaleString()} nodes, ${edgeLines.length.toLocaleString()} edges in ${buildMs.toFixed(0)}ms`);
+        console.log(`Init complete: ${count.toLocaleString()} nodes, ${totalEdges.toLocaleString()} edges in ${buildMs.toFixed(0)}ms`);
       } catch (e) {
         if (!cancelled) {
           setError(e instanceof Error ? e.message : String(e));
@@ -167,9 +200,9 @@ export default function App() {
       frameCount++;
 
       // Find hovered node using quadtree
-      const node = renderer.findNodeAt(mousePos.current.x, mousePos.current.y);
-      if (node) {
-        setHoveredNode({ ...node, screenX: mousePos.current.x, screenY: mousePos.current.y });
+      const nodeResult = renderer.findNodeIndexAt(mousePos.current.x, mousePos.current.y);
+      if (nodeResult) {
+        setHoveredNode({ x: nodeResult.x, y: nodeResult.y, screenX: mousePos.current.x, screenY: mousePos.current.y, index: nodeResult.index });
       } else {
         setHoveredNode(null);
       }
@@ -316,7 +349,22 @@ export default function App() {
             boxShadow: "0 2px 8px rgba(0,0,0,0.5)",
           }}
         >
-          ({hoveredNode.x.toFixed(2)}, {hoveredNode.y.toFixed(2)})
+          {(() => {
+            if (hoveredNode.index !== undefined && rendererRef.current) {
+              const vertexId = rendererRef.current.getVertexId(hoveredNode.index);
+              const info = vertexId !== undefined ? uriMapRef.current.get(vertexId) : undefined;
+              if (info) {
+                return (
+                  <>
+                    <div style={{ fontWeight: "bold", marginBottom: 2 }}>{info.label}</div>
+                    <div style={{ fontSize: "10px", color: "#8cf", wordBreak: "break-all", maxWidth: 400 }}>{info.uri}</div>
+                    {info.isPrimary && <div style={{ color: "#ff0", fontSize: "10px", marginTop: 2 }}> Primary (rdf:type)</div>}
+                  </>
+                );
+              }
+            }
+            return <>({hoveredNode.x.toFixed(2)}, {hoveredNode.y.toFixed(2)})</>;
+          })()}
         </div>
       )}
     </>

src/renderer.ts

@@ -83,6 +83,7 @@ export class Renderer {
   private nodeCount = 0;
   private edgeCount = 0;
   private neighborMap: Map<number, number[]> = new Map();
+  private sortedToVertexId: Uint32Array = new Uint32Array(0);
   private leafEdgeStarts: Uint32Array = new Uint32Array(0);
   private leafEdgeCounts: Uint32Array = new Uint32Array(0);
   private maxPtSize = 256;
@@ -213,6 +214,12 @@ export class Renderer {
     gl.bufferData(gl.ARRAY_BUFFER, sorted, gl.STATIC_DRAW);
     gl.bindVertexArray(null);
 
+    // Build sorted index → vertex ID mapping for hover lookups
+    this.sortedToVertexId = new Uint32Array(count);
+    for (let i = 0; i < count; i++) {
+      this.sortedToVertexId[i] = vertexIds[order[i]];
+    }
+
     // Build vertex ID → original input index mapping
     const vertexIdToOriginal = new Map<number, number>();
     for (let i = 0; i < count; i++) {
@@ -331,6 +338,15 @@ export class Renderer {
     return this.nodeCount;
   }
 
+  /**
+   * Get the original vertex ID for a given sorted index.
+   * Useful for looking up URI labels from the URI map.
+   */
+  getVertexId(sortedIndex: number): number | undefined {
+    if (sortedIndex < 0 || sortedIndex >= this.sortedToVertexId.length) return undefined;
+    return this.sortedToVertexId[sortedIndex];
+  }
+
   /**
    * Convert screen coordinates (CSS pixels) to world coordinates.
    */
*/ */