Primary and Secondary
@@ -1,17 +1,17 @@
#!/usr/bin/env npx tsx
/**
 * Tree-Aware Force Layout
 * Two-Phase Tree Layout
 *
 * Generates a random tree (via generate_tree), computes a radial tree layout,
 * then applies gentle force refinement and writes node_positions.csv.
 * Phase 1: Position a primary skeleton (nodes from primary_edges.csv)
 *   with generous spacing, then force-simulate the skeleton.
 * Phase 2: Fill in remaining subtrees (secondary_edges.csv) within sectors.
 *
 * Usage: npm run layout
 * Usage: npm run layout-only (after generating tree)
 */

import { writeFileSync } from "fs";
import { writeFileSync, readFileSync, existsSync } from "fs";
import { join, dirname } from "path";
import { fileURLToPath } from "url";
import { generateTree } from "./generate_tree.js";

const __dirname = dirname(fileURLToPath(import.meta.url));
const PUBLIC_DIR = join(__dirname, "..", "public");
@@ -20,259 +20,245 @@ const PUBLIC_DIR = join(__dirname, "..", "public");
// Configuration
// ══════════════════════════════════════════════════════════

const ENABLE_FORCE_SIM = true;   // Set to false to skip force simulation
const ITERATIONS = 100;          // Force iterations (gentle)
const REPULSION_K = 80;          // Repulsion strength (1% of original 8000)
const EDGE_LENGTH = 120;         // Desired edge rest length
const ATTRACTION_K = 0.0002;     // Spring stiffness for edges (1% of original 0.02)
const THETA = 0.7;               // Barnes-Hut accuracy
const INITIAL_MAX_DISP = 15;     // Starting max displacement
const COOLING = 0.998;           // Very slow cooling per iteration
const ENABLE_FORCE_SIM = true;   // Set to false to skip force simulation
const ITERATIONS = 100;          // Force iterations
const REPULSION_K = 80;          // Repulsion strength
const EDGE_LENGTH = 120;         // Desired edge rest length
const ATTRACTION_K = 0.0002;     // Spring stiffness for edges
const INITIAL_MAX_DISP = 15;     // Starting max displacement
const COOLING = 0.998;           // Cooling per iteration
const MIN_DIST = 0.5;
const PRINT_EVERY = 10;          // Print progress every N iterations
const PRINT_EVERY = 10;          // Print progress every N iterations

// Scale radius so the tree is nicely spread
const RADIUS_PER_DEPTH = EDGE_LENGTH * 1.2;

// ── Special nodes with longer parent-edges ──
// Add vertex IDs here to give them longer edges to their parent.
// These nodes (and all their descendants) will be pushed further out.
const LONG_EDGE_NODES = new Set<number>([
  // e.g. 42, 99, 150
]);
const LONG_EDGE_MULTIPLIER = 3.0; // How many times longer than normal
// How many times longer skeleton edges are vs. normal edges
const LONG_EDGE_MULTIPLIER = 39.0;

const SKELETON_STEP = RADIUS_PER_DEPTH * LONG_EDGE_MULTIPLIER;
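// A rough worked example, assuming the default constants above are unchanged:
// RADIUS_PER_DEPTH = 120 * 1.2 = 144, so SKELETON_STEP = 144 * 39 = 5616 units
// between a skeleton node and its parent, versus 144 for an ordinary parent-child edge.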


// ══════════════════════════════════════════════════════════
// Generate tree (in-memory)
// Read tree data from CSVs
// ══════════════════════════════════════════════════════════

const { root, nodeCount: N, childrenOf, parentOf } = generateTree();
const primaryPath = join(PUBLIC_DIR, "primary_edges.csv");
const secondaryPath = join(PUBLIC_DIR, "secondary_edges.csv");

const nodeIds: number[] = [];
for (let i = 0; i < N; i++) nodeIds.push(i);
if (!existsSync(primaryPath) || !existsSync(secondaryPath)) {
  console.error(`Error: Missing input files!`);
  console.error(` Expected: ${primaryPath}`);
  console.error(` Expected: ${secondaryPath}`);
  console.error(` Run 'npx tsx scripts/generate_tree.ts' first.`);
  process.exit(1);
}

// Dense index mapping (identity since IDs are 0..N-1)
// ── Helper to parse CSV edge list ──
function parseEdges(path: string): Array<[number, number]> {
  const content = readFileSync(path, "utf-8");
  const lines = content.trim().split("\n");
  const edges: Array<[number, number]> = [];
  // Skip header "source,target"
  for (let i = 1; i < lines.length; i++) {
    const line = lines[i].trim();
    if (!line) continue;
    const [src, tgt] = line.split(",").map(Number);
    if (!isNaN(src) && !isNaN(tgt)) {
      edges.push([src, tgt]);
    }
  }
  return edges;
}
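// Illustrative sketch of the expected input shape (the IDs below are made up; real
// files come from generate_tree.ts, one "child,parent" pair per line after the header):
//
//   source,target
//   1,0
//   2,0
//   3,1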

const primaryEdges = parseEdges(primaryPath);
const secondaryEdges = parseEdges(secondaryPath);
const allEdges = [...primaryEdges, ...secondaryEdges];

// ── Reconstruct tree connectivity ──

const childrenOf = new Map<number, number[]>();
const parentOf = new Map<number, number>();
const allNodes = new Set<number>();
const primaryNodes = new Set<number>(); // Nodes involved in primary edges

// Process primary edges first (to classify primary nodes)
for (const [child, parent] of primaryEdges) {
  allNodes.add(child);
  allNodes.add(parent);
  primaryNodes.add(child);
  primaryNodes.add(parent);

  parentOf.set(child, parent);
  if (!childrenOf.has(parent)) childrenOf.set(parent, []);
  childrenOf.get(parent)!.push(child);
}

// Process secondary edges
for (const [child, parent] of secondaryEdges) {
  allNodes.add(child);
  allNodes.add(parent);

  parentOf.set(child, parent);
  if (!childrenOf.has(parent)) childrenOf.set(parent, []);
  childrenOf.get(parent)!.push(child);
}

const N = allNodes.size;
const nodeIds = Array.from(allNodes).sort((a, b) => a - b);

// Find root (node with no parent).
// Assuming a single root for now; if there were several, pick the smallest ID or error out.
let root = -1;
for (const node of allNodes) {
  if (!parentOf.has(node)) {
    root = node;
    break;
  }
}
if (primaryNodes.size === 0 && N > 0) {
  // Edge case: no primary edges at all
  root = nodeIds[0];
  primaryNodes.add(root);
}

console.log(
  `Read tree: ${N} nodes, ${allEdges.length} edges (P=${primaryEdges.length}, S=${secondaryEdges.length}), root=${root}`
);

// ══════════════════════════════════════════════════════════
// Compute full-tree subtree sizes
// ══════════════════════════════════════════════════════════

const subtreeSize = new Map<number, number>();
for (const id of nodeIds) subtreeSize.set(id, 1);

{
  // Iterative post-order traversal to sum subtree sizes
  const stack: Array<{ id: number; phase: "enter" | "exit" }> = [
    { id: root, phase: "enter" },
  ];
  while (stack.length > 0) {
    const { id, phase } = stack.pop()!;
    if (phase === "enter") {
      stack.push({ id, phase: "exit" });
      const kids = childrenOf.get(id);
      if (kids) for (const kid of kids) stack.push({ id: kid, phase: "enter" });
    } else {
      const kids = childrenOf.get(id);
      if (kids) {
        let sum = 0;
        for (const kid of kids) sum += subtreeSize.get(kid)!;
        subtreeSize.set(id, 1 + sum);
      }
    }
  }
}
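// Quick sanity example of what subtreeSize holds after the traversal: a leaf stays at 1,
// a node with two leaf children ends up at 3, and the root's entry equals the total count N.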

// ══════════════════════════════════════════════════════════
// Skeleton = primary nodes
// ══════════════════════════════════════════════════════════

const skeleton = primaryNodes;
console.log(`Skeleton: ${skeleton.size} nodes, ${primaryEdges.length} edges`);

// ══════════════════════════════════════════════════════════
// Position arrays & per-node tracking
// ══════════════════════════════════════════════════════════

// Node IDs produced by generate_tree are sequential, so nodeIds[i] === i and dense
// array indexing by i would work directly. To stay robust if the input CSVs ever
// contain gaps in the ID space, we build an explicit idToIdx map anyway.

const maxId = Math.max(...nodeIds);
const mapSize = maxId + 1; // Size needed if we indexed by raw ID; we remap to dense indices instead
const idToIdx = new Map<number, number>();
for (let i = 0; i < N; i++) idToIdx.set(i, i);

// Edge list as index pairs (child, parent)
const edges: Array<[number, number]> = [];
for (const [child, parent] of parentOf) {
  edges.push([child, parent]);
}

// Per-node neighbor list (for edge traversal)
const neighbors: number[][] = Array.from({ length: N }, () => []);
for (const [a, b] of edges) {
  neighbors[a].push(b);
  neighbors[b].push(a);
}

console.log(`Tree: ${N} nodes, ${edges.length} edges, root=${root}`);

// ══════════════════════════════════════════════════════════
// Step 1: Radial tree layout (generous spacing, no crossings)
// ══════════════════════════════════════════════════════════
nodeIds.forEach((id, idx) => idToIdx.set(id, idx));

const x = new Float64Array(N);
const y = new Float64Array(N);
const depth = new Uint32Array(N);
const nodeRadius = new Float64Array(N); // cumulative radius from root
const nodeRadius = new Float64Array(N); // distance from origin
const sectorStart = new Float64Array(N);
const sectorEnd = new Float64Array(N);
const positioned = new Set<number>();

// ══════════════════════════════════════════════════════════
// Phase 1: Layout skeleton with long edges
// ══════════════════════════════════════════════════════════

const rootIdx = idToIdx.get(root)!;
x[rootIdx] = 0;
y[rootIdx] = 0;
nodeRadius[rootIdx] = 0;
sectorStart[rootIdx] = 0;
sectorEnd[rootIdx] = 2 * Math.PI;
positioned.add(root);

// Compute subtree sizes
const subtreeSize = new Uint32Array(N).fill(1);
{
  const rootIdx = idToIdx.get(root)!;
  const stack: Array<{ idx: number; phase: "enter" | "exit" }> = [
    { idx: rootIdx, phase: "enter" },
  ];
  while (stack.length > 0) {
    const { idx, phase } = stack.pop()!;
    if (phase === "enter") {
      stack.push({ idx, phase: "exit" });
      const kids = childrenOf.get(nodeIds[idx]);
      if (kids) {
        for (const kid of kids) {
          stack.push({ idx: idToIdx.get(kid)!, phase: "enter" });
        }
      }
    } else {
      const kids = childrenOf.get(nodeIds[idx]);
      if (kids) {
        for (const kid of kids) {
          subtreeSize[idx] += subtreeSize[idToIdx.get(kid)!];
        }
      }
    }
  }
}

// Compute depths & max depth
let maxDepth = 0;
{
  const rootIdx = idToIdx.get(root)!;
  const stack: Array<{ idx: number; d: number }> = [{ idx: rootIdx, d: 0 }];
  while (stack.length > 0) {
    const { idx, d } = stack.pop()!;
    depth[idx] = d;
    if (d > maxDepth) maxDepth = d;
    const kids = childrenOf.get(nodeIds[idx]);
    if (kids) {
      for (const kid of kids) {
        stack.push({ idx: idToIdx.get(kid)!, d: d + 1 });
      }
    }
  }
}

// BFS radial assignment (cumulative radii to support per-edge lengths)
{
  const rootIdx = idToIdx.get(root)!;
  x[rootIdx] = 0;
  y[rootIdx] = 0;
  nodeRadius[rootIdx] = 0;

  interface Entry {
    idx: number;
    d: number;
    aStart: number;
    aEnd: number;
  }

  const queue: Entry[] = [{ idx: rootIdx, d: 0, aStart: 0, aEnd: 2 * Math.PI }];
  const queue: number[] = [root];
  let head = 0;

  while (head < queue.length) {
    const { idx, d, aStart, aEnd } = queue[head++];
    const kids = childrenOf.get(nodeIds[idx]);
    const parentId = queue[head++];
    const parentIdx = idToIdx.get(parentId)!;
    const kids = childrenOf.get(parentId);
    if (!kids || kids.length === 0) continue;

    // Sort children by subtree size (largest sectors together for balance)
    const sortedKids = [...kids].sort(
      (a, b) => (subtreeSize[idToIdx.get(b)!]) - (subtreeSize[idToIdx.get(a)!])
    );
    const aStart = sectorStart[parentIdx];
    const aEnd = sectorEnd[parentIdx];
    const totalWeight = kids.reduce((s, k) => s + subtreeSize.get(k)!, 0);

    const totalWeight = sortedKids.reduce(
      (s, k) => s + subtreeSize[idToIdx.get(k)!], 0
    // Sort children by subtree size
    const sortedKids = [...kids].sort(
      (a, b) => subtreeSize.get(b)! - subtreeSize.get(a)!
    );

    let angle = aStart;
    for (const kid of sortedKids) {
      const kidIdx = idToIdx.get(kid)!;
      const w = subtreeSize[kidIdx];
      const w = subtreeSize.get(kid)!;
      const sector = (w / totalWeight) * (aEnd - aStart);
      const mid = angle + sector / 2;
      sectorStart[kidIdx] = angle;
      sectorEnd[kidIdx] = angle + sector;

      // Cumulative radius: parent's radius + edge step (longer for special nodes)
      const step = LONG_EDGE_NODES.has(kid)
        ? RADIUS_PER_DEPTH * LONG_EDGE_MULTIPLIER
        : RADIUS_PER_DEPTH;
      const r = nodeRadius[idx] + step;
      nodeRadius[kidIdx] = r;
      // Only position skeleton children now
      if (skeleton.has(kid)) {
        const midAngle = angle + sector / 2;
        const r = nodeRadius[parentIdx] + SKELETON_STEP;
        nodeRadius[kidIdx] = r;
        x[kidIdx] = r * Math.cos(midAngle);
        y[kidIdx] = r * Math.sin(midAngle);
        positioned.add(kid);
        queue.push(kid); // continue BFS within skeleton
      }

      x[kidIdx] = r * Math.cos(mid);
      y[kidIdx] = r * Math.sin(mid);

      queue.push({ idx: kidIdx, d: d + 1, aStart: angle, aEnd: angle + sector });
      angle += sector;
    }
  }
}
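// Rough worked example of the sector split above (the numbers are illustrative): a
// parent owning the sector [0, π] with children of subtree sizes 3 and 1 hands the
// larger child a span of 3π/4 and the smaller one π/4, each placed at its span's mid-angle.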

console.log(`Radial layout done (depth=${maxDepth}, radius_step=${RADIUS_PER_DEPTH})`);
console.log(`Phase 1: Positioned ${positioned.size} skeleton nodes`);

// ══════════════════════════════════════════════════════════
// Step 2: Gentle force refinement (preserves non-crossing)
// Force simulation on skeleton only
// ══════════════════════════════════════════════════════════

// Barnes-Hut quadtree for repulsion
interface BHNode {
  cx: number; cy: number;
  mass: number;
  size: number;
  children: (BHNode | null)[];
  bodyIdx: number;
}
if (ENABLE_FORCE_SIM && skeleton.size > 1) {
  const skeletonArr = Array.from(skeleton);
  const skeletonIndices = skeletonArr.map(id => idToIdx.get(id)!);

function buildBHTree(): BHNode {
  let minX = Infinity, maxX = -Infinity, minY = Infinity, maxY = -Infinity;
  for (let i = 0; i < N; i++) {
    if (x[i] < minX) minX = x[i];
    if (x[i] > maxX) maxX = x[i];
    if (y[i] < minY) minY = y[i];
    if (y[i] > maxY) maxY = y[i];
  }
  const size = Math.max(maxX - minX, maxY - minY, 1) * 1.01;
  const cx = (minX + maxX) / 2;
  const cy = (minY + maxY) / 2;
  console.log(
    `Force sim on skeleton (${skeletonArr.length} nodes, ${primaryEdges.length} edges)...`
  );

  const root: BHNode = {
    cx: 0, cy: 0, mass: 0, size,
    children: [null, null, null, null], bodyIdx: -1,
  };

  for (let i = 0; i < N; i++) {
    insert(root, i, cx, cy, size);
  }
  return root;
}

function insert(node: BHNode, idx: number, ncx: number, ncy: number, ns: number): void {
  if (node.mass === 0) {
    node.bodyIdx = idx;
    node.cx = x[idx]; node.cy = y[idx];
    node.mass = 1;
    return;
  }
  if (node.bodyIdx >= 0) {
    const old = node.bodyIdx;
    node.bodyIdx = -1;
    putInQuadrant(node, old, ncx, ncy, ns);
  }
  putInQuadrant(node, idx, ncx, ncy, ns);
  const tm = node.mass + 1;
  node.cx = (node.cx * node.mass + x[idx]) / tm;
  node.cy = (node.cy * node.mass + y[idx]) / tm;
  node.mass = tm;
}

function putInQuadrant(node: BHNode, idx: number, ncx: number, ncy: number, ns: number): void {
  const hs = ns / 2;
  const qx = x[idx] >= ncx ? 1 : 0;
  const qy = y[idx] >= ncy ? 1 : 0;
  const q = qy * 2 + qx;
  const ccx = ncx + (qx ? hs / 2 : -hs / 2);
  const ccy = ncy + (qy ? hs / 2 : -hs / 2);
  if (!node.children[q]) {
    node.children[q] = {
      cx: 0, cy: 0, mass: 0, size: hs,
      children: [null, null, null, null], bodyIdx: -1,
    };
  }
  insert(node.children[q]!, idx, ccx, ccy, hs);
}

function repulse(node: BHNode, idx: number, fx: Float64Array, fy: Float64Array): void {
  if (node.mass === 0 || node.bodyIdx === idx) return;
  const dx = x[idx] - node.cx;
  const dy = y[idx] - node.cy;
  const d2 = dx * dx + dy * dy;
  const d = Math.sqrt(d2) || MIN_DIST;

  if (node.bodyIdx >= 0 || (node.size / d) < THETA) {
    const f = REPULSION_K * node.mass / (d2 + MIN_DIST);
    fx[idx] += (dx / d) * f;
    fy[idx] += (dy / d) * f;
    return;
  }
  for (const c of node.children) {
    if (c) repulse(c, idx, fx, fy);
  }
}
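// Barnes-Hut acceptance criterion, for reference: a cell of side `size` at distance d
// is treated as a single body when size / d < THETA. With THETA = 0.7 that means
// d > size / 0.7 ≈ 1.43 * size; otherwise the traversal recurses into its four children.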

// ── Force simulation ──
if (ENABLE_FORCE_SIM) {
  console.log(`Applying gentle forces (${ITERATIONS} steps, 1% strength)...`);
  const t0 = performance.now();
  let maxDisp = INITIAL_MAX_DISP;

@@ -280,22 +266,32 @@ if (ENABLE_FORCE_SIM) {
    const fx = new Float64Array(N);
    const fy = new Float64Array(N);

    // 1. Repulsion
    const tree = buildBHTree();
    for (let i = 0; i < N; i++) {
      repulse(tree, i, fx, fy);
    // 1. Pairwise repulsion
    for (let i = 0; i < skeletonIndices.length; i++) {
      const u = skeletonIndices[i];
      for (let j = i + 1; j < skeletonIndices.length; j++) {
        const v = skeletonIndices[j];
        const dx = x[u] - x[v];
        const dy = y[u] - y[v];
        const d2 = dx * dx + dy * dy;
        const d = Math.sqrt(d2) || MIN_DIST;
        const f = REPULSION_K / (d2 + MIN_DIST);
        fx[u] += (dx / d) * f;
        fy[u] += (dy / d) * f;
        fx[v] -= (dx / d) * f;
        fy[v] -= (dy / d) * f;
      }
    }
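    // Cost note (an estimate, not a measurement): exact pairwise repulsion is O(k^2)
    // per iteration in the skeleton size k. With PRIMARY_DEPTH = 3 and MAX_CHILDREN = 4
    // the skeleton holds at most 1 + 4 + 16 + 64 = 85 nodes, i.e. about 3,570 pairs.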

    // 2. Edge attraction (spring toward per-edge rest length)
    for (const [a, b] of edges) {
    // 2. Edge attraction
    for (const [aId, bId] of primaryEdges) {
      const a = idToIdx.get(aId)!;
      const b = idToIdx.get(bId)!;
      const dx = x[b] - x[a];
      const dy = y[b] - y[a];
      const d = Math.sqrt(dx * dx + dy * dy) || MIN_DIST;
      const aId = nodeIds[a], bId = nodeIds[b];
      const isLong = LONG_EDGE_NODES.has(aId) || LONG_EDGE_NODES.has(bId);
      const restLen = isLong ? EDGE_LENGTH * LONG_EDGE_MULTIPLIER : EDGE_LENGTH;
      const displacement = d - restLen;
      const f = ATTRACTION_K * displacement;
      const displacement = d - SKELETON_STEP;
      const f = (ATTRACTION_K / LONG_EDGE_MULTIPLIER) * displacement;
      const ux = dx / d, uy = dy / d;
      fx[a] += ux * f;
      fy[a] += uy * f;
@@ -303,32 +299,181 @@ if (ENABLE_FORCE_SIM) {
      fy[b] -= uy * f;
    }

    // 3. Apply forces with displacement cap (cooling reduces it over time)
    for (let i = 0; i < N; i++) {
      const mag = Math.sqrt(fx[i] * fx[i] + fy[i] * fy[i]);
    // 3. Apply forces (skip root)
    for (const idx of skeletonIndices) {
      if (nodeIds[idx] === root) continue;
      const mag = Math.sqrt(fx[idx] * fx[idx] + fy[idx] * fy[idx]);
      if (mag > 0) {
        const cap = Math.min(maxDisp, mag) / mag;
        x[i] += fx[i] * cap;
        y[i] += fy[i] * cap;
        x[idx] += fx[idx] * cap;
        y[idx] += fy[idx] * cap;
      }
    }

    // 4. Cool down
    maxDisp *= COOLING;

    if ((iter + 1) % PRINT_EVERY === 0) {
      let totalForce = 0;
      for (let i = 0; i < N; i++) totalForce += Math.sqrt(fx[i] * fx[i] + fy[i] * fy[i]);
      console.log(` iter ${iter + 1}/${ITERATIONS} max_disp=${maxDisp.toFixed(2)} avg_force=${(totalForce / N).toFixed(2)}`);
      for (const idx of skeletonIndices) {
        totalForce += Math.sqrt(fx[idx] * fx[idx] + fy[idx] * fy[idx]);
      }
      console.log(
        ` iter ${iter + 1}/${ITERATIONS} max_disp=${maxDisp.toFixed(2)} avg_force=${(totalForce / skeletonIndices.length).toFixed(2)}`
      );
    }
  }

  const elapsed = performance.now() - t0;
  console.log(`Force simulation done in ${(elapsed / 1000).toFixed(1)}s`);
} else {
  console.log("Force simulation SKIPPED (ENABLE_FORCE_SIM = false)");
  console.log(`Skeleton force sim done in ${(elapsed / 1000).toFixed(1)}s`);
}

// ══════════════════════════════════════════════════════════
// Phase 2: Fill subtrees
// ══════════════════════════════════════════════════════════

{
  const queue: number[] = Array.from(positioned);
  let head = 0;
  while (head < queue.length) {
    const parentId = queue[head++];
    const parentIdx = idToIdx.get(parentId)!;
    const kids = childrenOf.get(parentId);

    if (!kids) continue;

    const unpositionedKids = kids.filter(k => !positioned.has(k));
    if (unpositionedKids.length === 0) continue;

    unpositionedKids.sort((a, b) => subtreeSize.get(b)! - subtreeSize.get(a)!);

    const px = x[parentIdx];
    const py = y[parentIdx];

    // Determine available angular sector
    // If parent is SKELETON, we reset to full 360° (local root behavior).
    // If parent is NORMAL, we strictly use the sector allocated to it by its parent.
    const isSkeleton = skeleton.has(parentId);
    let currentAngle = isSkeleton ? 0 : sectorStart[parentIdx];
    const endAngle = isSkeleton ? 2 * Math.PI : sectorEnd[parentIdx];
    const totalSpan = endAngle - currentAngle;

    const totalWeight = unpositionedKids.reduce((s, k) => s + subtreeSize.get(k)!, 0);

    for (const kid of unpositionedKids) {
      const kidIdx = idToIdx.get(kid)!;
      const w = subtreeSize.get(kid)!;

      // Allocate a portion of the available sector based on subtree weight
      const span = (w / totalWeight) * totalSpan;

      // Track the sector for this child so ITS children are constrained
      sectorStart[kidIdx] = currentAngle;
      sectorEnd[kidIdx] = currentAngle + span;

      const midAngle = currentAngle + span / 2;
      const r = RADIUS_PER_DEPTH;

      x[kidIdx] = px + r * Math.cos(midAngle);
      y[kidIdx] = py + r * Math.sin(midAngle);

      positioned.add(kid);
      queue.push(kid);

      currentAngle += span;
    }
  }
}
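// Illustrative example of the sector logic above: a skeleton parent re-opens the full
// 2π circle for its unpositioned children, so three children with subtree weights
// 2, 1 and 1 receive spans of π, π/2 and π/2. A non-skeleton parent instead subdivides
// only the sector it was given, which keeps every subtree inside its own wedge.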

console.log(`Phase 2: Positioned ${positioned.size} total nodes (of ${N})`);

// ══════════════════════════════════════════════════════════
// Phase 3: Final Relaxation (Force Sim on ALL nodes)
// ══════════════════════════════════════════════════════════
{
  console.log(`Phase 3: Final relaxation on ${N} nodes...`);
  const FINAL_ITERATIONS = 50;
  const FINAL_MAX_DISP = 5.0;
  const BH_THETA = 0.5;

  // Slightly weaker forces for the final polish: repulsion stays the same, but
  // displacement is capped strongly. Barnes-Hut keeps this fast at ~10k nodes.

  for (let iter = 0; iter < FINAL_ITERATIONS; iter++) {
    const rootBH = buildBHTree(nodeIds, x, y);
    const fx = new Float64Array(N);
    const fy = new Float64Array(N);

    // 1. Repulsion via Barnes-Hut
    for (let i = 0; i < N; i++) {
      calcBHForce(rootBH, x[i], y[i], fx, fy, i, BH_THETA);
    }

    // 2. Edge attraction
    // Standard spring: f = k * (d - L), where L is the rest length.
    // Skeleton edges (both endpoints primary) keep SKELETON_STEP as their rest length
    // with proportionally weaker stiffness; all other edges use EDGE_LENGTH.
    // At ~10k nodes the goal here is only to relax overlaps, not to re-layout the tree.

    for (const [uId, vId] of allEdges) {
      const u = idToIdx.get(uId)!;
      const v = idToIdx.get(vId)!;
      const dx = x[v] - x[u];
      const dy = y[v] - y[u];
      const d = Math.sqrt(dx * dx + dy * dy) || MIN_DIST;

      // Skeletal edge (both endpoints primary): longer rest length; otherwise normal.
      let restLen = EDGE_LENGTH;
      let k = ATTRACTION_K;

      if (primaryNodes.has(uId) && primaryNodes.has(vId)) {
        restLen = SKELETON_STEP;
        k = ATTRACTION_K / LONG_EDGE_MULTIPLIER;
      }

      const displacement = d - restLen;
      const f = k * displacement;
      const ux = dx / d, uy = dy / d;

      fx[u] += ux * f;
      fy[u] += uy * f;
      fx[v] -= ux * f;
      fy[v] -= uy * f;
    }

    // 3. Apply forces
    let totalDisp = 0;
    let maxD = 0;
    const currentLimit = FINAL_MAX_DISP * (1 - iter / FINAL_ITERATIONS); // Cool down

    for (let i = 0; i < N; i++) {
      if (nodeIds[i] === root) continue; // Pin root

      const dx = fx[i];
      const dy = fy[i];
      const dist = Math.sqrt(dx * dx + dy * dy);

      if (dist > 0) {
        const limit = Math.min(currentLimit, dist);
        const scale = limit / dist;
        x[i] += dx * scale;
        y[i] += dy * scale;
        totalDisp += limit;
        maxD = Math.max(maxD, limit);
      }
    }

    if (iter % 10 === 0) {
      console.log(`  Phase 3 iter ${iter}: max movement ${maxD.toFixed(3)}`);
    }
  }
}
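// The displacement cap above cools linearly: with FINAL_MAX_DISP = 5.0 over 50 iterations
// it is 5.0 at iter 0, 2.5 at iter 25, and 0.1 at iter 49, so late iterations barely move nodes.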


// ══════════════════════════════════════════════════════════
// Write output
// ══════════════════════════════════════════════════════════
@@ -343,12 +488,152 @@ const outPath = join(PUBLIC_DIR, "node_positions.csv");
writeFileSync(outPath, outLines.join("\n") + "\n");
console.log(`Wrote ${N} positions to ${outPath}`);

// Write edges (so the renderer can draw them)
const edgeLines: string[] = ["source,target"];
for (const [child, parent] of parentOf) {
  edgeLines.push(`${child},${parent}`);
// Edges are provided via primary_edges.csv and secondary_edges.csv generated by generate_tree.ts
// We do not write a consolidated edges.csv anymore.
console.log(`Layout complete.`);

// ══════════════════════════════════════════════════════════
// Barnes-Hut Helpers
// ══════════════════════════════════════════════════════════

interface BHNode {
  mass: number;
  x: number;
  y: number;
  minX: number;
  maxX: number;
  minY: number;
  maxY: number;
  children?: BHNode[]; // NW, NE, SW, SE
  pointIdx?: number;   // Leaf node index
}

const edgesPath = join(PUBLIC_DIR, "edges.csv");
writeFileSync(edgesPath, edgeLines.join("\n") + "\n");
console.log(`Wrote ${edges.length} edges to ${edgesPath}`);
function buildBHTree(indices: number[], x: Float64Array, y: Float64Array): BHNode {
  // Determine bounds
  let minX = Infinity, maxX = -Infinity, minY = Infinity, maxY = -Infinity;
  for (let i = 0; i < x.length; i++) {
    if (x[i] < minX) minX = x[i];
    if (x[i] > maxX) maxX = x[i];
    if (y[i] < minY) minY = y[i];
    if (y[i] > maxY) maxY = y[i];
  }
  // Square bounds for quadtree
  const cx = (minX + maxX) / 2;
  const cy = (minY + maxY) / 2;
  const halfDim = Math.max(maxX - minX, maxY - minY) / 2 + 0.01;

  const root: BHNode = {
    mass: 0, x: 0, y: 0,
    minX: cx - halfDim, maxX: cx + halfDim,
    minY: cy - halfDim, maxY: cy + halfDim
  };

  for (let i = 0; i < x.length; i++) {
    insertBH(root, i, x[i], y[i]);
  }
  calcBHMass(root);
  return root;
}
function insertBH(node: BHNode, idx: number, px: number, py: number) {
|
||||
if (!node.children && node.pointIdx === undefined) {
|
||||
// Empty leaf -> Put point here
|
||||
node.pointIdx = idx;
|
||||
return;
|
||||
}
|
||||
|
||||
if (!node.children && node.pointIdx !== undefined) {
|
||||
// Occupied leaf -> Subdivide
|
||||
const oldIdx = node.pointIdx;
|
||||
node.pointIdx = undefined;
|
||||
subdivideBH(node);
|
||||
// Re-insert old point and new point
|
||||
// Note: oldIdx needs x,y. But we don't pass array. Wait, BHTree function scope?
|
||||
// We need explicit x,y access. But passing array everywhere is ugly.
|
||||
// Hack: The recursive function needs access to global x/y or passed in values.
|
||||
// But here we are inserting one by one.
|
||||
// Wait, to re-insert oldIdx, WE NEED ITS COORDS.
|
||||
// This simple 'insertBH' signature is insufficient unless we capture x/y closure or pass them.
|
||||
// Let's assume x, y are available globally or we redesign.
|
||||
// Since this script is top-level, x and y are available in scope!
|
||||
// But `insertBH` is defined outside main scope if hoisted? No, it's inside module.
|
||||
// If defined as function `function insertBH`, it captures module scope `x`, `y`?
|
||||
// `x` and `y` are const Float64Array defined at line ~120.
|
||||
// So yes, they are captured!
|
||||
insertBH(node, oldIdx, x[oldIdx], y[oldIdx]);
|
||||
// Then fall through to insert new point
|
||||
}
|
||||
|
||||
if (node.children) {
|
||||
const mx = (node.minX + node.maxX) / 2;
|
||||
const my = (node.minY + node.maxY) / 2;
|
||||
let q = 0;
|
||||
if (px > mx) q += 1; // East
|
||||
if (py > my) q += 2; // South
|
||||
insertBH(node.children[q], idx, px, py);
|
||||
}
|
||||
}
|
||||
|
||||
function subdivideBH(node: BHNode) {
|
||||
const mx = (node.minX + node.maxX) / 2;
|
||||
const my = (node.minY + node.maxY) / 2;
|
||||
node.children = [
|
||||
{ mass: 0, x: 0, y: 0, minX: node.minX, maxX: mx, minY: node.minY, maxY: my }, // NW
|
||||
{ mass: 0, x: 0, y: 0, minX: mx, maxX: node.maxX, minY: node.minY, maxY: my }, // NE
|
||||
{ mass: 0, x: 0, y: 0, minX: node.minX, maxX: mx, minY: my, maxY: node.maxY }, // SW
|
||||
{ mass: 0, x: 0, y: 0, minX: mx, maxX: node.maxX, minY: my, maxY: node.maxY } // SE
|
||||
];
|
||||
}
|
||||
|
||||
function calcBHMass(node: BHNode) {
|
||||
if (node.pointIdx !== undefined) {
|
||||
node.mass = 1;
|
||||
node.x = x[node.pointIdx];
|
||||
node.y = y[node.pointIdx];
|
||||
return;
|
||||
}
|
||||
if (node.children) {
|
||||
let m = 0, cx = 0, cy = 0;
|
||||
for (const c of node.children) {
|
||||
calcBHMass(c);
|
||||
m += c.mass;
|
||||
cx += c.x * c.mass;
|
||||
cy += c.y * c.mass;
|
||||
}
|
||||
node.mass = m;
|
||||
if (m > 0) {
|
||||
node.x = cx / m;
|
||||
node.y = cy / m;
|
||||
} else {
|
||||
// Center of box if empty
|
||||
node.x = (node.minX + node.maxX) / 2;
|
||||
node.y = (node.minY + node.maxY) / 2;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function calcBHForce(node: BHNode, px: number, py: number, fx: Float64Array, fy: Float64Array, idx: number, theta: number) {
|
||||
const dx = px - node.x;
|
||||
const dy = py - node.y;
|
||||
const d2 = dx * dx + dy * dy;
|
||||
const dist = Math.sqrt(d2);
|
||||
const width = node.maxX - node.minX;
|
||||
|
||||
if (width / dist < theta || !node.children) {
|
||||
// Treat as single body
|
||||
if (node.mass > 0 && (node.pointIdx !== idx)) {
|
||||
// Apply repulsion
|
||||
// F = K * mass / dist^2
|
||||
// Direction: from node to p
|
||||
const dEff = Math.max(dist, MIN_DIST);
|
||||
const f = (REPULSION_K * node.mass) / (dEff * dEff); // d^2 repulsion
|
||||
fx[idx] += (dx / dEff) * f;
|
||||
fy[idx] += (dy / dEff) * f;
|
||||
}
|
||||
} else {
|
||||
// Recurse
|
||||
for (const c of node.children) {
|
||||
calcBHForce(c, px, py, fx, fy, idx, theta);
|
||||
}
|
||||
}
|
||||
}
|
||||
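// Magnitude check, assuming the default constants above: two single nodes 10 units apart
// repel with f = 80 * 1 / 10^2 = 0.8, while a far-away cluster of mass 100 at distance
// 1000 contributes only 80 * 100 / 1e6 = 0.008.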

@@ -2,15 +2,25 @@
 * Random Tree Generator
 *
 * Generates a random tree with 1–MAX_CHILDREN children per node.
 * Exports a function that returns the tree data in memory.
 * Splits edges into primary (depth ≤ PRIMARY_DEPTH) and secondary.
 *
 * Usage: npx tsx scripts/generate_tree.ts
 */

import { writeFileSync } from "fs";
import { join, dirname } from "path";
import { fileURLToPath } from "url";

const __dirname = dirname(fileURLToPath(import.meta.url));
const PUBLIC_DIR = join(__dirname, "..", "public");

// ══════════════════════════════════════════════════════════
// Configuration
// ══════════════════════════════════════════════════════════

const TARGET_NODES = 100000; // Approximate number of nodes to generate
const MAX_CHILDREN = 3;      // Each node gets 1..MAX_CHILDREN children
const TARGET_NODES = 10000;  // Approximate number of nodes to generate
const MAX_CHILDREN = 4;      // Each node gets 1..MAX_CHILDREN children
const PRIMARY_DEPTH = 3;     // Nodes at depth ≤ this form the primary skeleton
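// Size check (an upper bound, since each node gets a random 1..MAX_CHILDREN children):
// with MAX_CHILDREN = 4 and PRIMARY_DEPTH = 3 the primary skeleton holds at most
// 1 + 4 + 16 + 64 = 85 nodes; everything deeper is classified as secondary.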

// ══════════════════════════════════════════════════════════
// Tree data types
@@ -21,6 +31,10 @@ export interface TreeData {
  nodeCount: number;
  childrenOf: Map<number, number[]>;
  parentOf: Map<number, number>;
  depthOf: Map<number, number>;
  primaryNodes: Set<number>;               // all nodes at depth ≤ PRIMARY_DEPTH
  primaryEdges: Array<[number, number]>;   // [child, parent] edges within primary
  secondaryEdges: Array<[number, number]>; // remaining edges
}

// ══════════════════════════════════════════════════════════
@@ -30,14 +44,17 @@ export interface TreeData {
export function generateTree(): TreeData {
  const childrenOf = new Map<number, number[]>();
  const parentOf = new Map<number, number>();
  const depthOf = new Map<number, number>();

  const root = 0;
  depthOf.set(root, 0);
  let nextId = 1;
  const queue: number[] = [root];
  let head = 0;

  while (head < queue.length && nextId < TARGET_NODES) {
    const parent = queue[head++];
    const parentDepth = depthOf.get(parent)!;
    const nKids = 1 + Math.floor(Math.random() * MAX_CHILDREN); // 1..MAX_CHILDREN

    const kids: number[] = [];
@@ -45,17 +62,71 @@ export function generateTree(): TreeData {
      const child = nextId++;
      kids.push(child);
      parentOf.set(child, parent);
      depthOf.set(child, parentDepth + 1);
      queue.push(child);
    }
    childrenOf.set(parent, kids);
  }

  console.log(`Generated tree: ${nextId} nodes, ${parentOf.size} edges, root=${root}`);
  // Classify edges and nodes by depth
  const primaryNodes = new Set<number>();
  const primaryEdges: Array<[number, number]> = [];
  const secondaryEdges: Array<[number, number]> = [];

  // Root is always primary
  primaryNodes.add(root);

  for (const [child, parent] of parentOf) {
    const childDepth = depthOf.get(child)!;
    if (childDepth <= PRIMARY_DEPTH) {
      primaryNodes.add(child);
      primaryNodes.add(parent);
      primaryEdges.push([child, parent]);
    } else {
      secondaryEdges.push([child, parent]);
    }
  }

  console.log(
    `Generated tree: ${nextId} nodes, ` +
    `${primaryEdges.length} primary edges (depth ≤ ${PRIMARY_DEPTH}), ` +
    `${secondaryEdges.length} secondary edges`
  );

  return {
    root,
    nodeCount: nextId,
    childrenOf,
    parentOf,
    depthOf,
    primaryNodes,
    primaryEdges,
    secondaryEdges,
  };
}

// ══════════════════════════════════════════════════════════
// Run if executed directly
// ══════════════════════════════════════════════════════════

if (import.meta.url === `file://${process.argv[1]}`) {
  const data = generateTree();

  // Write primary_edges.csv
  const pLines: string[] = ["source,target"];
  for (const [child, parent] of data.primaryEdges) {
    pLines.push(`${child},${parent}`);
  }
  const pPath = join(PUBLIC_DIR, "primary_edges.csv");
  writeFileSync(pPath, pLines.join("\n") + "\n");
  console.log(`Wrote ${data.primaryEdges.length} edges to ${pPath}`);

  // Write secondary_edges.csv
  const sLines: string[] = ["source,target"];
  for (const [child, parent] of data.secondaryEdges) {
    sLines.push(`${child},${parent}`);
  }
  const sPath = join(PUBLIC_DIR, "secondary_edges.csv");
  writeFileSync(sPath, sLines.join("\n") + "\n");
  console.log(`Wrote ${data.secondaryEdges.length} edges to ${sPath}`);
}