#!/usr/bin/env npx tsx
/**
 * Tree-Aware Force Layout
 *
 * Generates a random tree (via generate_tree), computes a radial tree layout,
 * then applies gentle force refinement and writes node_positions.csv.
 *
 * Usage: npm run layout
 */

import { writeFileSync } from "fs";
import { join, dirname } from "path";
import { fileURLToPath } from "url";
import { generateTree } from "./generate_tree.js";
// Resolve this module's directory (ESM modules have no built-in __dirname).
const __dirname = dirname(fileURLToPath(import.meta.url));

// Destination for generated CSV artifacts: ../public relative to this script.
const PUBLIC_DIR = join(__dirname, "..", "public");

// ══════════════════════════════════════════════════════════
// Configuration
// ══════════════════════════════════════════════════════════

const ENABLE_FORCE_SIM = true; // Set to false to skip force simulation
const ITERATIONS = 100; // Force iterations (gentle refinement pass)
const REPULSION_K = 80; // Repulsion strength (1% of original 8000)
const EDGE_LENGTH = 120; // Desired edge rest length
const ATTRACTION_K = 0.0002; // Spring stiffness for edges (1% of original 0.02)
const THETA = 0.7; // Barnes-Hut opening angle: larger = faster, less accurate
const INITIAL_MAX_DISP = 15; // Starting cap on per-iteration node displacement
const COOLING = 0.998; // Displacement cap multiplier per iteration (very slow cooling)
const MIN_DIST = 0.5; // Distance floor to avoid divide-by-zero / force blow-up
const PRINT_EVERY = 10; // Print progress every N iterations

// Scale radius so the tree is nicely spread
const RADIUS_PER_DEPTH = EDGE_LENGTH * 1.2;

// ── Special nodes with longer parent-edges ──
// Add vertex IDs here to give them longer edges to their parent.
// These nodes (and all their descendants) will be pushed further out.
const LONG_EDGE_NODES = new Set<number>([
  // e.g. 42, 99, 150
]);
const LONG_EDGE_MULTIPLIER = 3.0; // How many times longer than normal

// ══════════════════════════════════════════════════════════
|
|
// Generate tree (in-memory)
|
|
// ══════════════════════════════════════════════════════════
|
|
|
|
const { root, nodeCount: N, childrenOf, parentOf } = generateTree();
|
|
|
|
const nodeIds: number[] = [];
|
|
for (let i = 0; i < N; i++) nodeIds.push(i);
|
|
|
|
// Dense index mapping (identity since IDs are 0..N-1)
|
|
const idToIdx = new Map<number, number>();
|
|
for (let i = 0; i < N; i++) idToIdx.set(i, i);
|
|
|
|
// Edge list as index pairs (child, parent)
|
|
const edges: Array<[number, number]> = [];
|
|
for (const [child, parent] of parentOf) {
|
|
edges.push([child, parent]);
|
|
}
|
|
|
|
// Per-node neighbor list (for edge traversal)
|
|
const neighbors: number[][] = Array.from({ length: N }, () => []);
|
|
for (const [a, b] of edges) {
|
|
neighbors[a].push(b);
|
|
neighbors[b].push(a);
|
|
}
|
|
|
|
console.log(`Tree: ${N} nodes, ${edges.length} edges, root=${root}`);
|
|
|
|
// ══════════════════════════════════════════════════════════
// Step 1: Radial tree layout (generous spacing, no crossings)
// ══════════════════════════════════════════════════════════

// Node coordinates, indexed by dense node index.
const x = new Float64Array(N);
const y = new Float64Array(N);
// Tree depth of each node (root = 0).
const depth = new Uint32Array(N);
const nodeRadius = new Float64Array(N); // cumulative radius from root

// Compute subtree sizes
|
|
const subtreeSize = new Uint32Array(N).fill(1);
|
|
{
|
|
const rootIdx = idToIdx.get(root)!;
|
|
const stack: Array<{ idx: number; phase: "enter" | "exit" }> = [
|
|
{ idx: rootIdx, phase: "enter" },
|
|
];
|
|
while (stack.length > 0) {
|
|
const { idx, phase } = stack.pop()!;
|
|
if (phase === "enter") {
|
|
stack.push({ idx, phase: "exit" });
|
|
const kids = childrenOf.get(nodeIds[idx]);
|
|
if (kids) {
|
|
for (const kid of kids) {
|
|
stack.push({ idx: idToIdx.get(kid)!, phase: "enter" });
|
|
}
|
|
}
|
|
} else {
|
|
const kids = childrenOf.get(nodeIds[idx]);
|
|
if (kids) {
|
|
for (const kid of kids) {
|
|
subtreeSize[idx] += subtreeSize[idToIdx.get(kid)!];
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
// Compute depths & max depth
|
|
let maxDepth = 0;
|
|
{
|
|
const rootIdx = idToIdx.get(root)!;
|
|
const stack: Array<{ idx: number; d: number }> = [{ idx: rootIdx, d: 0 }];
|
|
while (stack.length > 0) {
|
|
const { idx, d } = stack.pop()!;
|
|
depth[idx] = d;
|
|
if (d > maxDepth) maxDepth = d;
|
|
const kids = childrenOf.get(nodeIds[idx]);
|
|
if (kids) {
|
|
for (const kid of kids) {
|
|
stack.push({ idx: idToIdx.get(kid)!, d: d + 1 });
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
// BFS radial assignment (cumulative radii to support per-edge lengths)
//
// Each node receives an angular sector [aStart, aEnd); its children split
// that sector proportionally to their subtree sizes, which keeps sibling
// subtrees from overlapping (no edge crossings in the initial layout).
{
  const rootIdx = idToIdx.get(root)!;
  // Root sits at the origin with the full circle as its sector.
  x[rootIdx] = 0;
  y[rootIdx] = 0;
  nodeRadius[rootIdx] = 0;

  interface Entry {
    idx: number;    // dense node index
    d: number;      // depth (carried along; drives queue ordering only)
    aStart: number; // start of this node's angular sector (radians)
    aEnd: number;   // end of this node's angular sector (radians)
  }

  // FIFO queue with a moving head pointer (avoids O(n) Array.shift).
  const queue: Entry[] = [{ idx: rootIdx, d: 0, aStart: 0, aEnd: 2 * Math.PI }];
  let head = 0;

  while (head < queue.length) {
    const { idx, d, aStart, aEnd } = queue[head++];
    const kids = childrenOf.get(nodeIds[idx]);
    if (!kids || kids.length === 0) continue;

    // Sort children by subtree size (largest sectors together for balance)
    const sortedKids = [...kids].sort(
      (a, b) => (subtreeSize[idToIdx.get(b)!]) - (subtreeSize[idToIdx.get(a)!])
    );

    // Total weight for proportional sector division (always >= kids.length).
    const totalWeight = sortedKids.reduce(
      (s, k) => s + subtreeSize[idToIdx.get(k)!], 0
    );

    // Sweep the parent's sector, handing each child a slice proportional
    // to its subtree size and placing it at the slice's bisector.
    let angle = aStart;
    for (const kid of sortedKids) {
      const kidIdx = idToIdx.get(kid)!;
      const w = subtreeSize[kidIdx];
      const sector = (w / totalWeight) * (aEnd - aStart);
      const mid = angle + sector / 2;

      // Cumulative radius: parent's radius + edge step (longer for special nodes)
      const step = LONG_EDGE_NODES.has(kid)
        ? RADIUS_PER_DEPTH * LONG_EDGE_MULTIPLIER
        : RADIUS_PER_DEPTH;
      const r = nodeRadius[idx] + step;
      nodeRadius[kidIdx] = r;

      x[kidIdx] = r * Math.cos(mid);
      y[kidIdx] = r * Math.sin(mid);

      queue.push({ idx: kidIdx, d: d + 1, aStart: angle, aEnd: angle + sector });
      angle += sector;
    }
  }
}

console.log(`Radial layout done (depth=${maxDepth}, radius_step=${RADIUS_PER_DEPTH})`);

// ══════════════════════════════════════════════════════════
// Step 2: Gentle force refinement (preserves non-crossing)
// ══════════════════════════════════════════════════════════

// Barnes-Hut quadtree for repulsion
interface BHNode {
  cx: number; cy: number;      // center of mass of all bodies in this cell
  mass: number;                // number of bodies aggregated here (0 = empty cell)
  size: number;                // side length of this square cell
  children: (BHNode | null)[]; // four child quadrants, indexed by (qy * 2 + qx)
  bodyIdx: number;             // body index when this is a single-body leaf, else -1
}

function buildBHTree(): BHNode {
|
|
let minX = Infinity, maxX = -Infinity, minY = Infinity, maxY = -Infinity;
|
|
for (let i = 0; i < N; i++) {
|
|
if (x[i] < minX) minX = x[i];
|
|
if (x[i] > maxX) maxX = x[i];
|
|
if (y[i] < minY) minY = y[i];
|
|
if (y[i] > maxY) maxY = y[i];
|
|
}
|
|
const size = Math.max(maxX - minX, maxY - minY, 1) * 1.01;
|
|
const cx = (minX + maxX) / 2;
|
|
const cy = (minY + maxY) / 2;
|
|
|
|
const root: BHNode = {
|
|
cx: 0, cy: 0, mass: 0, size,
|
|
children: [null, null, null, null], bodyIdx: -1,
|
|
};
|
|
|
|
for (let i = 0; i < N; i++) {
|
|
insert(root, i, cx, cy, size);
|
|
}
|
|
return root;
|
|
}
|
|
|
|
function insert(node: BHNode, idx: number, ncx: number, ncy: number, ns: number): void {
|
|
if (node.mass === 0) {
|
|
node.bodyIdx = idx;
|
|
node.cx = x[idx]; node.cy = y[idx];
|
|
node.mass = 1;
|
|
return;
|
|
}
|
|
if (node.bodyIdx >= 0) {
|
|
const old = node.bodyIdx;
|
|
node.bodyIdx = -1;
|
|
putInQuadrant(node, old, ncx, ncy, ns);
|
|
}
|
|
putInQuadrant(node, idx, ncx, ncy, ns);
|
|
const tm = node.mass + 1;
|
|
node.cx = (node.cx * node.mass + x[idx]) / tm;
|
|
node.cy = (node.cy * node.mass + y[idx]) / tm;
|
|
node.mass = tm;
|
|
}
|
|
|
|
function putInQuadrant(node: BHNode, idx: number, ncx: number, ncy: number, ns: number): void {
|
|
const hs = ns / 2;
|
|
const qx = x[idx] >= ncx ? 1 : 0;
|
|
const qy = y[idx] >= ncy ? 1 : 0;
|
|
const q = qy * 2 + qx;
|
|
const ccx = ncx + (qx ? hs / 2 : -hs / 2);
|
|
const ccy = ncy + (qy ? hs / 2 : -hs / 2);
|
|
if (!node.children[q]) {
|
|
node.children[q] = {
|
|
cx: 0, cy: 0, mass: 0, size: hs,
|
|
children: [null, null, null, null], bodyIdx: -1,
|
|
};
|
|
}
|
|
insert(node.children[q]!, idx, ccx, ccy, hs);
|
|
}
|
|
|
|
function repulse(node: BHNode, idx: number, fx: Float64Array, fy: Float64Array): void {
|
|
if (node.mass === 0 || node.bodyIdx === idx) return;
|
|
const dx = x[idx] - node.cx;
|
|
const dy = y[idx] - node.cy;
|
|
const d2 = dx * dx + dy * dy;
|
|
const d = Math.sqrt(d2) || MIN_DIST;
|
|
|
|
if (node.bodyIdx >= 0 || (node.size / d) < THETA) {
|
|
const f = REPULSION_K * node.mass / (d2 + MIN_DIST);
|
|
fx[idx] += (dx / d) * f;
|
|
fy[idx] += (dy / d) * f;
|
|
return;
|
|
}
|
|
for (const c of node.children) {
|
|
if (c) repulse(c, idx, fx, fy);
|
|
}
|
|
}
|
|
|
|
// ── Force simulation ──
// Gentle refinement pass: forces are deliberately weak (1% of typical
// strengths) and displacement is capped, so the radial layout's structure
// — and its non-crossing property — is preserved while spacing relaxes.
if (ENABLE_FORCE_SIM) {
  console.log(`Applying gentle forces (${ITERATIONS} steps, 1% strength)...`);
  const t0 = performance.now();
  let maxDisp = INITIAL_MAX_DISP;

  for (let iter = 0; iter < ITERATIONS; iter++) {
    // Per-iteration force accumulators, one slot per node.
    const fx = new Float64Array(N);
    const fy = new Float64Array(N);

    // 1. Repulsion — rebuild the quadtree each iteration (positions moved).
    const tree = buildBHTree();
    for (let i = 0; i < N; i++) {
      repulse(tree, i, fx, fy);
    }

    // 2. Edge attraction (spring toward per-edge rest length)
    for (const [a, b] of edges) {
      const dx = x[b] - x[a];
      const dy = y[b] - y[a];
      const d = Math.sqrt(dx * dx + dy * dy) || MIN_DIST;
      // Edges touching a LONG_EDGE_NODES member get a longer rest length,
      // matching the longer radial step used in the initial layout.
      const aId = nodeIds[a], bId = nodeIds[b];
      const isLong = LONG_EDGE_NODES.has(aId) || LONG_EDGE_NODES.has(bId);
      const restLen = isLong ? EDGE_LENGTH * LONG_EDGE_MULTIPLIER : EDGE_LENGTH;
      // Hooke's law: force proportional to deviation from rest length,
      // applied equal-and-opposite along the edge's unit vector.
      const displacement = d - restLen;
      const f = ATTRACTION_K * displacement;
      const ux = dx / d, uy = dy / d;
      fx[a] += ux * f;
      fy[a] += uy * f;
      fx[b] -= ux * f;
      fy[b] -= uy * f;
    }

    // 3. Apply forces with displacement cap (cooling reduces it over time)
    for (let i = 0; i < N; i++) {
      const mag = Math.sqrt(fx[i] * fx[i] + fy[i] * fy[i]);
      if (mag > 0) {
        // Scale so the step length is min(maxDisp, mag) — caps how far any
        // node can move in one iteration without changing its direction.
        const cap = Math.min(maxDisp, mag) / mag;
        x[i] += fx[i] * cap;
        y[i] += fy[i] * cap;
      }
    }

    // 4. Cool down
    maxDisp *= COOLING;

    // Periodic progress report: current cap and mean force magnitude.
    if ((iter + 1) % PRINT_EVERY === 0) {
      let totalForce = 0;
      for (let i = 0; i < N; i++) totalForce += Math.sqrt(fx[i] * fx[i] + fy[i] * fy[i]);
      console.log(` iter ${iter + 1}/${ITERATIONS} max_disp=${maxDisp.toFixed(2)} avg_force=${(totalForce / N).toFixed(2)}`);
    }
  }

  const elapsed = performance.now() - t0;
  console.log(`Force simulation done in ${(elapsed / 1000).toFixed(1)}s`);
} else {
  console.log("Force simulation SKIPPED (ENABLE_FORCE_SIM = false)");
}

// ══════════════════════════════════════════════════════════
|
|
// Write output
|
|
// ══════════════════════════════════════════════════════════
|
|
|
|
// Write node positions
|
|
const outLines: string[] = ["vertex,x,y"];
|
|
for (let i = 0; i < N; i++) {
|
|
outLines.push(`${nodeIds[i]},${x[i]},${y[i]}`);
|
|
}
|
|
|
|
const outPath = join(PUBLIC_DIR, "node_positions.csv");
|
|
writeFileSync(outPath, outLines.join("\n") + "\n");
|
|
console.log(`Wrote ${N} positions to ${outPath}`);
|
|
|
|
// Write edges (so the renderer can draw them)
|
|
const edgeLines: string[] = ["source,target"];
|
|
for (const [child, parent] of parentOf) {
|
|
edgeLines.push(`${child},${parent}`);
|
|
}
|
|
|
|
const edgesPath = join(PUBLIC_DIR, "edges.csv");
|
|
writeFileSync(edgesPath, edgeLines.join("\n") + "\n");
|
|
console.log(`Wrote ${edges.length} edges to ${edgesPath}`);
|