Inicia la integración de AnzoGraph

This commit is contained in:
Oxy8
2026-02-13 16:39:41 -03:00
parent f3db6af958
commit 343055b7dd
13 changed files with 666 additions and 20472 deletions

View File

@@ -1,12 +1,17 @@
#!/usr/bin/env npx tsx
/**
* Two-Phase Tree Layout
* Graph Layout
*
* Phase 1: Position a primary skeleton (nodes from primary_edges.csv)
* with generous spacing, then force-simulate the skeleton.
* Phase 2: Fill in remaining subtrees (secondary_edges.csv) within sectors.
* Computes a 2D layout for a general graph (not necessarily a tree).
*
* - Primary nodes (from primary_edges.csv) are placed first in a radial layout
* - Remaining nodes are placed near their connected primary neighbors
* - Barnes-Hut force simulation relaxes the layout
*
* Usage: npm run layout-only (after generating tree)
* Reads: primary_edges.csv, secondary_edges.csv
* Writes: node_positions.csv
*
* Usage: npx tsx scripts/compute_layout.ts
*/
import { writeFileSync, readFileSync, existsSync } from "fs";
@@ -20,28 +25,21 @@ const PUBLIC_DIR = join(__dirname, "..", "public");
// Configuration
// ══════════════════════════════════════════════════════════
const ENABLE_FORCE_SIM = true; // Set to false to skip force simulation
const ITERATIONS = 100; // Force iterations
const REPULSION_K = 80; // Repulsion strength
const EDGE_LENGTH = 120; // Desired edge rest length
const ATTRACTION_K = 0.0002; // Spring stiffness for edges
const INITIAL_MAX_DISP = 15; // Starting max displacement
const COOLING = 0.998; // Cooling per iteration
const ITERATIONS = 200; // Force iterations
const REPULSION_K = 200; // Repulsion strength
const EDGE_LENGTH = 80; // Desired edge rest length
const ATTRACTION_K = 0.005; // Spring stiffness for edges
const INITIAL_MAX_DISP = 20; // Starting max displacement
const COOLING = 0.995; // Cooling per iteration
const MIN_DIST = 0.5;
const PRINT_EVERY = 10; // Print progress every N iterations
// Scale radius so the tree is nicely spread
const RADIUS_PER_DEPTH = EDGE_LENGTH * 1.2;
// How many times longer skeleton edges are vs. normal edges
const LONG_EDGE_MULTIPLIER = 39.0;
const SKELETON_STEP = RADIUS_PER_DEPTH * LONG_EDGE_MULTIPLIER;
const PRINT_EVERY = 20; // Print progress every N iterations
const BH_THETA = 0.8; // Barnes-Hut opening angle
// Primary node radial placement
const PRIMARY_RADIUS = 300; // Radius for primary node ring
// ══════════════════════════════════════════════════════════
// Read tree data from CSVs
// Read edge data from CSVs
// ══════════════════════════════════════════════════════════
const primaryPath = join(PUBLIC_DIR, "primary_edges.csv");
@@ -51,16 +49,14 @@ if (!existsSync(primaryPath) || !existsSync(secondaryPath)) {
console.error(`Error: Missing input files!`);
console.error(` Expected: ${primaryPath}`);
console.error(` Expected: ${secondaryPath}`);
console.error(` Run 'npx tsx scripts/generate_tree.ts' first.`);
console.error(` Run 'npx tsx scripts/fetch_from_db.ts' first.`);
process.exit(1);
}
// ── Helper to parse CSV edge list ──
function parseEdges(path: string): Array<[number, number]> {
const content = readFileSync(path, "utf-8");
const lines = content.trim().split("\n");
const edges: Array<[number, number]> = [];
// Skip header "source,target"
for (let i = 1; i < lines.length; i++) {
const line = lines[i].trim();
if (!line) continue;
@@ -76,409 +72,171 @@ const primaryEdges = parseEdges(primaryPath);
const secondaryEdges = parseEdges(secondaryPath);
const allEdges = [...primaryEdges, ...secondaryEdges];
// ── Reconstruct tree connectivity ──
// ══════════════════════════════════════════════════════════
// Build adjacency
// ══════════════════════════════════════════════════════════
const childrenOf = new Map<number, number[]>();
const parentOf = new Map<number, number>();
const allNodes = new Set<number>();
const primaryNodes = new Set<number>(); // Nodes involved in primary edges
const primaryNodes = new Set<number>();
const neighbors = new Map<number, Set<number>>();
// Process primary edges first (to classify primary nodes)
for (const [child, parent] of primaryEdges) {
allNodes.add(child);
allNodes.add(parent);
primaryNodes.add(child);
primaryNodes.add(parent);
parentOf.set(child, parent);
if (!childrenOf.has(parent)) childrenOf.set(parent, []);
childrenOf.get(parent)!.push(child);
function addNeighbor(a: number, b: number) {
if (!neighbors.has(a)) neighbors.set(a, new Set());
neighbors.get(a)!.add(b);
if (!neighbors.has(b)) neighbors.set(b, new Set());
neighbors.get(b)!.add(a);
}
// Process secondary edges
for (const [child, parent] of secondaryEdges) {
allNodes.add(child);
allNodes.add(parent);
for (const [src, dst] of primaryEdges) {
allNodes.add(src);
allNodes.add(dst);
primaryNodes.add(src);
primaryNodes.add(dst);
addNeighbor(src, dst);
}
parentOf.set(child, parent);
if (!childrenOf.has(parent)) childrenOf.set(parent, []);
childrenOf.get(parent)!.push(child);
for (const [src, dst] of secondaryEdges) {
allNodes.add(src);
allNodes.add(dst);
addNeighbor(src, dst);
}
const N = allNodes.size;
const nodeIds = Array.from(allNodes).sort((a, b) => a - b);
// Find root (node with no parent)
// Assuming single root for now. If multiple, pick smallest ID or error.
let root = -1;
for (const node of allNodes) {
if (!parentOf.has(node)) {
root = node;
break;
}
}
if (primaryNodes.size === 0 && N > 0) {
// Edge case: no primary edges?
root = nodeIds[0];
primaryNodes.add(root);
}
console.log(
`Read tree: ${N} nodes, ${allEdges.length} edges (P=${primaryEdges.length}, S=${secondaryEdges.length}), root=${root}`
);
// ══════════════════════════════════════════════════════════
// Compute full-tree subtree sizes
// ══════════════════════════════════════════════════════════
const subtreeSize = new Map<number, number>();
for (const id of nodeIds) subtreeSize.set(id, 1);
{
// Post-order traversal to sum subtree sizes
// Or iterative with two stacks
const stack: Array<{ id: number; phase: "enter" | "exit" }> = [
{ id: root, phase: "enter" },
];
while (stack.length > 0) {
const { id, phase } = stack.pop()!;
if (phase === "enter") {
stack.push({ id, phase: "exit" });
const kids = childrenOf.get(id);
if (kids) for (const kid of kids) stack.push({ id: kid, phase: "enter" });
} else {
const kids = childrenOf.get(id);
if (kids) {
let sum = 0;
for (const kid of kids) sum += subtreeSize.get(kid)!;
subtreeSize.set(id, 1 + sum);
}
}
}
}
// ══════════════════════════════════════════════════════════
// Skeleton = primary nodes
// ══════════════════════════════════════════════════════════
const skeleton = primaryNodes;
console.log(`Skeleton: ${skeleton.size} nodes, ${primaryEdges.length} edges`);
// ══════════════════════════════════════════════════════════
// Position arrays & per-node tracking
// ══════════════════════════════════════════════════════════
// We use dense arrays logic, but node IDs might be sparse if loaded from file.
// However, generate_tree produced sequential IDs starting at 0.
// Let's assume dense 0..N-1 for array indexing, mapped via nodeIds if needed.
// Actually, let's keep it simple: assume maxId < 2*N or use Maps for positions?
// The current code uses Float64Array(N) and assumes `nodeIds[i]` corresponds to index `i`?
// No, the previous code pushed `nodeIds` as `0..N-1`.
// Here, `nodeIds` IS verified to be `0..N-1` because generate_tree did `nextId++`.
// So `nodeIds[i] === i`. We can directly use `x[i]`.
// But if input file has gaps, we'd need a map. To be safe, let's build an `idToIdx` map.
const maxId = Math.max(...nodeIds);
const mapSize = maxId + 1; // Or just use `N` if we remap. Let's remap.
const idToIdx = new Map<number, number>();
nodeIds.forEach((id, idx) => idToIdx.set(id, idx));
console.log(
`Read graph: ${N} nodes, ${allEdges.length} edges (P=${primaryEdges.length}, S=${secondaryEdges.length})`
);
console.log(`Primary nodes: ${primaryNodes.size}`);
// ══════════════════════════════════════════════════════════
// Initial placement
// ══════════════════════════════════════════════════════════
const x = new Float64Array(N);
const y = new Float64Array(N);
const nodeRadius = new Float64Array(N); // distance from origin
const sectorStart = new Float64Array(N);
const sectorEnd = new Float64Array(N);
const positioned = new Set<number>();
// Step 1: Place primary nodes in a radial layout
const primaryArr = Array.from(primaryNodes).sort((a, b) => a - b);
const angleStep = (2 * Math.PI) / Math.max(1, primaryArr.length);
const radius = PRIMARY_RADIUS * Math.max(1, Math.sqrt(primaryArr.length / 10));
for (let i = 0; i < primaryArr.length; i++) {
const idx = idToIdx.get(primaryArr[i])!;
const angle = i * angleStep;
x[idx] = radius * Math.cos(angle);
y[idx] = radius * Math.sin(angle);
}
console.log(`Placed ${primaryArr.length} primary nodes in radial layout (r=${radius.toFixed(0)})`);
// Step 2: Place remaining nodes near their connected neighbors
// BFS from already-placed nodes
const placed = new Set<number>(primaryNodes);
const queue: number[] = [...primaryArr];
let head = 0;
while (head < queue.length) {
const nodeId = queue[head++];
const nodeNeighbors = neighbors.get(nodeId);
if (!nodeNeighbors) continue;
for (const nbId of nodeNeighbors) {
if (placed.has(nbId)) continue;
placed.add(nbId);
// Place near this neighbor with some jitter
const parentIdx = idToIdx.get(nodeId)!;
const childIdx = idToIdx.get(nbId)!;
const jitterAngle = Math.random() * 2 * Math.PI;
const jitterDist = EDGE_LENGTH * (0.5 + Math.random() * 0.5);
x[childIdx] = x[parentIdx] + jitterDist * Math.cos(jitterAngle);
y[childIdx] = y[parentIdx] + jitterDist * Math.sin(jitterAngle);
queue.push(nbId);
}
}
// Handle disconnected nodes (place randomly)
for (const id of nodeIds) {
if (!placed.has(id)) {
const idx = idToIdx.get(id)!;
const angle = Math.random() * 2 * Math.PI;
const r = radius * (1 + Math.random());
x[idx] = r * Math.cos(angle);
y[idx] = r * Math.sin(angle);
placed.add(id);
}
}
console.log(`Initial placement complete: ${placed.size} nodes`);
// ══════════════════════════════════════════════════════════
// Phase 1: Layout skeleton with long edges
// Force-directed layout with Barnes-Hut
// ══════════════════════════════════════════════════════════
const rootIdx = idToIdx.get(root)!;
x[rootIdx] = 0;
y[rootIdx] = 0;
nodeRadius[rootIdx] = 0;
sectorStart[rootIdx] = 0;
sectorEnd[rootIdx] = 2 * Math.PI;
positioned.add(root);
console.log(`Running force simulation (${ITERATIONS} iterations, ${N} nodes, ${allEdges.length} edges)...`);
{
const queue: number[] = [root];
let head = 0;
const t0 = performance.now();
let maxDisp = INITIAL_MAX_DISP;
while (head < queue.length) {
const parentId = queue[head++];
const parentIdx = idToIdx.get(parentId)!;
const kids = childrenOf.get(parentId);
if (!kids || kids.length === 0) continue;
for (let iter = 0; iter < ITERATIONS; iter++) {
const bhRoot = buildBHTree(x, y, N);
const fx = new Float64Array(N);
const fy = new Float64Array(N);
const aStart = sectorStart[parentIdx];
const aEnd = sectorEnd[parentIdx];
const totalWeight = kids.reduce((s, k) => s + subtreeSize.get(k)!, 0);
// 1. Repulsion via Barnes-Hut
for (let i = 0; i < N; i++) {
calcBHForce(bhRoot, x[i], y[i], fx, fy, i, BH_THETA, x, y);
}
// Sort children by subtree size
const sortedKids = [...kids].sort(
(a, b) => subtreeSize.get(b)! - subtreeSize.get(a)!
// 2. Edge attraction (spring force)
for (const [aId, bId] of allEdges) {
const a = idToIdx.get(aId)!;
const b = idToIdx.get(bId)!;
const dx = x[b] - x[a];
const dy = y[b] - y[a];
const d = Math.sqrt(dx * dx + dy * dy) || MIN_DIST;
const displacement = d - EDGE_LENGTH;
const f = ATTRACTION_K * displacement;
const ux = dx / d;
const uy = dy / d;
fx[a] += ux * f;
fy[a] += uy * f;
fx[b] -= ux * f;
fy[b] -= uy * f;
}
// 3. Apply forces with displacement capping
let totalForce = 0;
for (let i = 0; i < N; i++) {
const mag = Math.sqrt(fx[i] * fx[i] + fy[i] * fy[i]);
totalForce += mag;
if (mag > 0) {
const cap = Math.min(maxDisp, mag) / mag;
x[i] += fx[i] * cap;
y[i] += fy[i] * cap;
}
}
maxDisp *= COOLING;
if ((iter + 1) % PRINT_EVERY === 0 || iter === 0) {
console.log(
` iter ${iter + 1}/${ITERATIONS} max_disp=${maxDisp.toFixed(2)} avg_force=${(totalForce / N).toFixed(2)}`
);
let angle = aStart;
for (const kid of sortedKids) {
const kidIdx = idToIdx.get(kid)!;
const w = subtreeSize.get(kid)!;
const sector = (w / totalWeight) * (aEnd - aStart);
sectorStart[kidIdx] = angle;
sectorEnd[kidIdx] = angle + sector;
// Only position skeleton children now
if (skeleton.has(kid)) {
const midAngle = angle + sector / 2;
const r = nodeRadius[parentIdx] + SKELETON_STEP;
nodeRadius[kidIdx] = r;
x[kidIdx] = r * Math.cos(midAngle);
y[kidIdx] = r * Math.sin(midAngle);
positioned.add(kid);
queue.push(kid); // continue BFS within skeleton
}
angle += sector;
}
}
}
console.log(`Phase 1: Positioned ${positioned.size} skeleton nodes`);
// ══════════════════════════════════════════════════════════
// Force simulation on skeleton only
// ══════════════════════════════════════════════════════════
if (ENABLE_FORCE_SIM && skeleton.size > 1) {
const skeletonArr = Array.from(skeleton);
const skeletonIndices = skeletonArr.map(id => idToIdx.get(id)!);
console.log(
`Force sim on skeleton (${skeletonArr.length} nodes, ${primaryEdges.length} edges)...`
);
const t0 = performance.now();
let maxDisp = INITIAL_MAX_DISP;
for (let iter = 0; iter < ITERATIONS; iter++) {
const fx = new Float64Array(N);
const fy = new Float64Array(N);
// 1. Pairwise repulsion
for (let i = 0; i < skeletonIndices.length; i++) {
const u = skeletonIndices[i];
for (let j = i + 1; j < skeletonIndices.length; j++) {
const v = skeletonIndices[j];
const dx = x[u] - x[v];
const dy = y[u] - y[v];
const d2 = dx * dx + dy * dy;
const d = Math.sqrt(d2) || MIN_DIST;
const f = REPULSION_K / (d2 + MIN_DIST);
fx[u] += (dx / d) * f;
fy[u] += (dy / d) * f;
fx[v] -= (dx / d) * f;
fy[v] -= (dy / d) * f;
}
}
// 2. Edge attraction
for (const [aId, bId] of primaryEdges) {
const a = idToIdx.get(aId)!;
const b = idToIdx.get(bId)!;
const dx = x[b] - x[a];
const dy = y[b] - y[a];
const d = Math.sqrt(dx * dx + dy * dy) || MIN_DIST;
const displacement = d - SKELETON_STEP;
const f = (ATTRACTION_K / LONG_EDGE_MULTIPLIER) * displacement;
const ux = dx / d, uy = dy / d;
fx[a] += ux * f;
fy[a] += uy * f;
fx[b] -= ux * f;
fy[b] -= uy * f;
}
// 3. Apply forces (skip root)
for (const idx of skeletonIndices) {
if (nodeIds[idx] === root) continue;
const mag = Math.sqrt(fx[idx] * fx[idx] + fy[idx] * fy[idx]);
if (mag > 0) {
const cap = Math.min(maxDisp, mag) / mag;
x[idx] += fx[idx] * cap;
y[idx] += fy[idx] * cap;
}
}
maxDisp *= COOLING;
if ((iter + 1) % PRINT_EVERY === 0) {
let totalForce = 0;
for (const idx of skeletonIndices) {
totalForce += Math.sqrt(fx[idx] * fx[idx] + fy[idx] * fy[idx]);
}
console.log(
` iter ${iter + 1}/${ITERATIONS} max_disp=${maxDisp.toFixed(2)} avg_force=${(totalForce / skeletonIndices.length).toFixed(2)}`
);
}
}
const elapsed = performance.now() - t0;
console.log(`Skeleton force sim done in ${(elapsed / 1000).toFixed(1)}s`);
}
// ══════════════════════════════════════════════════════════
// Phase 2: Fill subtrees
// ══════════════════════════════════════════════════════════
{
const queue: number[] = Array.from(positioned);
let head = 0;
while (head < queue.length) {
const parentId = queue[head++];
const parentIdx = idToIdx.get(parentId)!;
const kids = childrenOf.get(parentId);
if (!kids) continue;
const unpositionedKids = kids.filter(k => !positioned.has(k));
if (unpositionedKids.length === 0) continue;
unpositionedKids.sort((a, b) => subtreeSize.get(b)! - subtreeSize.get(a)!);
const px = x[parentIdx];
const py = y[parentIdx];
// Determine available angular sector
// If parent is SKELETON, we reset to full 360 (local root behavior).
// If parent is NORMAL, we strictly use the sector allocated to it by its parent.
const isSkeleton = skeleton.has(parentId);
let currentAngle = isSkeleton ? 0 : sectorStart[parentIdx];
const endAngle = isSkeleton ? 2 * Math.PI : sectorEnd[parentIdx];
const totalSpan = endAngle - currentAngle;
const totalWeight = unpositionedKids.reduce((s, k) => s + subtreeSize.get(k)!, 0);
for (const kid of unpositionedKids) {
const kidIdx = idToIdx.get(kid)!;
const w = subtreeSize.get(kid)!;
// Allocate a portion of the available sector based on subtree weight
const span = (w / totalWeight) * totalSpan;
// Track the sector for this child so ITS children are constrained
sectorStart[kidIdx] = currentAngle;
sectorEnd[kidIdx] = currentAngle + span;
const midAngle = currentAngle + span / 2;
const r = RADIUS_PER_DEPTH;
x[kidIdx] = px + r * Math.cos(midAngle);
y[kidIdx] = py + r * Math.sin(midAngle);
positioned.add(kid);
queue.push(kid);
currentAngle += span;
}
}
}
console.log(`Phase 2: Positioned ${positioned.size} total nodes (of ${N})`);
// ══════════════════════════════════════════════════════════
// Phase 3: Final Relaxation (Force Sim on ALL nodes)
// ══════════════════════════════════════════════════════════
{
console.log(`Phase 3: Final relaxation on ${N} nodes...`);
const FINAL_ITERATIONS = 50;
const FINAL_MAX_DISP = 5.0;
const BH_THETA = 0.5;
// We use slightly weaker forces for final polish
// Keep repulsion same but limit displacement strongly
// Use Barnes-Hut for performance with 10k nodes
for (let iter = 0; iter < FINAL_ITERATIONS; iter++) {
const rootBH = buildBHTree(nodeIds, x, y);
const fx = new Float64Array(N);
const fy = new Float64Array(N);
// 1. Repulsion via Barnes-Hut
for (let i = 0; i < N; i++) {
calcBHForce(rootBH, x[i], y[i], fx, fy, i, BH_THETA);
}
// 2. Attraction edges
// Only attract if displacement > rest length?
// Standard spring: f = k * (d - L)
// L = EDGE_LENGTH for normal, SKELETON_STEP for skeleton?
// We can just use standard EDGE_LENGTH as "rest" for everyone to pull tight?
// Or respect hierarchy?
// With 10k nodes, we just want to relax overlaps.
for (const [uId, vId] of allEdges) {
const u = idToIdx.get(uId)!;
const v = idToIdx.get(vId)!;
const dx = x[v] - x[u];
const dy = y[v] - y[u];
const d = Math.sqrt(dx * dx + dy * dy) || MIN_DIST;
// Identifying if edge is skeletal?
// If u and v both skeleton, use longer length.
// Else normal length.
let restLen = EDGE_LENGTH;
let k = ATTRACTION_K;
if (primaryNodes.has(uId) && primaryNodes.has(vId)) {
restLen = SKELETON_STEP;
k = ATTRACTION_K / LONG_EDGE_MULTIPLIER;
}
const displacement = d - restLen;
const f = k * displacement;
const ux = dx / d, uy = dy / d;
fx[u] += ux * f;
fy[u] += uy * f;
fx[v] -= ux * f;
fy[v] -= uy * f;
}
// 3. Apply forces
let totalDisp = 0;
let maxD = 0;
const currentLimit = FINAL_MAX_DISP * (1 - iter / FINAL_ITERATIONS); // Cool down
for (let i = 0; i < N; i++) {
if (nodeIds[i] === root) continue; // Pin root
const dx = fx[i];
const dy = fy[i];
const dist = Math.sqrt(dx * dx + dy * dy);
if (dist > 0) {
const limit = Math.min(currentLimit, dist);
const scale = limit / dist;
x[i] += dx * scale;
y[i] += dy * scale;
totalDisp += limit;
maxD = Math.max(maxD, limit);
}
}
if (iter % 10 === 0) {
console.log(` Phase 3 iter ${iter}: max movement ${maxD.toFixed(3)}`);
}
}
}
const elapsed = performance.now() - t0;
console.log(`Force simulation done in ${(elapsed / 1000).toFixed(1)}s`);
// ══════════════════════════════════════════════════════════
// Write output
// ══════════════════════════════════════════════════════════
// Write node positions
const outLines: string[] = ["vertex,x,y"];
for (let i = 0; i < N; i++) {
outLines.push(`${nodeIds[i]},${x[i]},${y[i]}`);
@@ -487,9 +245,6 @@ for (let i = 0; i < N; i++) {
const outPath = join(PUBLIC_DIR, "node_positions.csv");
writeFileSync(outPath, outLines.join("\n") + "\n");
console.log(`Wrote ${N} positions to ${outPath}`);
// Edges are provided via primary_edges.csv and secondary_edges.csv generated by generate_tree.ts
// We do not write a consolidated edges.csv anymore.
console.log(`Layout complete.`);
// ══════════════════════════════════════════════════════════
@@ -498,79 +253,61 @@ console.log(`Layout complete.`);
interface BHNode {
mass: number;
x: number;
y: number;
cx: number;
cy: number;
minX: number;
maxX: number;
minY: number;
maxY: number;
children?: BHNode[]; // NW, NE, SW, SE
pointIdx?: number; // Leaf node index
children?: BHNode[];
pointIdx?: number;
}
function buildBHTree(indices: number[], x: Float64Array, y: Float64Array): BHNode {
// Determine bounds
function buildBHTree(x: Float64Array, y: Float64Array, n: number): BHNode {
let minX = Infinity, maxX = -Infinity, minY = Infinity, maxY = -Infinity;
for (let i = 0; i < x.length; i++) {
for (let i = 0; i < n; i++) {
if (x[i] < minX) minX = x[i];
if (x[i] > maxX) maxX = x[i];
if (y[i] < minY) minY = y[i];
if (y[i] > maxY) maxY = y[i];
}
// Square bounds for quadtree
const cx = (minX + maxX) / 2;
const cy = (minY + maxY) / 2;
const halfDim = Math.max(maxX - minX, maxY - minY) / 2 + 0.01;
const root: BHNode = {
mass: 0, x: 0, y: 0,
mass: 0, cx: 0, cy: 0,
minX: cx - halfDim, maxX: cx + halfDim,
minY: cy - halfDim, maxY: cy + halfDim
minY: cy - halfDim, maxY: cy + halfDim,
};
for (let i = 0; i < x.length; i++) {
insertBH(root, i, x[i], y[i]);
for (let i = 0; i < n; i++) {
insertBH(root, i, x[i], y[i], x, y);
}
calcBHMass(root);
calcBHMass(root, x, y);
return root;
}
function insertBH(node: BHNode, idx: number, px: number, py: number) {
function insertBH(node: BHNode, idx: number, px: number, py: number, x: Float64Array, y: Float64Array) {
if (!node.children && node.pointIdx === undefined) {
// Empty leaf -> Put point here
node.pointIdx = idx;
return;
}
if (!node.children && node.pointIdx !== undefined) {
// Occupied leaf -> Subdivide
const oldIdx = node.pointIdx;
node.pointIdx = undefined;
subdivideBH(node);
// Re-insert old point and new point
// Note: oldIdx needs x,y. But we don't pass array. Wait, BHTree function scope?
// We need explicit x,y access. But passing array everywhere is ugly.
// Hack: The recursive function needs access to global x/y or passed in values.
// But here we are inserting one by one.
// Wait, to re-insert oldIdx, WE NEED ITS COORDS.
// This simple 'insertBH' signature is insufficient unless we capture x/y closure or pass them.
// Let's assume x, y are available globally or we redesign.
// Since this script is top-level, x and y are available in scope!
// But `insertBH` is defined outside main scope if hoisted? No, it's inside module.
// If defined as function `function insertBH`, it captures module scope `x`, `y`?
// `x` and `y` are const Float64Array defined at line ~120.
// So yes, they are captured!
insertBH(node, oldIdx, x[oldIdx], y[oldIdx]);
// Then fall through to insert new point
insertBH(node, oldIdx, x[oldIdx], y[oldIdx], x, y);
}
if (node.children) {
const mx = (node.minX + node.maxX) / 2;
const my = (node.minY + node.maxY) / 2;
let q = 0;
if (px > mx) q += 1; // East
if (py > my) q += 2; // South
insertBH(node.children[q], idx, px, py);
if (px > mx) q += 1;
if (py > my) q += 2;
insertBH(node.children[q], idx, px, py, x, y);
}
}
@@ -578,62 +315,62 @@ function subdivideBH(node: BHNode) {
const mx = (node.minX + node.maxX) / 2;
const my = (node.minY + node.maxY) / 2;
node.children = [
{ mass: 0, x: 0, y: 0, minX: node.minX, maxX: mx, minY: node.minY, maxY: my }, // NW
{ mass: 0, x: 0, y: 0, minX: mx, maxX: node.maxX, minY: node.minY, maxY: my }, // NE
{ mass: 0, x: 0, y: 0, minX: node.minX, maxX: mx, minY: my, maxY: node.maxY }, // SW
{ mass: 0, x: 0, y: 0, minX: mx, maxX: node.maxX, minY: my, maxY: node.maxY } // SE
{ mass: 0, cx: 0, cy: 0, minX: node.minX, maxX: mx, minY: node.minY, maxY: my },
{ mass: 0, cx: 0, cy: 0, minX: mx, maxX: node.maxX, minY: node.minY, maxY: my },
{ mass: 0, cx: 0, cy: 0, minX: node.minX, maxX: mx, minY: my, maxY: node.maxY },
{ mass: 0, cx: 0, cy: 0, minX: mx, maxX: node.maxX, minY: my, maxY: node.maxY },
];
}
function calcBHMass(node: BHNode) {
function calcBHMass(node: BHNode, x: Float64Array, y: Float64Array) {
if (node.pointIdx !== undefined) {
node.mass = 1;
node.x = x[node.pointIdx];
node.y = y[node.pointIdx];
node.cx = x[node.pointIdx];
node.cy = y[node.pointIdx];
return;
}
if (node.children) {
let m = 0, cx = 0, cy = 0;
let m = 0, sx = 0, sy = 0;
for (const c of node.children) {
calcBHMass(c);
calcBHMass(c, x, y);
m += c.mass;
cx += c.x * c.mass;
cy += c.y * c.mass;
sx += c.cx * c.mass;
sy += c.cy * c.mass;
}
node.mass = m;
if (m > 0) {
node.x = cx / m;
node.y = cy / m;
node.cx = sx / m;
node.cy = sy / m;
} else {
// Center of box if empty
node.x = (node.minX + node.maxX) / 2;
node.y = (node.minY + node.maxY) / 2;
node.cx = (node.minX + node.maxX) / 2;
node.cy = (node.minY + node.maxY) / 2;
}
}
}
function calcBHForce(node: BHNode, px: number, py: number, fx: Float64Array, fy: Float64Array, idx: number, theta: number) {
const dx = px - node.x;
const dy = py - node.y;
function calcBHForce(
node: BHNode,
px: number, py: number,
fx: Float64Array, fy: Float64Array,
idx: number, theta: number,
x: Float64Array, y: Float64Array,
) {
const dx = px - node.cx;
const dy = py - node.cy;
const d2 = dx * dx + dy * dy;
const dist = Math.sqrt(d2);
const width = node.maxX - node.minX;
if (width / dist < theta || !node.children) {
// Treat as single body
if (node.mass > 0 && (node.pointIdx !== idx)) {
// Apply repulsion
// F = K * mass / dist^2
// Direction: from node to p
if (node.mass > 0 && node.pointIdx !== idx) {
const dEff = Math.max(dist, MIN_DIST);
const f = (REPULSION_K * node.mass) / (dEff * dEff); // d^2 repulsion
const f = (REPULSION_K * node.mass) / (dEff * dEff);
fx[idx] += (dx / dEff) * f;
fy[idx] += (dy / dEff) * f;
}
} else {
// Recurse
for (const c of node.children) {
calcBHForce(c, px, py, fx, fy, idx, theta);
calcBHForce(c, px, py, fx, fy, idx, theta, x, y);
}
}
}