diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..328cb61 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,6 @@ +data +.git +.gitignore +frontend/node_modules +frontend/dist +radial_sugiyama/target diff --git a/.env.example b/.env.example index e6a45f9..6eb25ea 100644 --- a/.env.example +++ b/.env.example @@ -39,6 +39,20 @@ SPARQL_READY_TIMEOUT_S=10 CORS_ORIGINS=http://localhost:5173 VITE_BACKEND_URL=http://backend:8000 +# Frontend right-pane cosmos.gl layout +VITE_COSMOS_ENABLE_SIMULATION=true +VITE_COSMOS_DEBUG_LAYOUT=false +VITE_COSMOS_SPACE_SIZE=4096 +VITE_COSMOS_CURVED_LINKS=true +VITE_COSMOS_FIT_VIEW_PADDING=0.12 +VITE_COSMOS_SIMULATION_DECAY=5000 +VITE_COSMOS_SIMULATION_GRAVITY=0 +VITE_COSMOS_SIMULATION_CENTER=0.05 +VITE_COSMOS_SIMULATION_REPULSION=0.5 +VITE_COSMOS_SIMULATION_LINK_SPRING=1 +VITE_COSMOS_SIMULATION_LINK_DISTANCE=10 +VITE_COSMOS_SIMULATION_FRICTION=0.1 + # Debugging LOG_SNAPSHOT_TIMINGS=false FREE_OS_MEMORY_AFTER_SNAPSHOT=false diff --git a/.gitignore b/.gitignore index f8c9630..2caa30e 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,4 @@ frontend/dist/ .npm/ .vite/ data/ +target/ \ No newline at end of file diff --git a/Requisitos.md b/Requisitos.md index e69de29..7e0bd61 100644 --- a/Requisitos.md +++ b/Requisitos.md @@ -0,0 +1,23 @@ +## Objetivos Gerais: + + +#### Visualizar características estruturais (todas válvulas que participam de um processo x) + +#### Visualizar todos equipamentos que conectam a um poço y + +#### Visualizar todos elementos de uma classe.
+ + + + +## Como Requisitos (query para cada nodo selecionado): + + +#### Encontrar Subclasses + +#### Encontrar Superclasses + +#### Encontrar Vizinhos + +#### Encontrar n-hop Vizinhos + diff --git a/backend_go/Dockerfile b/backend_go/Dockerfile index f71fe82..53ce558 100644 --- a/backend_go/Dockerfile +++ b/backend_go/Dockerfile @@ -1,23 +1,34 @@ ARG GO_VERSION=1.24 -FROM golang:${GO_VERSION}-alpine AS builder +FROM rust:bookworm AS rust-builder -WORKDIR /src +WORKDIR /src/radial_sugiyama -COPY go.mod /src/go.mod +COPY radial_sugiyama /src/radial_sugiyama + +RUN cargo build --release --bin radial_sugiyama_go_bridge + +FROM golang:${GO_VERSION}-alpine AS go-builder + +WORKDIR /src/backend_go + +COPY backend_go/go.mod /src/backend_go/go.mod RUN go mod download -COPY . /src +COPY backend_go /src/backend_go RUN CGO_ENABLED=0 GOOS=linux go build -trimpath -ldflags="-s -w" -o /out/backend ./ -FROM alpine:3.20 +FROM debian:bookworm-slim -RUN apk add --no-cache ca-certificates curl +RUN apt-get update \ + && apt-get install -y --no-install-recommends ca-certificates curl \ + && rm -rf /var/lib/apt/lists/* WORKDIR /app -COPY --from=builder /out/backend /app/backend +COPY --from=go-builder /out/backend /app/backend +COPY --from=rust-builder /src/radial_sugiyama/target/release/radial_sugiyama_go_bridge /app/radial_sugiyama_go_bridge EXPOSE 8000 diff --git a/backend_go/config.go b/backend_go/config.go index 98753af..c60a961 100644 --- a/backend_go/config.go +++ b/backend_go/config.go @@ -38,6 +38,12 @@ type Config struct { SparqlReadyDelay time.Duration SparqlReadyTimeout time.Duration + HierarchyLayoutEngine string + HierarchyLayoutBridgeBin string + HierarchyLayoutBridgeWorkdir string + HierarchyLayoutTimeout time.Duration + HierarchyLayoutRootIRI string + ListenAddr string } @@ -64,6 +70,11 @@ func LoadConfig() (Config, error) { SparqlLoadOnStart: envBool("SPARQL_LOAD_ON_START", false), SparqlClearOnStart: envBool("SPARQL_CLEAR_ON_START", false), + HierarchyLayoutEngine: 
envString("HIERARCHY_LAYOUT_ENGINE", "go"), + HierarchyLayoutBridgeBin: envString("HIERARCHY_LAYOUT_BRIDGE_BIN", "/app/radial_sugiyama_go_bridge"), + HierarchyLayoutBridgeWorkdir: envString("HIERARCHY_LAYOUT_BRIDGE_WORKDIR", "/workspace/radial_sugiyama"), + HierarchyLayoutRootIRI: envString("HIERARCHY_LAYOUT_ROOT_IRI", "http://purl.obolibrary.org/obo/BFO_0000001"), + SparqlReadyRetries: envInt("SPARQL_READY_RETRIES", 30), ListenAddr: envString("LISTEN_ADDR", ":8000"), } @@ -81,6 +92,10 @@ func LoadConfig() (Config, error) { if err != nil { return Config{}, err } + cfg.HierarchyLayoutTimeout, err = envSeconds("HIERARCHY_LAYOUT_TIMEOUT_S", 60) + if err != nil { + return Config{}, err + } if cfg.SparqlLoadOnStart && strings.TrimSpace(cfg.SparqlDataFile) == "" { return Config{}, fmt.Errorf("SPARQL_LOAD_ON_START=true but SPARQL_DATA_FILE is not set") @@ -110,6 +125,24 @@ func LoadConfig() (Config, error) { if cfg.EdgeBatchSize > cfg.MaxEdgeLimit { return Config{}, fmt.Errorf("EDGE_BATCH_SIZE must be <= MAX_EDGE_LIMIT") } + switch strings.ToLower(strings.TrimSpace(cfg.HierarchyLayoutEngine)) { + case "go", "rust": + cfg.HierarchyLayoutEngine = strings.ToLower(strings.TrimSpace(cfg.HierarchyLayoutEngine)) + default: + return Config{}, fmt.Errorf("HIERARCHY_LAYOUT_ENGINE must be 'go' or 'rust'") + } + if strings.TrimSpace(cfg.HierarchyLayoutBridgeBin) == "" { + return Config{}, fmt.Errorf("HIERARCHY_LAYOUT_BRIDGE_BIN must not be empty") + } + if strings.TrimSpace(cfg.HierarchyLayoutBridgeWorkdir) == "" { + return Config{}, fmt.Errorf("HIERARCHY_LAYOUT_BRIDGE_WORKDIR must not be empty") + } + if strings.TrimSpace(cfg.HierarchyLayoutRootIRI) == "" { + return Config{}, fmt.Errorf("HIERARCHY_LAYOUT_ROOT_IRI must not be empty") + } + if cfg.HierarchyLayoutTimeout <= 0 { + return Config{}, fmt.Errorf("HIERARCHY_LAYOUT_TIMEOUT_S must be > 0") + } return cfg, nil } diff --git a/backend_go/graph_snapshot.go b/backend_go/graph_snapshot.go index 4e97036..a35b302 100644 --- 
a/backend_go/graph_snapshot.go +++ b/backend_go/graph_snapshot.go @@ -168,40 +168,56 @@ func fetchGraphSnapshot( nodes := acc.nodes edges := acc.edges + routeSegments := []RouteSegment(nil) + layoutEngine := "go" + var layoutRootIRI *string - // Layout: invert edges for hierarchy (target -> source). - hierEdges := make([][2]int, 0, len(edges)) - for _, e := range edges { - hierEdges = append(hierEdges, [2]int{int(e.Target), int(e.Source)}) - } - - layers, cycleErr := levelSynchronousKahnLayers(len(nodes), hierEdges) - if cycleErr != nil { - sample := make([]string, 0, 20) - for _, nid := range cycleErr.RemainingNodeIDs { - if len(sample) >= 20 { - break - } - if nid >= 0 && nid < len(nodes) { - sample = append(sample, nodes[nid].IRI) - } + if shouldUseRustHierarchyLayout(cfg, graphQueryID) { + layoutResult, err := layoutHierarchyWithRust(ctx, cfg, nodes, edges, preds) + if err != nil { + return GraphResponse{}, err + } + nodes = layoutResult.Nodes + edges = layoutResult.Edges + routeSegments = layoutResult.RouteSegments + layoutEngine = rustHierarchyLayoutEngineID + rootIRI := cfg.HierarchyLayoutRootIRI + layoutRootIRI = &rootIRI + } else { + // Layout: invert edges for hierarchy (target -> source). 
+ hierEdges := make([][2]int, 0, len(edges)) + for _, e := range edges { + hierEdges = append(hierEdges, [2]int{int(e.Target), int(e.Source)}) } - cycleErr.RemainingIRISample = sample - return GraphResponse{}, cycleErr - } - idToIRI := make([]string, len(nodes)) - for i := range nodes { - idToIRI[i] = nodes[i].IRI - } - for _, layer := range layers { - sortLayerByIRI(layer, idToIRI) - } + layers, cycleErr := levelSynchronousKahnLayers(len(nodes), hierEdges) + if cycleErr != nil { + sample := make([]string, 0, 20) + for _, nid := range cycleErr.RemainingNodeIDs { + if len(sample) >= 20 { + break + } + if nid >= 0 && nid < len(nodes) { + sample = append(sample, nodes[nid].IRI) + } + } + cycleErr.RemainingIRISample = sample + return GraphResponse{}, cycleErr + } - xs, ys := radialPositionsFromLayers(len(nodes), layers, 5000.0) - for i := range nodes { - nodes[i].X = xs[i] - nodes[i].Y = ys[i] + idToIRI := make([]string, len(nodes)) + for i := range nodes { + idToIRI[i] = nodes[i].IRI + } + for _, layer := range layers { + sortLayerByIRI(layer, idToIRI) + } + + xs, ys := radialPositionsFromLayers(len(nodes), layers, 5000.0) + for i := range nodes { + nodes[i].X = xs[i] + nodes[i].Y = ys[i] + } } // Attach labels for URI nodes. 
@@ -240,9 +256,11 @@ func fetchGraphSnapshot( EdgeLimit: edgeLimit, Nodes: len(nodes), Edges: len(edges), + LayoutEngine: layoutEngine, + LayoutRootIRI: layoutRootIRI, } - return GraphResponse{Nodes: nodes, Edges: edges, Meta: meta}, nil + return GraphResponse{Nodes: nodes, Edges: edges, RouteSegments: routeSegments, Meta: meta}, nil } type bestLabel struct { diff --git a/backend_go/hierarchy_layout_bridge.go b/backend_go/hierarchy_layout_bridge.go new file mode 100644 index 0000000..7c77433 --- /dev/null +++ b/backend_go/hierarchy_layout_bridge.go @@ -0,0 +1,268 @@ +package main + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "os/exec" + "strings" +) + +const ( + hierarchyGraphQueryID = "hierarchy" + rustHierarchyLayoutEngineID = "rust_radial_sugiyama" +) + +type hierarchyLayoutResult struct { + Nodes []Node + Edges []Edge + RouteSegments []RouteSegment +} + +type hierarchyLayoutPrepared struct { + Request hierarchyLayoutRequest + NormalizedEdges []Edge +} + +type hierarchyLayoutRequest struct { + RootIRI string `json:"root_iri"` + Nodes []hierarchyLayoutRequestNode `json:"nodes"` + Edges []hierarchyLayoutRequestEdge `json:"edges"` +} + +type hierarchyLayoutRequestNode struct { + NodeID uint32 `json:"node_id"` + IRI string `json:"iri"` +} + +type hierarchyLayoutRequestEdge struct { + EdgeIndex int `json:"edge_index"` + ParentID uint32 `json:"parent_id"` + ChildID uint32 `json:"child_id"` + PredicateIRI *string `json:"predicate_iri,omitempty"` +} + +type hierarchyLayoutResponse struct { + Nodes []hierarchyLayoutResponseNode `json:"nodes"` + RouteSegments []hierarchyLayoutResponseRouteSegment `json:"route_segments"` +} + +type hierarchyLayoutResponseNode struct { + NodeID uint32 `json:"node_id"` + X float64 `json:"x"` + Y float64 `json:"y"` + Level int `json:"level"` +} + +type hierarchyLayoutResponseRouteSegment struct { + EdgeIndex int `json:"edge_index"` + Kind string `json:"kind"` + Points []hierarchyLayoutResponseRoutePoint `json:"points"` +} + 
+type hierarchyLayoutResponseRoutePoint struct { + X float64 `json:"x"` + Y float64 `json:"y"` +} + +type hierarchyEdgeKey struct { + ParentID uint32 + ChildID uint32 +} + +func shouldUseRustHierarchyLayout(cfg Config, graphQueryID string) bool { + return cfg.HierarchyLayoutEngine == "rust" && graphQueryID == hierarchyGraphQueryID +} + +func prepareHierarchyLayoutRequest( + rootIRI string, + nodes []Node, + edges []Edge, + preds *PredicateDict, +) hierarchyLayoutPrepared { + requestNodes := make([]hierarchyLayoutRequestNode, 0, len(nodes)) + for _, node := range nodes { + requestNodes = append(requestNodes, hierarchyLayoutRequestNode{ + NodeID: node.ID, + IRI: node.IRI, + }) + } + + predicateIRIs := []string(nil) + if preds != nil { + predicateIRIs = preds.IRIs() + } + + seenEdges := make(map[hierarchyEdgeKey]struct{}, len(edges)) + normalizedEdges := make([]Edge, 0, len(edges)) + requestEdges := make([]hierarchyLayoutRequestEdge, 0, len(edges)) + for _, edge := range edges { + parentID := edge.Target + childID := edge.Source + if parentID == childID { + continue + } + + key := hierarchyEdgeKey{ParentID: parentID, ChildID: childID} + if _, ok := seenEdges[key]; ok { + continue + } + seenEdges[key] = struct{}{} + + normalizedEdges = append(normalizedEdges, edge) + + var predicateIRI *string + if int(edge.PredicateID) >= 0 && int(edge.PredicateID) < len(predicateIRIs) { + value := predicateIRIs[edge.PredicateID] + if strings.TrimSpace(value) != "" { + predicateIRI = &value + } + } + + requestEdges = append(requestEdges, hierarchyLayoutRequestEdge{ + EdgeIndex: len(normalizedEdges) - 1, + ParentID: parentID, + ChildID: childID, + PredicateIRI: predicateIRI, + }) + } + + return hierarchyLayoutPrepared{ + Request: hierarchyLayoutRequest{ + RootIRI: rootIRI, + Nodes: requestNodes, + Edges: requestEdges, + }, + NormalizedEdges: normalizedEdges, + } +} + +func applyHierarchyLayoutResponse( + nodes []Node, + normalizedEdges []Edge, + response hierarchyLayoutResponse, +) 
(hierarchyLayoutResult, error) { + positionByID := make(map[uint32]hierarchyLayoutResponseNode, len(response.Nodes)) + for _, node := range response.Nodes { + if _, ok := positionByID[node.NodeID]; ok { + return hierarchyLayoutResult{}, fmt.Errorf("hierarchy layout bridge returned duplicate node_id %d", node.NodeID) + } + positionByID[node.NodeID] = node + } + + filteredNodes := make([]Node, 0, len(response.Nodes)) + keptNodeIDs := make(map[uint32]struct{}, len(response.Nodes)) + for _, node := range nodes { + position, ok := positionByID[node.ID] + if !ok { + continue + } + node.X = position.X + node.Y = position.Y + filteredNodes = append(filteredNodes, node) + keptNodeIDs[node.ID] = struct{}{} + } + if len(filteredNodes) != len(response.Nodes) { + return hierarchyLayoutResult{}, fmt.Errorf("hierarchy layout bridge returned unknown node ids") + } + + filteredEdges := make([]Edge, 0, len(normalizedEdges)) + normalizedToFilteredEdge := make(map[int]int, len(normalizedEdges)) + for normalizedIndex, edge := range normalizedEdges { + if _, ok := keptNodeIDs[edge.Source]; !ok { + continue + } + if _, ok := keptNodeIDs[edge.Target]; !ok { + continue + } + normalizedToFilteredEdge[normalizedIndex] = len(filteredEdges) + filteredEdges = append(filteredEdges, edge) + } + + routeSegments := make([]RouteSegment, 0, len(response.RouteSegments)) + for _, segment := range response.RouteSegments { + filteredEdgeIndex, ok := normalizedToFilteredEdge[segment.EdgeIndex] + if !ok { + return hierarchyLayoutResult{}, fmt.Errorf("hierarchy layout bridge returned route for unknown edge_index %d", segment.EdgeIndex) + } + points := make([]RoutePoint, 0, len(segment.Points)) + for _, point := range segment.Points { + points = append(points, RoutePoint{ + X: point.X, + Y: point.Y, + }) + } + routeSegments = append(routeSegments, RouteSegment{ + EdgeIndex: filteredEdgeIndex, + Kind: segment.Kind, + Points: points, + }) + } + + return hierarchyLayoutResult{ + Nodes: filteredNodes, + Edges: 
filteredEdges, + RouteSegments: routeSegments, + }, nil +} + +func runHierarchyLayoutBridge( + ctx context.Context, + cfg Config, + request hierarchyLayoutRequest, +) (hierarchyLayoutResponse, error) { + input, err := json.Marshal(request) + if err != nil { + return hierarchyLayoutResponse{}, fmt.Errorf("marshal hierarchy layout request failed: %w", err) + } + + bridgeCtx, cancel := context.WithTimeout(ctx, cfg.HierarchyLayoutTimeout) + defer cancel() + + cmd := exec.CommandContext(bridgeCtx, cfg.HierarchyLayoutBridgeBin) + cmd.Dir = cfg.HierarchyLayoutBridgeWorkdir + cmd.Stdin = bytes.NewReader(input) + + var stdout bytes.Buffer + var stderr bytes.Buffer + cmd.Stdout = &stdout + cmd.Stderr = &stderr + + if err := cmd.Run(); err != nil { + if bridgeCtx.Err() != nil { + return hierarchyLayoutResponse{}, fmt.Errorf("hierarchy layout bridge timed out after %s", cfg.HierarchyLayoutTimeout) + } + detail := strings.TrimSpace(stderr.String()) + if detail == "" { + detail = err.Error() + } + return hierarchyLayoutResponse{}, fmt.Errorf("hierarchy layout bridge failed: %s", detail) + } + + var response hierarchyLayoutResponse + if err := json.Unmarshal(stdout.Bytes(), &response); err != nil { + detail := strings.TrimSpace(stderr.String()) + if detail != "" { + return hierarchyLayoutResponse{}, fmt.Errorf("parse hierarchy layout bridge response failed: %v (stderr: %s)", err, detail) + } + return hierarchyLayoutResponse{}, fmt.Errorf("parse hierarchy layout bridge response failed: %w", err) + } + + return response, nil +} + +func layoutHierarchyWithRust( + ctx context.Context, + cfg Config, + nodes []Node, + edges []Edge, + preds *PredicateDict, +) (hierarchyLayoutResult, error) { + prepared := prepareHierarchyLayoutRequest(cfg.HierarchyLayoutRootIRI, nodes, edges, preds) + response, err := runHierarchyLayoutBridge(ctx, cfg, prepared.Request) + if err != nil { + return hierarchyLayoutResult{}, err + } + return applyHierarchyLayoutResponse(nodes, prepared.NormalizedEdges, 
response) +} diff --git a/backend_go/hierarchy_layout_bridge_test.go b/backend_go/hierarchy_layout_bridge_test.go new file mode 100644 index 0000000..7c4b2fe --- /dev/null +++ b/backend_go/hierarchy_layout_bridge_test.go @@ -0,0 +1,158 @@ +package main + +import ( + "context" + "os" + "path/filepath" + "strings" + "testing" + "time" +) + +func TestPrepareHierarchyLayoutRequestNormalizesEdges(t *testing.T) { + nodes := []Node{ + {ID: 0, TermType: "uri", IRI: "http://example.com/root"}, + {ID: 1, TermType: "uri", IRI: "http://example.com/child"}, + {ID: 2, TermType: "uri", IRI: "http://example.com/leaf"}, + } + preds := NewPredicateDict([]string{"http://www.w3.org/2000/01/rdf-schema#subClassOf"}) + edges := []Edge{ + {Source: 1, Target: 0, PredicateID: 0}, + {Source: 1, Target: 0, PredicateID: 0}, + {Source: 2, Target: 2, PredicateID: 0}, + {Source: 2, Target: 1, PredicateID: 0}, + } + + prepared := prepareHierarchyLayoutRequest("http://example.com/root", nodes, edges, preds) + + if got, want := len(prepared.Request.Nodes), 3; got != want { + t.Fatalf("len(request.nodes)=%d want %d", got, want) + } + if got, want := len(prepared.Request.Edges), 2; got != want { + t.Fatalf("len(request.edges)=%d want %d", got, want) + } + if prepared.Request.Edges[0].ParentID != 0 || prepared.Request.Edges[0].ChildID != 1 { + t.Fatalf("first normalized edge = %+v, want parent=0 child=1", prepared.Request.Edges[0]) + } + if prepared.Request.Edges[1].ParentID != 1 || prepared.Request.Edges[1].ChildID != 2 { + t.Fatalf("second normalized edge = %+v, want parent=1 child=2", prepared.Request.Edges[1]) + } + if prepared.Request.Edges[0].PredicateIRI == nil || *prepared.Request.Edges[0].PredicateIRI == "" { + t.Fatalf("expected predicate iri to be preserved") + } +} + +func TestApplyHierarchyLayoutResponsePreservesIDsAndRemapsRoutes(t *testing.T) { + nodes := []Node{ + {ID: 0, TermType: "uri", IRI: "http://example.com/root"}, + {ID: 1, TermType: "uri", IRI: "http://example.com/child"}, + 
{ID: 2, TermType: "uri", IRI: "http://example.com/leaf"}, + } + normalizedEdges := []Edge{ + {Source: 1, Target: 0, PredicateID: 0}, + {Source: 2, Target: 0, PredicateID: 0}, + } + response := hierarchyLayoutResponse{ + Nodes: []hierarchyLayoutResponseNode{ + {NodeID: 0, X: 10, Y: 20}, + {NodeID: 2, X: 30, Y: 40}, + }, + RouteSegments: []hierarchyLayoutResponseRouteSegment{ + { + EdgeIndex: 1, + Kind: "spiral", + Points: []hierarchyLayoutResponseRoutePoint{ + {X: 10, Y: 20}, + {X: 30, Y: 40}, + }, + }, + }, + } + + result, err := applyHierarchyLayoutResponse(nodes, normalizedEdges, response) + if err != nil { + t.Fatalf("applyHierarchyLayoutResponse returned error: %v", err) + } + + if got, want := len(result.Nodes), 2; got != want { + t.Fatalf("len(nodes)=%d want %d", got, want) + } + if result.Nodes[0].ID != 0 || result.Nodes[1].ID != 2 { + t.Fatalf("filtered node ids = [%d %d], want [0 2]", result.Nodes[0].ID, result.Nodes[1].ID) + } + if result.Nodes[0].X != 10 || result.Nodes[0].Y != 20 || result.Nodes[1].X != 30 || result.Nodes[1].Y != 40 { + t.Fatalf("positions were not applied to filtered nodes: %+v", result.Nodes) + } + if got, want := len(result.Edges), 1; got != want { + t.Fatalf("len(edges)=%d want %d", got, want) + } + if result.Edges[0] != normalizedEdges[1] { + t.Fatalf("filtered edge = %+v, want %+v", result.Edges[0], normalizedEdges[1]) + } + if got, want := len(result.RouteSegments), 1; got != want { + t.Fatalf("len(route_segments)=%d want %d", got, want) + } + if result.RouteSegments[0].EdgeIndex != 0 { + t.Fatalf("route edge index = %d want 0", result.RouteSegments[0].EdgeIndex) + } +} + +func TestRunHierarchyLayoutBridgeUsesConfiguredWorkingDirectory(t *testing.T) { + tmpDir := t.TempDir() + outputPath := filepath.Join(tmpDir, "pwd.txt") + scriptPath := filepath.Join(tmpDir, "bridge.sh") + script := "#!/bin/sh\npwd > \"" + outputPath + "\"\ncat >/dev/null\nprintf 
'{\"nodes\":[{\"node_id\":1,\"x\":10,\"y\":20,\"level\":0}],\"route_segments\":[]}'\n" + if err := os.WriteFile(scriptPath, []byte(script), 0o755); err != nil { + t.Fatalf("write script: %v", err) + } + + cfg := Config{ + HierarchyLayoutBridgeBin: scriptPath, + HierarchyLayoutBridgeWorkdir: tmpDir, + HierarchyLayoutTimeout: 2 * time.Second, + } + response, err := runHierarchyLayoutBridge(context.Background(), cfg, hierarchyLayoutRequest{ + RootIRI: "root", + Nodes: []hierarchyLayoutRequestNode{ + {NodeID: 1, IRI: "root"}, + }, + }) + if err != nil { + t.Fatalf("runHierarchyLayoutBridge returned error: %v", err) + } + if got, want := len(response.Nodes), 1; got != want { + t.Fatalf("len(response.nodes)=%d want %d", got, want) + } + + pwdBytes, err := os.ReadFile(outputPath) + if err != nil { + t.Fatalf("read pwd output: %v", err) + } + if got, want := strings.TrimSpace(string(pwdBytes)), tmpDir; got != want { + t.Fatalf("bridge working directory=%q want %q", got, want) + } +} + +func TestRunHierarchyLayoutBridgeReturnsSvgWriteFailure(t *testing.T) { + tmpDir := t.TempDir() + scriptPath := filepath.Join(tmpDir, "bridge_fail.sh") + script := "#!/bin/sh\ncat >/dev/null\necho 'failed to write SVG output: permission denied' >&2\nexit 1\n" + if err := os.WriteFile(scriptPath, []byte(script), 0o755); err != nil { + t.Fatalf("write script: %v", err) + } + + cfg := Config{ + HierarchyLayoutBridgeBin: scriptPath, + HierarchyLayoutBridgeWorkdir: tmpDir, + HierarchyLayoutTimeout: 2 * time.Second, + } + _, err := runHierarchyLayoutBridge(context.Background(), cfg, hierarchyLayoutRequest{ + RootIRI: "root", + }) + if err == nil { + t.Fatalf("expected hierarchy layout bridge error") + } + if !strings.Contains(err.Error(), "failed to write SVG output") { + t.Fatalf("error=%q does not mention SVG write failure", err) + } +} diff --git a/backend_go/models.go b/backend_go/models.go index 77a0dbc..8a701eb 100644 --- a/backend_go/models.go +++ b/backend_go/models.go @@ -1,5 +1,7 @@ 
package main +import selectionqueries "visualizador_instanciados/backend_go/selection_queries" + type ErrorResponse struct { Detail string `json:"detail"` } @@ -18,28 +20,42 @@ type Node struct { } type Edge struct { - Source uint32 `json:"source"` - Target uint32 `json:"target"` + Source uint32 `json:"source"` + Target uint32 `json:"target"` PredicateID uint32 `json:"predicate_id"` } +type RoutePoint struct { + X float64 `json:"x"` + Y float64 `json:"y"` +} + +type RouteSegment struct { + EdgeIndex int `json:"edge_index"` + Kind string `json:"kind"` + Points []RoutePoint `json:"points"` +} + type GraphMeta struct { - Backend string `json:"backend"` - TTLPath *string `json:"ttl_path"` - SparqlEndpoint string `json:"sparql_endpoint"` - IncludeBNodes bool `json:"include_bnodes"` - GraphQueryID string `json:"graph_query_id"` + Backend string `json:"backend"` + TTLPath *string `json:"ttl_path"` + SparqlEndpoint string `json:"sparql_endpoint"` + IncludeBNodes bool `json:"include_bnodes"` + GraphQueryID string `json:"graph_query_id"` Predicates []string `json:"predicates,omitempty"` // index = predicate_id - NodeLimit int `json:"node_limit"` - EdgeLimit int `json:"edge_limit"` - Nodes int `json:"nodes"` - Edges int `json:"edges"` + NodeLimit int `json:"node_limit"` + EdgeLimit int `json:"edge_limit"` + Nodes int `json:"nodes"` + Edges int `json:"edges"` + LayoutEngine string `json:"layout_engine,omitempty"` + LayoutRootIRI *string `json:"layout_root_iri,omitempty"` } type GraphResponse struct { - Nodes []Node `json:"nodes"` - Edges []Edge `json:"edges"` - Meta *GraphMeta `json:"meta"` + Nodes []Node `json:"nodes"` + Edges []Edge `json:"edges"` + RouteSegments []RouteSegment `json:"route_segments,omitempty"` + Meta *GraphMeta `json:"meta"` } type StatsResponse struct { @@ -80,3 +96,9 @@ type SelectionQueryResponse struct { SelectedIDs []uint32 `json:"selected_ids"` NeighborIDs []uint32 `json:"neighbor_ids"` } + +type SelectionTriplesResponse struct { + QueryID string 
`json:"query_id"` + SelectedIDs []uint32 `json:"selected_ids"` + Triples []selectionqueries.Triple `json:"triples"` +} diff --git a/backend_go/selection_queries/helpers.go b/backend_go/selection_queries/helpers.go index 4563cde..4c1f6f3 100644 --- a/backend_go/selection_queries/helpers.go +++ b/backend_go/selection_queries/helpers.go @@ -3,6 +3,7 @@ package selection_queries import ( "encoding/json" "fmt" + "log" "sort" "strings" ) @@ -66,30 +67,83 @@ func selectedNodesFromIDs(idx Index, selectedIDs []uint32, includeBNodes bool) ( return out, set } -func idsFromBindings(raw []byte, varName string, idx Index, selectedSet map[uint32]struct{}, includeBNodes bool) ([]uint32, error) { +func idFromSparqlTerm(term sparqlTerm, idx Index, includeBNodes bool) (uint32, bool) { + key, ok := termKeyFromSparqlTerm(term, includeBNodes) + if !ok { + return 0, false + } + nid, ok := idx.KeyToID[key] + return nid, ok +} + +func tripleTermFromSparqlTerm(term sparqlTerm) TripleTerm { + return TripleTerm{ + Type: term.Type, + Value: term.Value, + Lang: term.Lang, + } +} + +func logQueryExecutionFailure(queryName string, selectedIDs []uint32, includeBNodes bool, sparql string, err error) { + log.Printf( + "%s: SPARQL execution failed selected_ids=%v include_bnodes=%t err=%v\nSPARQL:\n%s", + queryName, + selectedIDs, + includeBNodes, + err, + strings.TrimSpace(sparql), + ) +} + +func resultFromTripleBindings(raw []byte, idx Index, selectedSet map[uint32]struct{}, includeBNodes bool) (Result, error) { var res sparqlResponse if err := json.Unmarshal(raw, &res); err != nil { - return nil, fmt.Errorf("failed to parse SPARQL JSON: %w", err) + return Result{}, fmt.Errorf("failed to parse SPARQL JSON: %w", err) } neighborSet := make(map[uint32]struct{}) + triples := make([]Triple, 0, len(res.Results.Bindings)) for _, b := range res.Results.Bindings { - term, ok := b[varName] - if !ok { + sTerm, okS := b["s"] + pTerm, okP := b["p"] + oTerm, okO := b["o"] + if !okS || !okP || !okO { continue } - 
key, ok := termKeyFromSparqlTerm(term, includeBNodes) - if !ok { - continue + + triple := Triple{ + S: tripleTermFromSparqlTerm(sTerm), + P: tripleTermFromSparqlTerm(pTerm), + O: tripleTermFromSparqlTerm(oTerm), } - nid, ok := idx.KeyToID[key] - if !ok { - continue + + subjID, subjOK := idFromSparqlTerm(sTerm, idx, includeBNodes) + if subjOK { + id := subjID + triple.SubjectID = &id } - if _, sel := selectedSet[nid]; sel { - continue + objID, objOK := idFromSparqlTerm(oTerm, idx, includeBNodes) + if objOK { + id := objID + triple.ObjectID = &id } - neighborSet[nid] = struct{}{} + if pTerm.Type == "uri" { + if predID, ok := idx.PredicateIDByIRI[pTerm.Value]; ok { + id := predID + triple.PredicateID = &id + } + } + + _, subjSelected := selectedSet[subjID] + _, objSelected := selectedSet[objID] + if subjOK && subjSelected && objOK && !objSelected { + neighborSet[objID] = struct{}{} + } + if objOK && objSelected && subjOK && !subjSelected { + neighborSet[subjID] = struct{}{} + } + + triples = append(triples, triple) } ids := make([]uint32, 0, len(neighborSet)) @@ -97,5 +151,5 @@ func idsFromBindings(raw []byte, varName string, idx Index, selectedSet map[uint ids = append(ids, nid) } sort.Slice(ids, func(i, j int) bool { return ids[i] < ids[j] }) - return ids, nil + return Result{NeighborIDs: ids, Triples: triples}, nil } diff --git a/backend_go/selection_queries/neighbors.go b/backend_go/selection_queries/neighbors.go index 60ccd91..4722466 100644 --- a/backend_go/selection_queries/neighbors.go +++ b/backend_go/selection_queries/neighbors.go @@ -17,12 +17,12 @@ func neighborsQuery(selectedNodes []NodeRef, includeBNodes bool) string { } if len(valuesTerms) == 0 { - return "SELECT ?nbr WHERE { FILTER(false) }" + return "SELECT ?s ?p ?o WHERE { FILTER(false) }" } bnodeFilter := "" if !includeBNodes { - bnodeFilter = "FILTER(!isBlank(?nbr))" + bnodeFilter = "FILTER(!isBlank(?s) && !isBlank(?o))" } values := strings.Join(valuesTerms, " ") @@ -31,46 +31,55 @@ PREFIX rdf: 
PREFIX rdfs: PREFIX owl: -SELECT DISTINCT ?nbr +SELECT DISTINCT ?s ?p ?o WHERE { - VALUES ?sel { %s } { - ?sel rdf:type ?o . + VALUES ?sel { %s } + BIND(?sel AS ?s) + VALUES ?p { rdf:type } + ?s ?p ?o . ?o rdf:type owl:Class . - BIND(?o AS ?nbr) } UNION { - ?s rdf:type ?sel . + VALUES ?sel { %s } + VALUES ?p { rdf:type } + ?s ?p ?sel . ?sel rdf:type owl:Class . - BIND(?s AS ?nbr) + BIND(?sel AS ?o) } UNION { - ?sel rdfs:subClassOf ?o . - BIND(?o AS ?nbr) + VALUES ?sel { %s } + BIND(?sel AS ?s) + VALUES ?p { rdfs:subClassOf } + ?s ?p ?o . } UNION { - ?s rdfs:subClassOf ?sel . - BIND(?s AS ?nbr) + VALUES ?sel { %s } + VALUES ?p { rdfs:subClassOf } + ?s ?p ?sel . + BIND(?sel AS ?o) } - FILTER(!isLiteral(?nbr)) - FILTER(?nbr != ?sel) + FILTER(!isLiteral(?o)) + FILTER(?s != ?o) %s } -`, values, bnodeFilter) +`, values, values, values, values, bnodeFilter) } -func runNeighbors(ctx context.Context, q Querier, idx Index, selectedIDs []uint32, includeBNodes bool) ([]uint32, error) { +func runNeighbors(ctx context.Context, q Querier, idx Index, selectedIDs []uint32, includeBNodes bool) (Result, error) { selectedNodes, selectedSet := selectedNodesFromIDs(idx, selectedIDs, includeBNodes) if len(selectedNodes) == 0 { - return []uint32{}, nil + return Result{NeighborIDs: []uint32{}, Triples: []Triple{}}, nil } - raw, err := q.Query(ctx, neighborsQuery(selectedNodes, includeBNodes)) + query := neighborsQuery(selectedNodes, includeBNodes) + raw, err := q.Query(ctx, query) if err != nil { - return nil, err + logQueryExecutionFailure("neighbors", selectedIDs, includeBNodes, query, err) + return Result{}, err } - return idsFromBindings(raw, "nbr", idx, selectedSet, includeBNodes) + return resultFromTripleBindings(raw, idx, selectedSet, includeBNodes) } diff --git a/backend_go/selection_queries/subclasses.go b/backend_go/selection_queries/subclasses.go index 9de4151..fba0b1a 100644 --- a/backend_go/selection_queries/subclasses.go +++ b/backend_go/selection_queries/subclasses.go @@ 
-17,38 +17,42 @@ func subclassesQuery(selectedNodes []NodeRef, includeBNodes bool) string { } if len(valuesTerms) == 0 { - return "SELECT ?nbr WHERE { FILTER(false) }" + return "SELECT ?s ?p ?o WHERE { FILTER(false) }" } bnodeFilter := "" if !includeBNodes { - bnodeFilter = "FILTER(!isBlank(?nbr))" + bnodeFilter = "FILTER(!isBlank(?s) && !isBlank(?o))" } values := strings.Join(valuesTerms, " ") return fmt.Sprintf(` PREFIX rdfs: -SELECT DISTINCT ?nbr +SELECT DISTINCT ?s ?p ?o WHERE { VALUES ?sel { %s } - ?nbr rdfs:subClassOf ?sel . - FILTER(!isLiteral(?nbr)) - FILTER(?nbr != ?sel) + VALUES ?p { rdfs:subClassOf } + ?s ?p ?sel . + BIND(?sel AS ?o) + FILTER(!isLiteral(?o)) + FILTER(?s != ?o) %s } `, values, bnodeFilter) } -func runSubclasses(ctx context.Context, q Querier, idx Index, selectedIDs []uint32, includeBNodes bool) ([]uint32, error) { +func runSubclasses(ctx context.Context, q Querier, idx Index, selectedIDs []uint32, includeBNodes bool) (Result, error) { selectedNodes, selectedSet := selectedNodesFromIDs(idx, selectedIDs, includeBNodes) if len(selectedNodes) == 0 { - return []uint32{}, nil + return Result{NeighborIDs: []uint32{}, Triples: []Triple{}}, nil } - raw, err := q.Query(ctx, subclassesQuery(selectedNodes, includeBNodes)) + query := subclassesQuery(selectedNodes, includeBNodes) + raw, err := q.Query(ctx, query) if err != nil { - return nil, err + logQueryExecutionFailure("subclasses", selectedIDs, includeBNodes, query, err) + return Result{}, err } - return idsFromBindings(raw, "nbr", idx, selectedSet, includeBNodes) + return resultFromTripleBindings(raw, idx, selectedSet, includeBNodes) } diff --git a/backend_go/selection_queries/superclasses.go b/backend_go/selection_queries/superclasses.go index 8841941..c4c2220 100644 --- a/backend_go/selection_queries/superclasses.go +++ b/backend_go/selection_queries/superclasses.go @@ -17,38 +17,42 @@ func superclassesQuery(selectedNodes []NodeRef, includeBNodes bool) string { } if len(valuesTerms) == 0 { - 
return "SELECT ?nbr WHERE { FILTER(false) }" + return "SELECT ?s ?p ?o WHERE { FILTER(false) }" } bnodeFilter := "" if !includeBNodes { - bnodeFilter = "FILTER(!isBlank(?nbr))" + bnodeFilter = "FILTER(!isBlank(?s) && !isBlank(?o))" } values := strings.Join(valuesTerms, " ") return fmt.Sprintf(` PREFIX rdfs: -SELECT DISTINCT ?nbr +SELECT DISTINCT ?s ?p ?o WHERE { VALUES ?sel { %s } - ?sel rdfs:subClassOf ?nbr . - FILTER(!isLiteral(?nbr)) - FILTER(?nbr != ?sel) + BIND(?sel AS ?s) + VALUES ?p { rdfs:subClassOf } + ?s ?p ?o . + FILTER(!isLiteral(?o)) + FILTER(?s != ?o) %s } `, values, bnodeFilter) } -func runSuperclasses(ctx context.Context, q Querier, idx Index, selectedIDs []uint32, includeBNodes bool) ([]uint32, error) { +func runSuperclasses(ctx context.Context, q Querier, idx Index, selectedIDs []uint32, includeBNodes bool) (Result, error) { selectedNodes, selectedSet := selectedNodesFromIDs(idx, selectedIDs, includeBNodes) if len(selectedNodes) == 0 { - return []uint32{}, nil + return Result{NeighborIDs: []uint32{}, Triples: []Triple{}}, nil } - raw, err := q.Query(ctx, superclassesQuery(selectedNodes, includeBNodes)) + query := superclassesQuery(selectedNodes, includeBNodes) + raw, err := q.Query(ctx, query) if err != nil { - return nil, err + logQueryExecutionFailure("superclasses", selectedIDs, includeBNodes, query, err) + return Result{}, err } - return idsFromBindings(raw, "nbr", idx, selectedSet, includeBNodes) + return resultFromTripleBindings(raw, idx, selectedSet, includeBNodes) } diff --git a/backend_go/selection_queries/types.go b/backend_go/selection_queries/types.go index 62467bb..c0699b0 100644 --- a/backend_go/selection_queries/types.go +++ b/backend_go/selection_queries/types.go @@ -13,8 +13,9 @@ type NodeRef struct { } type Index struct { - IDToNode map[uint32]NodeRef - KeyToID map[string]uint32 + IDToNode map[uint32]NodeRef + KeyToID map[string]uint32 + PredicateIDByIRI map[string]uint32 } type Meta struct { @@ -22,7 +23,27 @@ type Meta struct { 
Label string `json:"label"` } +type TripleTerm struct { + Type string `json:"type"` + Value string `json:"value"` + Lang string `json:"lang,omitempty"` +} + +type Triple struct { + S TripleTerm `json:"s"` + P TripleTerm `json:"p"` + O TripleTerm `json:"o"` + SubjectID *uint32 `json:"subject_id,omitempty"` + ObjectID *uint32 `json:"object_id,omitempty"` + PredicateID *uint32 `json:"predicate_id,omitempty"` +} + +type Result struct { + NeighborIDs []uint32 `json:"neighbor_ids"` + Triples []Triple `json:"triples"` +} + type Definition struct { Meta Meta - Run func(ctx context.Context, q Querier, idx Index, selectedIDs []uint32, includeBNodes bool) ([]uint32, error) + Run func(ctx context.Context, q Querier, idx Index, selectedIDs []uint32, includeBNodes bool) (Result, error) } diff --git a/backend_go/selection_query.go b/backend_go/selection_query.go index 825296d..929ab7c 100644 --- a/backend_go/selection_query.go +++ b/backend_go/selection_query.go @@ -14,19 +14,32 @@ func runSelectionQuery( queryID string, selectedIDs []uint32, includeBNodes bool, -) ([]uint32, error) { +) (selectionqueries.Result, error) { def, ok := selectionqueries.Get(queryID) if !ok { - return nil, fmt.Errorf("unknown query_id: %s", queryID) + return selectionqueries.Result{}, fmt.Errorf("unknown query_id: %s", queryID) } idToNode := make(map[uint32]selectionqueries.NodeRef, len(snapshot.Nodes)) keyToID := make(map[string]uint32, len(snapshot.Nodes)) + predicateIDByIRI := make(map[string]uint32) for _, n := range snapshot.Nodes { nr := selectionqueries.NodeRef{ID: n.ID, TermType: n.TermType, IRI: n.IRI} idToNode[n.ID] = nr keyToID[n.TermType+"\x00"+n.IRI] = n.ID } + if snapshot.Meta != nil { + for predID, iri := range snapshot.Meta.Predicates { + if iri == "" { + continue + } + predicateIDByIRI[iri] = uint32(predID) + } + } - return def.Run(ctx, sparql, selectionqueries.Index{IDToNode: idToNode, KeyToID: keyToID}, selectedIDs, includeBNodes) + return def.Run(ctx, sparql, 
selectionqueries.Index{ + IDToNode: idToNode, + KeyToID: keyToID, + PredicateIDByIRI: predicateIDByIRI, + }, selectedIDs, includeBNodes) } diff --git a/backend_go/server.go b/backend_go/server.go index 5a042ec..69e014e 100644 --- a/backend_go/server.go +++ b/backend_go/server.go @@ -26,6 +26,7 @@ func (s *APIServer) handler() http.Handler { mux.HandleFunc("/api/graph_queries", s.handleGraphQueries) mux.HandleFunc("/api/selection_queries", s.handleSelectionQueries) mux.HandleFunc("/api/selection_query", s.handleSelectionQuery) + mux.HandleFunc("/api/selection_triples", s.handleSelectionTriples) mux.HandleFunc("/api/neighbors", s.handleNeighbors) return s.corsMiddleware(mux) @@ -134,14 +135,14 @@ func (s *APIServer) handleGraph(w http.ResponseWriter, r *http.Request) { return } - graphQueryID := strings.TrimSpace(r.URL.Query().Get("graph_query_id")) - if graphQueryID == "" { - graphQueryID = graphqueries.DefaultID - } - if _, ok := graphqueries.Get(graphQueryID); !ok { - writeError(w, http.StatusUnprocessableEntity, "unknown graph_query_id") - return - } + graphQueryID := strings.TrimSpace(r.URL.Query().Get("graph_query_id")) + if graphQueryID == "" { + graphQueryID = graphqueries.DefaultID + } + if _, ok := graphqueries.Get(graphQueryID); !ok { + writeError(w, http.StatusUnprocessableEntity, "unknown graph_query_id") + return + } snap, err := s.snapshots.Get(r.Context(), nodeLimit, edgeLimit, graphQueryID) if err != nil { @@ -225,8 +226,18 @@ func (s *APIServer) handleSelectionQuery(w http.ResponseWriter, r *http.Request) return } - ids, err := runSelectionQuery(r.Context(), s.sparql, snap, req.QueryID, req.SelectedIDs, s.cfg.IncludeBNodes) + result, err := runSelectionQuery(r.Context(), s.sparql, snap, req.QueryID, req.SelectedIDs, s.cfg.IncludeBNodes) if err != nil { + log.Printf( + "handleSelectionQuery: returning 502 query_id=%s graph_query_id=%s selected_ids=%v node_limit=%d edge_limit=%d include_bnodes=%t err=%v", + req.QueryID, + graphQueryID, + 
req.SelectedIDs, + nodeLimit, + edgeLimit, + s.cfg.IncludeBNodes, + err, + ) writeError(w, http.StatusBadGateway, err.Error()) return } @@ -234,23 +245,31 @@ func (s *APIServer) handleSelectionQuery(w http.ResponseWriter, r *http.Request) writeJSON(w, http.StatusOK, SelectionQueryResponse{ QueryID: req.QueryID, SelectedIDs: req.SelectedIDs, - NeighborIDs: ids, + NeighborIDs: result.NeighborIDs, }) } -func (s *APIServer) handleNeighbors(w http.ResponseWriter, r *http.Request) { +func (s *APIServer) handleSelectionTriples(w http.ResponseWriter, r *http.Request) { if r.Method != http.MethodPost { w.WriteHeader(http.StatusMethodNotAllowed) return } - var req NeighborsRequest - if err := decodeJSON(r.Body, &req); err != nil { + var req SelectionQueryRequest + if err := decodeJSON(r.Body, &req); err != nil || strings.TrimSpace(req.QueryID) == "" { writeError(w, http.StatusUnprocessableEntity, "invalid request body") return } + if _, ok := selectionqueries.Get(req.QueryID); !ok { + writeError(w, http.StatusUnprocessableEntity, "unknown query_id") + return + } if len(req.SelectedIDs) == 0 { - writeJSON(w, http.StatusOK, NeighborsResponse{SelectedIDs: req.SelectedIDs, NeighborIDs: []uint32{}}) + writeJSON(w, http.StatusOK, SelectionTriplesResponse{ + QueryID: req.QueryID, + SelectedIDs: req.SelectedIDs, + Triples: []selectionqueries.Triple{}, + }) return } @@ -282,13 +301,96 @@ func (s *APIServer) handleNeighbors(w http.ResponseWriter, r *http.Request) { return } - nbrs, err := runSelectionQuery(r.Context(), s.sparql, snap, "neighbors", req.SelectedIDs, s.cfg.IncludeBNodes) + result, err := runSelectionQuery(r.Context(), s.sparql, snap, req.QueryID, req.SelectedIDs, s.cfg.IncludeBNodes) if err != nil { + log.Printf( + "handleSelectionTriples: returning 502 query_id=%s graph_query_id=%s selected_ids=%v node_limit=%d edge_limit=%d include_bnodes=%t err=%v", + req.QueryID, + graphQueryID, + req.SelectedIDs, + nodeLimit, + edgeLimit, + s.cfg.IncludeBNodes, + err, + ) 
writeError(w, http.StatusBadGateway, err.Error()) return } - writeJSON(w, http.StatusOK, NeighborsResponse{SelectedIDs: req.SelectedIDs, NeighborIDs: nbrs}) + writeJSON(w, http.StatusOK, SelectionTriplesResponse{ + QueryID: req.QueryID, + SelectedIDs: req.SelectedIDs, + Triples: result.Triples, + }) +} + +func (s *APIServer) handleNeighbors(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost { + w.WriteHeader(http.StatusMethodNotAllowed) + return + } + + var req NeighborsRequest + if err := decodeJSON(r.Body, &req); err != nil { + writeError(w, http.StatusUnprocessableEntity, "invalid request body") + return + } + if len(req.SelectedIDs) == 0 { + writeJSON(w, http.StatusOK, NeighborsResponse{ + SelectedIDs: req.SelectedIDs, + NeighborIDs: []uint32{}, + }) + return + } + + graphQueryID := graphqueries.DefaultID + if req.GraphQueryID != nil && strings.TrimSpace(*req.GraphQueryID) != "" { + graphQueryID = strings.TrimSpace(*req.GraphQueryID) + } + if _, ok := graphqueries.Get(graphQueryID); !ok { + writeError(w, http.StatusUnprocessableEntity, "unknown graph_query_id") + return + } + + nodeLimit := s.cfg.DefaultNodeLimit + edgeLimit := s.cfg.DefaultEdgeLimit + if req.NodeLimit != nil { + nodeLimit = *req.NodeLimit + } + if req.EdgeLimit != nil { + edgeLimit = *req.EdgeLimit + } + if nodeLimit < 1 || nodeLimit > s.cfg.MaxNodeLimit || edgeLimit < 1 || edgeLimit > s.cfg.MaxEdgeLimit { + writeError(w, http.StatusUnprocessableEntity, "invalid node_limit/edge_limit") + return + } + + snap, err := s.snapshots.Get(r.Context(), nodeLimit, edgeLimit, graphQueryID) + if err != nil { + writeError(w, http.StatusInternalServerError, err.Error()) + return + } + + result, err := runSelectionQuery(r.Context(), s.sparql, snap, "neighbors", req.SelectedIDs, s.cfg.IncludeBNodes) + if err != nil { + log.Printf( + "handleNeighbors: returning 502 query_id=%s graph_query_id=%s selected_ids=%v node_limit=%d edge_limit=%d include_bnodes=%t err=%v", + "neighbors", + 
graphQueryID, + req.SelectedIDs, + nodeLimit, + edgeLimit, + s.cfg.IncludeBNodes, + err, + ) + writeError(w, http.StatusBadGateway, err.Error()) + return + } + + writeJSON(w, http.StatusOK, NeighborsResponse{ + SelectedIDs: req.SelectedIDs, + NeighborIDs: result.NeighborIDs, + }) } func intQuery(r *http.Request, name string, def int) (int, error) { diff --git a/backend_go/snapshot_service.go b/backend_go/snapshot_service.go index 3a112b0..99e2896 100644 --- a/backend_go/snapshot_service.go +++ b/backend_go/snapshot_service.go @@ -6,10 +6,12 @@ import ( ) type snapshotKey struct { - NodeLimit int - EdgeLimit int - IncludeBNodes bool - GraphQueryID string + NodeLimit int + EdgeLimit int + IncludeBNodes bool + GraphQueryID string + LayoutEngine string + LayoutRootIRI string } type snapshotInflight struct { @@ -37,7 +39,14 @@ func NewGraphSnapshotService(sparql *AnzoGraphClient, cfg Config) *GraphSnapshot } func (s *GraphSnapshotService) Get(ctx context.Context, nodeLimit int, edgeLimit int, graphQueryID string) (GraphResponse, error) { - key := snapshotKey{NodeLimit: nodeLimit, EdgeLimit: edgeLimit, IncludeBNodes: s.cfg.IncludeBNodes, GraphQueryID: graphQueryID} + key := snapshotKey{ + NodeLimit: nodeLimit, + EdgeLimit: edgeLimit, + IncludeBNodes: s.cfg.IncludeBNodes, + GraphQueryID: graphQueryID, + LayoutEngine: s.cfg.HierarchyLayoutEngine, + LayoutRootIRI: s.cfg.HierarchyLayoutRootIRI, + } s.mu.Lock() if snap, ok := s.cache[key]; ok { diff --git a/docker-compose.yml b/docker-compose.yml index 957c254..18c8aa4 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -11,8 +11,22 @@ services: volumes: - ./data:/data:Z + radial_sugiyama: + profiles: ["radial"] + build: ./radial_sugiyama + working_dir: /workspace + env_file: + - ./radial_sugiyama/.env + volumes: + - ./radial_sugiyama:/workspace:Z + restart: "no" + backend: - build: ./backend_go + build: + context: . 
+ dockerfile: backend_go/Dockerfile + env_file: + - ./radial_sugiyama/.env ports: - "8000:8000" environment: @@ -37,6 +51,11 @@ services: - EDGE_BATCH_SIZE=${EDGE_BATCH_SIZE:-100000} - FREE_OS_MEMORY_AFTER_SNAPSHOT=${FREE_OS_MEMORY_AFTER_SNAPSHOT:-false} - LOG_SNAPSHOT_TIMINGS=${LOG_SNAPSHOT_TIMINGS:-false} + - HIERARCHY_LAYOUT_ENGINE=${HIERARCHY_LAYOUT_ENGINE:-go} + - HIERARCHY_LAYOUT_BRIDGE_BIN=${HIERARCHY_LAYOUT_BRIDGE_BIN:-/app/radial_sugiyama_go_bridge} + - HIERARCHY_LAYOUT_BRIDGE_WORKDIR=${HIERARCHY_LAYOUT_BRIDGE_WORKDIR:-/workspace/radial_sugiyama} + - HIERARCHY_LAYOUT_TIMEOUT_S=${HIERARCHY_LAYOUT_TIMEOUT_S:-60} + - HIERARCHY_LAYOUT_ROOT_IRI=${HIERARCHY_LAYOUT_ROOT_IRI:-http://purl.obolibrary.org/obo/BFO_0000001} depends_on: owl_imports_combiner: condition: service_completed_successfully @@ -44,6 +63,7 @@ services: condition: service_started volumes: - ./data:/data:Z + - ./radial_sugiyama:/workspace/radial_sugiyama:Z healthcheck: test: ["CMD", "curl", "-fsS", "http://localhost:8000/api/health"] interval: 5s @@ -56,6 +76,18 @@ services: - "5173:5173" environment: - VITE_BACKEND_URL=${VITE_BACKEND_URL:-http://backend:8000} + - VITE_COSMOS_ENABLE_SIMULATION=${VITE_COSMOS_ENABLE_SIMULATION:-true} + - VITE_COSMOS_DEBUG_LAYOUT=${VITE_COSMOS_DEBUG_LAYOUT:-false} + - VITE_COSMOS_SPACE_SIZE=${VITE_COSMOS_SPACE_SIZE:-4096} + - VITE_COSMOS_CURVED_LINKS=${VITE_COSMOS_CURVED_LINKS:-true} + - VITE_COSMOS_FIT_VIEW_PADDING=${VITE_COSMOS_FIT_VIEW_PADDING:-0.12} + - VITE_COSMOS_SIMULATION_DECAY=${VITE_COSMOS_SIMULATION_DECAY:-5000} + - VITE_COSMOS_SIMULATION_GRAVITY=${VITE_COSMOS_SIMULATION_GRAVITY:-0} + - VITE_COSMOS_SIMULATION_CENTER=${VITE_COSMOS_SIMULATION_CENTER:-0.05} + - VITE_COSMOS_SIMULATION_REPULSION=${VITE_COSMOS_SIMULATION_REPULSION:-0.5} + - VITE_COSMOS_SIMULATION_LINK_SPRING=${VITE_COSMOS_SIMULATION_LINK_SPRING:-1} + - VITE_COSMOS_SIMULATION_LINK_DISTANCE=${VITE_COSMOS_SIMULATION_LINK_DISTANCE:-10} + - 
VITE_COSMOS_SIMULATION_FRICTION=${VITE_COSMOS_SIMULATION_FRICTION:-0.1} volumes: - ./frontend:/app - /app/node_modules @@ -75,4 +107,4 @@ services: - ./data/app_home:/opt/anzograph/app-home:Z - ./data/persistence:/opt/anzograph/persistence:Z - ./data/config:/opt/anzograph/config:Z - - ./data/internal:/opt/anzograph/internal:Z \ No newline at end of file + - ./data/internal:/opt/anzograph/internal:Z diff --git a/frontend/README.md b/frontend/README.md index 1ba0847..9591584 100644 --- a/frontend/README.md +++ b/frontend/README.md @@ -19,6 +19,23 @@ Open: `http://localhost:5173` ## Configuration - `VITE_BACKEND_URL` controls where `/api/*` is proxied (see `frontend/vite.config.ts`). +- The right-side cosmos graph reads these `VITE_...` settings at dev-server startup: + - `VITE_COSMOS_ENABLE_SIMULATION` + - `VITE_COSMOS_DEBUG_LAYOUT` + - `VITE_COSMOS_SIMULATION_REPULSION` + - `VITE_COSMOS_SIMULATION_LINK_SPRING` + - `VITE_COSMOS_SIMULATION_LINK_DISTANCE` + - `VITE_COSMOS_SIMULATION_GRAVITY` + - `VITE_COSMOS_SIMULATION_CENTER` + - `VITE_COSMOS_SIMULATION_DECAY` + - `VITE_COSMOS_SIMULATION_FRICTION` + - `VITE_COSMOS_SPACE_SIZE` + - `VITE_COSMOS_CURVED_LINKS` + - `VITE_COSMOS_FIT_VIEW_PADDING` +- The right pane keeps a static camera after an explicit `fitViewByPointPositions(...)` from the current seed positions. +- `VITE_COSMOS_SIMULATION_CENTER` is the main knob for keeping the graph mass near the viewport center during force layout. +- `VITE_COSMOS_DEBUG_LAYOUT=true` enables a small debug overlay and `console.debug` logs for graph-space centroid/bounds, screen-space origin/centroid placement, zoom, alpha/progress, and space-boundary pressure. +- In Docker Compose, set them in the repo-root `.env` and restart the `frontend` service. 
## UI diff --git a/frontend/package-lock.json b/frontend/package-lock.json index f5871ab..af5f4d3 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -8,6 +8,7 @@ "name": "react-vite-tailwind", "version": "0.0.0", "dependencies": { + "@cosmos.gl/graph": "^2.6.4", "@webgpu/types": "^0.1.69", "clsx": "2.1.1", "react": "19.2.3", @@ -21,6 +22,7 @@ "@types/react-dom": "19.2.3", "@vitejs/plugin-react": "5.1.1", "tailwindcss": "4.1.17", + "tsx": "^4.0.0", "typescript": "5.9.3", "vite": "7.2.4", "vite-plugin-singlefile": "2.3.0" @@ -308,6 +310,31 @@ "node": ">=6.9.0" } }, + "node_modules/@cosmos.gl/graph": { + "version": "2.6.4", + "resolved": "https://registry.npmjs.org/@cosmos.gl/graph/-/graph-2.6.4.tgz", + "integrity": "sha512-i+N9lSpAjGLTUPelo/bKNbQnKPDqt3k2UnRlfIWe2Lrambc4J3QFgOfpR8AalQ/1tgLRoeNtVBZ1GPpsNqae5w==", + "license": "MIT", + "dependencies": { + "d3-array": "^3.2.0", + "d3-color": "^3.1.0", + "d3-drag": "^3.0.0", + "d3-ease": "^3.0.1", + "d3-scale": "^4.0.2", + "d3-selection": "^3.0.0", + "d3-transition": "^3.0.1", + "d3-zoom": "^3.0.0", + "dompurify": "^3.2.6", + "gl-bench": "^1.0.42", + "gl-matrix": "^3.4.3", + "random": "^4.1.0", + "regl": "^2.1.0" + }, + "engines": { + "node": ">=12.2.0", + "npm": ">=7.0.0" + } + }, "node_modules/@esbuild/aix-ppc64": { "version": "0.25.12", "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.12.tgz", @@ -1511,6 +1538,13 @@ "@types/react": "^19.2.0" } }, + "node_modules/@types/trusted-types": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz", + "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==", + "license": "MIT", + "optional": true + }, "node_modules/@vitejs/plugin-react": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-5.1.1.tgz", @@ -1639,6 +1673,172 @@ "dev": true, "license": "MIT" }, + 
"node_modules/d3-array": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz", + "integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==", + "license": "ISC", + "dependencies": { + "internmap": "1 - 2" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-color": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", + "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-dispatch": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz", + "integrity": "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-drag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-drag/-/d3-drag-3.0.0.tgz", + "integrity": "sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==", + "license": "ISC", + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-selection": "3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-ease": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", + "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-format": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.2.tgz", + "integrity": "sha512-AJDdYOdnyRDV5b6ArilzCPPwc1ejkHcoyFarqlPqT7zRYjhavcT3uSrqcMvsgh2CgoPbK3RCwyHaVyxYcP2Arg==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-interpolate": { + "version": "3.0.1", + 
"resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", + "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", + "license": "ISC", + "dependencies": { + "d3-color": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-scale": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz", + "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==", + "license": "ISC", + "dependencies": { + "d3-array": "2.10.0 - 3", + "d3-format": "1 - 3", + "d3-interpolate": "1.2.0 - 3", + "d3-time": "2.1.1 - 3", + "d3-time-format": "2 - 4" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-selection": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", + "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz", + "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==", + "license": "ISC", + "dependencies": { + "d3-array": "2 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time-format": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz", + "integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==", + "license": "ISC", + "dependencies": { + "d3-time": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-timer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", + "integrity": 
"sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-transition": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-3.0.1.tgz", + "integrity": "sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==", + "license": "ISC", + "dependencies": { + "d3-color": "1 - 3", + "d3-dispatch": "1 - 3", + "d3-ease": "1 - 3", + "d3-interpolate": "1 - 3", + "d3-timer": "1 - 3" + }, + "engines": { + "node": ">=12" + }, + "peerDependencies": { + "d3-selection": "2 - 3" + } + }, + "node_modules/d3-zoom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-3.0.0.tgz", + "integrity": "sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==", + "license": "ISC", + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-drag": "2 - 3", + "d3-interpolate": "1 - 3", + "d3-selection": "2 - 3", + "d3-transition": "2 - 3" + }, + "engines": { + "node": ">=12" + } + }, "node_modules/debug": { "version": "4.4.3", "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", @@ -1667,6 +1867,15 @@ "node": ">=8" } }, + "node_modules/dompurify": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.3.3.tgz", + "integrity": "sha512-Oj6pzI2+RqBfFG+qOaOLbFXLQ90ARpcGG6UePL82bJLtdsa6CYJD7nmiU8MW9nQNOtCHV3lZ/Bzq1X0QYbBZCA==", + "license": "(MPL-2.0 OR Apache-2.0)", + "optionalDependencies": { + "@types/trusted-types": "^2.0.7" + } + }, "node_modules/electron-to-chromium": { "version": "1.5.286", "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.286.tgz", @@ -1796,6 +2005,31 @@ "node": ">=6.9.0" } }, + "node_modules/get-tsconfig": { + "version": "4.13.6", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.13.6.tgz", + "integrity": 
"sha512-shZT/QMiSHc/YBLxxOkMtgSid5HFoauqCE3/exfsEcwg1WkeqjG+V40yBbBrsD+jW2HDXcs28xOfcbm2jI8Ddw==", + "dev": true, + "license": "MIT", + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, + "node_modules/gl-bench": { + "version": "1.0.42", + "resolved": "https://registry.npmjs.org/gl-bench/-/gl-bench-1.0.42.tgz", + "integrity": "sha512-zuMsA/NCPmI8dPy6q3zTUH8OUM5cqKg7uVWwqzrtXJPBqoypM0XeFWEc8iFOqbf/1qtXieWOrbmgFEByKTQt4Q==", + "license": "MIT" + }, + "node_modules/gl-matrix": { + "version": "3.4.4", + "resolved": "https://registry.npmjs.org/gl-matrix/-/gl-matrix-3.4.4.tgz", + "integrity": "sha512-latSnyDNt/8zYUB6VIJ6PCh2jBjJX6gnDsoCZ7LyW7GkqrD51EWwa9qCoGixj8YqBtETQK/xY7OmpTF8xz1DdQ==", + "license": "MIT" + }, "node_modules/graceful-fs": { "version": "4.2.11", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", @@ -1803,6 +2037,15 @@ "dev": true, "license": "ISC" }, + "node_modules/internmap": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", + "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, "node_modules/is-number": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", @@ -2246,6 +2489,18 @@ "node": "^10 || ^12 || >=14" } }, + "node_modules/random": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/random/-/random-4.1.0.tgz", + "integrity": "sha512-6Ajb7XmMSE9EFAMGC3kg9mvE7fGlBip25mYYuSMzw/uUSrmGilvZo2qwX3RnTRjwXkwkS+4swse9otZ92VjAtQ==", + "license": "MIT", + "dependencies": { + "seedrandom": "^3.0.5" + }, + "engines": { + "node": ">=14" + } + }, "node_modules/react": { "version": "19.2.3", "resolved": "https://registry.npmjs.org/react/-/react-19.2.3.tgz", @@ -2277,6 +2532,22 @@ "node": ">=0.10.0" } }, + 
"node_modules/regl": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/regl/-/regl-2.1.1.tgz", + "integrity": "sha512-+IOGrxl3FZ8ZM9ixCWQZzFRiRn7Rzn9bu3iFHwg/yz4tlOUQgbO4PHLgG+1ZT60zcIV8tief6Qrmyl8qcoJP0g==", + "license": "MIT" + }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, "node_modules/rollup": { "version": "4.57.1", "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.57.1.tgz", @@ -2328,6 +2599,12 @@ "integrity": "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==", "license": "MIT" }, + "node_modules/seedrandom": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-3.0.5.tgz", + "integrity": "sha512-8OwmbklUNzwezjGInmZ+2clQmExQPvomqjL7LFqOYqtmuxRgQYqOD3mHaU+MvZn5FLUeVxVfQjwLZW/n/JFuqg==", + "license": "MIT" + }, "node_modules/semver": { "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", @@ -2409,6 +2686,510 @@ "node": ">=8.0" } }, + "node_modules/tsx": { + "version": "4.21.0", + "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.21.0.tgz", + "integrity": "sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "~0.27.0", + "get-tsconfig": "^4.7.5" + }, + "bin": { + "tsx": "dist/cli.mjs" + }, + "engines": { + "node": ">=18.0.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + } + }, + "node_modules/tsx/node_modules/@esbuild/aix-ppc64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.4.tgz", + "integrity": 
"sha512-cQPwL2mp2nSmHHJlCyoXgHGhbEPMrEEU5xhkcy3Hs/O7nGZqEpZ2sUtLaL9MORLtDfRvVl2/3PAuEkYZH0Ty8Q==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/android-arm": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.4.tgz", + "integrity": "sha512-X9bUgvxiC8CHAGKYufLIHGXPJWnr0OCdR0anD2e21vdvgCI8lIfqFbnoeOz7lBjdrAGUhqLZLcQo6MLhTO2DKQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/android-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.4.tgz", + "integrity": "sha512-gdLscB7v75wRfu7QSm/zg6Rx29VLdy9eTr2t44sfTW7CxwAtQghZ4ZnqHk3/ogz7xao0QAgrkradbBzcqFPasw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/android-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.4.tgz", + "integrity": "sha512-PzPFnBNVF292sfpfhiyiXCGSn9HZg5BcAz+ivBuSsl6Rk4ga1oEXAamhOXRFyMcjwr2DVtm40G65N3GLeH1Lvw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/darwin-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.4.tgz", + "integrity": "sha512-b7xaGIwdJlht8ZFCvMkpDN6uiSmnxxK56N2GDTMYPr2/gzvfdQN8rTfBsvVKmIVY/X7EM+/hJKEIbbHs9oA4tQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/tsx/node_modules/@esbuild/darwin-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.4.tgz", + "integrity": "sha512-sR+OiKLwd15nmCdqpXMnuJ9W2kpy0KigzqScqHI3Hqwr7IXxBp3Yva+yJwoqh7rE8V77tdoheRYataNKL4QrPw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.4.tgz", + "integrity": "sha512-jnfpKe+p79tCnm4GVav68A7tUFeKQwQyLgESwEAUzyxk/TJr4QdGog9sqWNcUbr/bZt/O/HXouspuQDd9JxFSw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/freebsd-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.4.tgz", + "integrity": "sha512-2kb4ceA/CpfUrIcTUl1wrP/9ad9Atrp5J94Lq69w7UwOMolPIGrfLSvAKJp0RTvkPPyn6CIWrNy13kyLikZRZQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-arm": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.4.tgz", + "integrity": "sha512-aBYgcIxX/wd5n2ys0yESGeYMGF+pv6g0DhZr3G1ZG4jMfruU9Tl1i2Z+Wnj9/KjGz1lTLCcorqE2viePZqj4Eg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.4.tgz", + "integrity": 
"sha512-7nQOttdzVGth1iz57kxg9uCz57dxQLHWxopL6mYuYthohPKEK0vU0C3O21CcBK6KDlkYVcnDXY099HcCDXd9dA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-ia32": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.4.tgz", + "integrity": "sha512-oPtixtAIzgvzYcKBQM/qZ3R+9TEUd1aNJQu0HhGyqtx6oS7qTpvjheIWBbes4+qu1bNlo2V4cbkISr8q6gRBFA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-loong64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.4.tgz", + "integrity": "sha512-8mL/vh8qeCoRcFH2nM8wm5uJP+ZcVYGGayMavi8GmRJjuI3g1v6Z7Ni0JJKAJW+m0EtUuARb6Lmp4hMjzCBWzA==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-mips64el": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.4.tgz", + "integrity": "sha512-1RdrWFFiiLIW7LQq9Q2NES+HiD4NyT8Itj9AUeCl0IVCA459WnPhREKgwrpaIfTOe+/2rdntisegiPWn/r/aAw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-ppc64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.4.tgz", + "integrity": "sha512-tLCwNG47l3sd9lpfyx9LAGEGItCUeRCWeAx6x2Jmbav65nAwoPXfewtAdtbtit/pJFLUWOhpv0FpS6GQAmPrHA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/tsx/node_modules/@esbuild/linux-riscv64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.4.tgz", + "integrity": "sha512-BnASypppbUWyqjd1KIpU4AUBiIhVr6YlHx/cnPgqEkNoVOhHg+YiSVxM1RLfiy4t9cAulbRGTNCKOcqHrEQLIw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-s390x": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.4.tgz", + "integrity": "sha512-+eUqgb/Z7vxVLezG8bVB9SfBie89gMueS+I0xYh2tJdw3vqA/0ImZJ2ROeWwVJN59ihBeZ7Tu92dF/5dy5FttA==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/linux-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.4.tgz", + "integrity": "sha512-S5qOXrKV8BQEzJPVxAwnryi2+Iq5pB40gTEIT69BQONqR7JH1EPIcQ/Uiv9mCnn05jff9umq/5nqzxlqTOg9NA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.4.tgz", + "integrity": "sha512-xHT8X4sb0GS8qTqiwzHqpY00C95DPAq7nAwX35Ie/s+LO9830hrMd3oX0ZMKLvy7vsonee73x0lmcdOVXFzd6Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/netbsd-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.4.tgz", + "integrity": 
"sha512-RugOvOdXfdyi5Tyv40kgQnI0byv66BFgAqjdgtAKqHoZTbTF2QqfQrFwa7cHEORJf6X2ht+l9ABLMP0dnKYsgg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.4.tgz", + "integrity": "sha512-2MyL3IAaTX+1/qP0O1SwskwcwCoOI4kV2IBX1xYnDDqthmq5ArrW94qSIKCAuRraMgPOmG0RDTA74mzYNQA9ow==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/openbsd-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.4.tgz", + "integrity": "sha512-u8fg/jQ5aQDfsnIV6+KwLOf1CmJnfu1ShpwqdwC0uA7ZPwFws55Ngc12vBdeUdnuWoQYx/SOQLGDcdlfXhYmXQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.4.tgz", + "integrity": "sha512-JkTZrl6VbyO8lDQO3yv26nNr2RM2yZzNrNHEsj9bm6dOwwu9OYN28CjzZkH57bh4w0I2F7IodpQvUAEd1mbWXg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/sunos-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.4.tgz", + "integrity": "sha512-/gOzgaewZJfeJTlsWhvUEmUG4tWEY2Spp5M20INYRg2ZKl9QPO3QEEgPeRtLjEWSW8FilRNacPOg8R1uaYkA6g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/tsx/node_modules/@esbuild/win32-arm64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.4.tgz", + "integrity": "sha512-Z9SExBg2y32smoDQdf1HRwHRt6vAHLXcxD2uGgO/v2jK7Y718Ix4ndsbNMU/+1Qiem9OiOdaqitioZwxivhXYg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/win32-ia32": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.4.tgz", + "integrity": "sha512-DAyGLS0Jz5G5iixEbMHi5KdiApqHBWMGzTtMiJ72ZOLhbu/bzxgAe8Ue8CTS3n3HbIUHQz/L51yMdGMeoxXNJw==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/@esbuild/win32-x64": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.4.tgz", + "integrity": "sha512-+knoa0BDoeXgkNvvV1vvbZX4+hizelrkwmGJBdT17t8FNPwG2lKemmuMZlmaNQ3ws3DKKCxpb4zRZEIp3UxFCg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/tsx/node_modules/esbuild": { + "version": "0.27.4", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.4.tgz", + "integrity": "sha512-Rq4vbHnYkK5fws5NF7MYTU68FPRE1ajX7heQ/8QXXWqNgqqJ/GkmmyxIzUnf2Sr/bakf8l54716CcMGHYhMrrQ==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.4", + "@esbuild/android-arm": "0.27.4", + "@esbuild/android-arm64": "0.27.4", + "@esbuild/android-x64": "0.27.4", + "@esbuild/darwin-arm64": "0.27.4", + "@esbuild/darwin-x64": "0.27.4", + "@esbuild/freebsd-arm64": "0.27.4", + "@esbuild/freebsd-x64": "0.27.4", + 
"@esbuild/linux-arm": "0.27.4", + "@esbuild/linux-arm64": "0.27.4", + "@esbuild/linux-ia32": "0.27.4", + "@esbuild/linux-loong64": "0.27.4", + "@esbuild/linux-mips64el": "0.27.4", + "@esbuild/linux-ppc64": "0.27.4", + "@esbuild/linux-riscv64": "0.27.4", + "@esbuild/linux-s390x": "0.27.4", + "@esbuild/linux-x64": "0.27.4", + "@esbuild/netbsd-arm64": "0.27.4", + "@esbuild/netbsd-x64": "0.27.4", + "@esbuild/openbsd-arm64": "0.27.4", + "@esbuild/openbsd-x64": "0.27.4", + "@esbuild/openharmony-arm64": "0.27.4", + "@esbuild/sunos-x64": "0.27.4", + "@esbuild/win32-arm64": "0.27.4", + "@esbuild/win32-ia32": "0.27.4", + "@esbuild/win32-x64": "0.27.4" + } + }, "node_modules/typescript": { "version": "5.9.3", "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", diff --git a/frontend/package.json b/frontend/package.json index 46eb7ee..b681701 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -10,6 +10,7 @@ "layout": "tsx scripts/compute_layout.ts" }, "dependencies": { + "@cosmos.gl/graph": "^2.6.4", "@webgpu/types": "^0.1.69", "clsx": "2.1.1", "react": "19.2.3", @@ -23,9 +24,9 @@ "@types/react-dom": "19.2.3", "@vitejs/plugin-react": "5.1.1", "tailwindcss": "4.1.17", - "typescript": "5.9.3", "tsx": "^4.0.0", + "typescript": "5.9.3", "vite": "7.2.4", "vite-plugin-singlefile": "2.3.0" } -} \ No newline at end of file +} diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index 1a91e57..e6021e7 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -2,13 +2,96 @@ import { useEffect, useRef, useState } from "react"; import { Renderer } from "./renderer"; import { fetchGraphQueries } from "./graph_queries"; import type { GraphQueryMeta } from "./graph_queries"; -import { fetchSelectionQueries, runSelectionQuery } from "./selection_queries"; -import type { GraphMeta, SelectionQueryMeta } from "./selection_queries"; +import { fetchSelectionQueries, runSelectionQuery, runSelectionTripleQuery } from "./selection_queries"; +import { 
cosmosRuntimeConfig } from "./cosmos_config"; +import type { GraphMeta, GraphRoutePoint, GraphRouteSegment, SelectionQueryMeta, SelectionTriple } from "./selection_queries"; +import { TripleGraphView } from "./TripleGraphView"; +import { buildTripleGraphModel, type TripleGraphModel } from "./triple_graph"; function sleep(ms: number): Promise { return new Promise((r) => setTimeout(r, ms)); } +type GraphNodeMeta = { + id?: number; + iri?: string; + label?: string; + x?: number; + y?: number; +}; + +function graphRoutePoint(value: unknown): GraphRoutePoint | null { + if (!value || typeof value !== "object") return null; + const record = value as Record; + if (typeof record.x !== "number" || typeof record.y !== "number") return null; + return { + x: record.x, + y: record.y, + }; +} + +function graphRouteSegmentArray(value: unknown): GraphRouteSegment[] { + if (!Array.isArray(value)) return []; + const out: GraphRouteSegment[] = []; + for (const item of value) { + if (!item || typeof item !== "object") continue; + const record = item as Record; + if (typeof record.edge_index !== "number" || typeof record.kind !== "string") continue; + if (!Array.isArray(record.points)) continue; + const points: GraphRoutePoint[] = []; + for (const point of record.points) { + const parsed = graphRoutePoint(point); + if (!parsed) continue; + points.push(parsed); + } + if (points.length < 2) continue; + out.push({ + edge_index: record.edge_index, + kind: record.kind, + points, + }); + } + return out; +} + +function buildRouteLineVertices(routeSegments: GraphRouteSegment[]): Float32Array { + let lineCount = 0; + for (const route of routeSegments) { + lineCount += Math.max(0, route.points.length - 1); + } + + const out = new Float32Array(lineCount * 4); + let offset = 0; + for (const route of routeSegments) { + for (let i = 1; i < route.points.length; i++) { + const previous = route.points[i - 1]; + const current = route.points[i]; + out[offset++] = previous.x; + out[offset++] = previous.y; 
+ out[offset++] = current.x; + out[offset++] = current.y; + } + } + return out; +} + +type TripleResultState = { + status: "idle" | "loading" | "ready" | "error"; + queryId: string; + selectedIds: number[]; + triples: SelectionTriple[]; + errorMessage?: string; +}; + +function idleTripleResult(queryId: string): TripleResultState { + return { + status: "idle", + queryId, + selectedIds: [], + triples: [], + }; +} + export default function App() { const canvasRef = useRef(null); const rendererRef = useRef(null); @@ -28,14 +111,17 @@ export default function App() { const [activeGraphQueryId, setActiveGraphQueryId] = useState("default"); const [selectionQueries, setSelectionQueries] = useState([]); const [activeSelectionQueryId, setActiveSelectionQueryId] = useState("neighbors"); + const [tripleResult, setTripleResult] = useState(() => idleTripleResult("neighbors")); + const [tripleGraphModel, setTripleGraphModel] = useState(null); const [backendStats, setBackendStats] = useState<{ nodes: number; edges: number; backend?: string } | null>(null); const graphMetaRef = useRef(null); const selectionReqIdRef = useRef(0); + const tripleReqIdRef = useRef(0); const graphInitializedRef = useRef(false); // Store mouse position in a ref so it can be accessed in render loop without re-renders const mousePos = useRef({ x: 0, y: 0 }); - const nodesRef = useRef([]); + const nodesRef = useRef([]); async function loadGraph(graphQueryId: string, signal: AbortSignal): Promise { const renderer = rendererRef.current; @@ -60,11 +146,13 @@ export default function App() { const nodes = Array.isArray(graph.nodes) ? graph.nodes : []; const edges = Array.isArray(graph.edges) ? graph.edges : []; + const routeSegments = graphRouteSegmentArray(graph.route_segments); const meta = graph.meta || null; const count = nodes.length; nodesRef.current = nodes; graphMetaRef.current = meta && typeof meta === "object" ? 
(meta as GraphMeta) : null; + setTripleResult(idleTripleResult(activeSelectionQueryId)); // Build positions from backend-provided node coordinates. setStatus("Preparing buffers…"); @@ -90,6 +178,7 @@ export default function App() { edgeData[i * 2] = typeof s === "number" ? s >>> 0 : 0; edgeData[i * 2 + 1] = typeof t === "number" ? t >>> 0 : 0; } + const routeLineVertices = buildRouteLineVertices(routeSegments); // Use /api/graph meta; don't do a second expensive backend call. if (meta && typeof meta.nodes === "number" && typeof meta.edges === "number") { @@ -106,13 +195,32 @@ export default function App() { await new Promise((r) => setTimeout(r, 0)); if (signal.aborted) return; - const buildMs = renderer.init(xs, ys, vertexIds, edgeData); + const buildMs = renderer.init( + xs, + ys, + vertexIds, + edgeData, + routeLineVertices.length > 0 ? routeLineVertices : null + ); setNodeCount(renderer.getNodeCount()); setSelectedNodes(new Set()); setStatus(""); console.log(`Init complete: ${count.toLocaleString()} nodes, ${edges.length.toLocaleString()} edges in ${buildMs.toFixed(0)}ms`); } + function getSelectedIds(renderer: Renderer, selected: Set): number[] { + const selectedIds: number[] = []; + for (const sortedIdx of selected) { + const origIdx = renderer.sortedIndexToOriginalIndex(sortedIdx); + if (origIdx === null) continue; + const node = nodesRef.current?.[origIdx]; + const nodeId = node?.id; + if (typeof nodeId !== "number") continue; + selectedIds.push(nodeId); + } + return selectedIds; + } + useEffect(() => { const canvas = canvasRef.current; if (!canvas) return; @@ -186,14 +294,14 @@ export default function App() { } })(); - // ── Input handling ── + // Input handling let dragging = false; - let didDrag = false; // true if mouse moved significantly during drag + let didDrag = false; let downX = 0; let downY = 0; let lastX = 0; let lastY = 0; - const DRAG_THRESHOLD = 5; // pixels + const DRAG_THRESHOLD = 5; const onDown = (e: MouseEvent) => { dragging = true; @@ 
-207,7 +315,6 @@ export default function App() { mousePos.current = { x: e.clientX, y: e.clientY }; if (!dragging) return; - // Check if we've moved enough to consider it a drag const dx = e.clientX - downX; const dy = e.clientY - downY; if (Math.abs(dx) > DRAG_THRESHOLD || Math.abs(dy) > DRAG_THRESHOLD) { @@ -220,15 +327,14 @@ export default function App() { }; const onUp = (e: MouseEvent) => { if (dragging && !didDrag) { - // This was a click, not a drag - handle selection const node = renderer.findNodeIndexAt(e.clientX, e.clientY); if (node) { setSelectedNodes((prev: Set) => { const next = new Set(prev); if (next.has(node.index)) { - next.delete(node.index); // Deselect if already selected + next.delete(node.index); } else { - next.add(node.index); // Select + next.add(node.index); } return next; }); @@ -252,7 +358,7 @@ export default function App() { canvas.addEventListener("wheel", onWheel, { passive: false }); canvas.addEventListener("mouseleave", onMouseLeave); - // ── Render loop ── + // Render loop let frameCount = 0; let lastTime = performance.now(); let raf = 0; @@ -261,7 +367,6 @@ export default function App() { const result = renderer.render(); frameCount++; - // Find hovered node using quadtree const hit = renderer.findNodeIndexAt(mousePos.current.x, mousePos.current.y); if (hit) { const origIdx = renderer.sortedIndexToOriginalIndex(hit.index); @@ -328,44 +433,30 @@ export default function App() { return () => ctrl.abort(); }, [activeGraphQueryId]); - // Sync selection state to renderer + // Left-side selection highlighting path useEffect(() => { const renderer = rendererRef.current; if (!renderer) return; - // Optimistically reflect selection immediately; highlights will be filled in by backend. renderer.updateSelection(selectedNodes, new Set()); - - // Invalidate any in-flight request for the previous selection/mode. const reqId = ++selectionReqIdRef.current; - - // Convert selected sorted indices to backend node IDs (graph-export dense IDs). 
- const selectedIds: number[] = []; - for (const sortedIdx of selectedNodes) { - const origIdx = renderer.sortedIndexToOriginalIndex(sortedIdx); - if (origIdx === null) continue; - const n = nodesRef.current?.[origIdx]; - const nodeId = n?.id; - if (typeof nodeId !== "number") continue; - selectedIds.push(nodeId); - } + const selectedIds = getSelectedIds(renderer, selectedNodes); + const queryId = (activeSelectionQueryId || selectionQueries[0]?.id || "neighbors").trim(); if (selectedIds.length === 0) { return; } - const queryId = (activeSelectionQueryId || selectionQueries[0]?.id || "neighbors").trim(); - const ctrl = new AbortController(); (async () => { try { - const neighborIds = await runSelectionQuery(queryId, selectedIds, graphMetaRef.current, ctrl.signal); + const result = await runSelectionQuery(queryId, selectedIds, graphMetaRef.current, ctrl.signal); if (ctrl.signal.aborted) return; if (reqId !== selectionReqIdRef.current) return; const neighborSorted = new Set(); - for (const id of neighborIds) { + for (const id of result.neighborIds) { if (typeof id !== "number") continue; const sorted = renderer.vertexIdToSortedIndexOrNull(id); if (sorted === null) continue; @@ -375,8 +466,8 @@ export default function App() { renderer.updateSelection(selectedNodes, neighborSorted); } catch (e) { if (ctrl.signal.aborted) return; + if (reqId !== selectionReqIdRef.current) return; console.warn(e); - // Keep the UI usable even if neighbors fail to load. 
renderer.updateSelection(selectedNodes, new Set()); } })(); @@ -384,213 +475,369 @@ export default function App() { return () => ctrl.abort(); }, [selectedNodes, activeSelectionQueryId]); + // Right-side triple graph path + useEffect(() => { + const renderer = rendererRef.current; + if (!renderer) return; + + const reqId = ++tripleReqIdRef.current; + const selectedIds = getSelectedIds(renderer, selectedNodes); + const queryId = (activeSelectionQueryId || selectionQueries[0]?.id || "neighbors").trim(); + + if (selectedIds.length === 0) { + setTripleResult(idleTripleResult(queryId)); + return; + } + + const ctrl = new AbortController(); + setTripleResult({ + status: "loading", + queryId, + selectedIds, + triples: [], + }); + + (async () => { + try { + const result = await runSelectionTripleQuery(queryId, selectedIds, graphMetaRef.current, ctrl.signal); + if (ctrl.signal.aborted) return; + if (reqId !== tripleReqIdRef.current) return; + + setTripleResult({ + status: "ready", + queryId: result.queryId, + selectedIds: result.selectedIds, + triples: result.triples, + }); + } catch (e) { + if (ctrl.signal.aborted) return; + if (reqId !== tripleReqIdRef.current) return; + console.warn(e); + setTripleResult({ + status: "error", + queryId, + selectedIds, + triples: [], + errorMessage: e instanceof Error ? e.message : String(e), + }); + } + })(); + + return () => ctrl.abort(); + }, [selectedNodes, activeSelectionQueryId]); + + useEffect(() => { + if (tripleResult.status !== "ready") { + setTripleGraphModel(null); + return; + } + setTripleGraphModel(buildTripleGraphModel(tripleResult.triples, tripleResult.selectedIds)); + }, [tripleResult]); + + const resultQueryId = (tripleResult.queryId || activeSelectionQueryId || selectionQueries[0]?.id || "neighbors").trim(); + const resultQueryLabel = selectionQueries.find((q) => q.id === resultQueryId)?.label ?? resultQueryId; + return ( -
- +
+
+ - {/* Loading overlay */} - {status && ( -
- {status} -
- )} - - {/* Error overlay */} - {error && ( -
- Error: {error} -
- )} - - {/* HUD */} - {!status && !error && ( - <> + {status && (
-
FPS: {stats.fps}
-
Drawn: {stats.drawn.toLocaleString()} / {nodeCount.toLocaleString()}
-
Mode: {stats.mode}
-
Zoom: {stats.zoom < 0.01 ? stats.zoom.toExponential(2) : stats.zoom.toFixed(2)} px/unit
-
Pt Size: {stats.ptSize.toFixed(1)}px
-
Selected: {selectedNodes.size}
- {backendStats && ( -
- Backend{backendStats.backend ? ` (${backendStats.backend})` : ""}: {backendStats.nodes.toLocaleString()} nodes, {backendStats.edges.toLocaleString()} edges -
- )} + {status}
+ )} + + {error && (
- Drag to pan · Scroll to zoom · Click to select + Error: {error}
+ )} - {/* Selection query buttons */} - {selectionQueries.length > 0 && ( + {!status && !error && ( + <>
- {selectionQueries.map((q) => { - const active = q.id === activeSelectionQueryId; - return ( - - ); - })} +
FPS: {stats.fps}
+
Drawn: {stats.drawn.toLocaleString()} / {nodeCount.toLocaleString()}
+
Mode: {stats.mode}
+
Zoom: {stats.zoom < 0.01 ? stats.zoom.toExponential(2) : stats.zoom.toFixed(2)} px/unit
+
Pt Size: {stats.ptSize.toFixed(1)}px
+
Selected: {selectedNodes.size}
+ {backendStats && ( +
+ Backend{backendStats.backend ? ` (${backendStats.backend})` : ""}: {backendStats.nodes.toLocaleString()} nodes, {backendStats.edges.toLocaleString()} edges +
+ )}
- )} - - {/* Graph query buttons */} - {graphQueries.length > 0 && (
- {graphQueries.map((q) => { - const active = q.id === activeGraphQueryId; - return ( - - ); - })} + Drag to pan · Scroll to zoom · Click to select +
+ + {selectionQueries.length > 0 && ( +
+ {selectionQueries.map((q) => { + const active = q.id === activeSelectionQueryId; + return ( + + ); + })} +
+ )} + + {graphQueries.length > 0 && ( +
+ {graphQueries.map((q) => { + const active = q.id === activeGraphQueryId; + return ( + + ); + })} +
+ )} + + {hoveredNode && ( +
+
+ {hoveredNode.label || hoveredNode.iri || "(unknown)"} +
+
+ ({hoveredNode.x.toFixed(2)}, {hoveredNode.y.toFixed(2)}) +
+
+ )} + + )} +
+ +
+
+
+ {resultQueryLabel} +
+
Selection Graph
+
+ Nodes: {(tripleGraphModel?.nodeCount ?? 0).toLocaleString()} · Edges: {(tripleGraphModel?.edgeCount ?? 0).toLocaleString()} +
+
+ Layout: {cosmosRuntimeConfig.enableSimulation ? "force-directed" : "static"} · Camera: static · Center force: {cosmosRuntimeConfig.simulationCenter} · Repulsion: {cosmosRuntimeConfig.simulationRepulsion} · Link spring: {cosmosRuntimeConfig.simulationLinkSpring} · Friction: {cosmosRuntimeConfig.simulationFriction} +
+
+ +
+ {tripleResult.status === "idle" && ( +
+ Select nodes on the left to view returned triples
)} - {/* Hover tooltip */} - {hoveredNode && ( + {tripleResult.status === "loading" && (
-
- {hoveredNode.label || hoveredNode.iri || "(unknown)"} -
-
- ({hoveredNode.x.toFixed(2)}, {hoveredNode.y.toFixed(2)}) -
+ Running triple query…
)} - - )} + + {tripleResult.status === "error" && ( +
+ {tripleResult.errorMessage || "Triple query failed"} +
+ )} + + {tripleResult.status === "ready" && (!tripleGraphModel || tripleGraphModel.edgeCount === 0) && ( +
+ No returned graph +
+ )} + + {tripleResult.status === "ready" && tripleGraphModel && tripleGraphModel.edgeCount > 0 && ( + + )} +
+
); } diff --git a/frontend/src/TripleGraphView.tsx b/frontend/src/TripleGraphView.tsx new file mode 100644 index 0000000..1c5d61e --- /dev/null +++ b/frontend/src/TripleGraphView.tsx @@ -0,0 +1,484 @@ +import { memo, useEffect, useMemo, useRef, useState } from "react"; +import { Graph, type GraphConfig } from "@cosmos.gl/graph"; +import { cosmosRuntimeConfig } from "./cosmos_config"; +import { + computeLayoutMetrics, + type GraphLayoutMetrics, + type TripleGraphLink, + type TripleGraphModel, + type TripleGraphNode, +} from "./triple_graph"; + +type TripleGraphViewProps = { + model: TripleGraphModel; +}; + +type InspectState = + | { kind: "node"; node: TripleGraphNode } + | { kind: "link"; link: TripleGraphLink } + | null; + +type LayoutDebugState = { + phase: "idle" | "running" | "ended"; + alpha: number | null; + progress: number; + currentMetrics: GraphLayoutMetrics; + lastEvent: string; + zoomLevel: number; + screenCenter: { x: number; y: number }; + screenOrigin: { x: number; y: number }; + screenCentroid: { x: number; y: number }; + originDelta: { x: number; y: number }; + centroidDelta: { x: number; y: number }; + nearSpaceBoundary: boolean; +}; + +export const TripleGraphView = memo(function TripleGraphView({ model }: TripleGraphViewProps) { + const containerRef = useRef(null); + const graphRef = useRef(null); + const modelRef = useRef(model); + const debugLogTimeRef = useRef(0); + const [hovered, setHovered] = useState(null); + const [pinned, setPinned] = useState(null); + const [layoutDebug, setLayoutDebug] = useState({ + phase: "idle", + alpha: null, + progress: 0, + currentMetrics: model.seedMetrics, + lastEvent: "seed", + zoomLevel: 0, + screenCenter: { x: 0, y: 0 }, + screenOrigin: { x: 0, y: 0 }, + screenCentroid: { x: 0, y: 0 }, + originDelta: { x: 0, y: 0 }, + centroidDelta: { x: 0, y: 0 }, + nearSpaceBoundary: false, + }); + + const activeDetail = useMemo(() => pinned ?? 
hovered, [pinned, hovered]); + + useEffect(() => { + modelRef.current = model; + }, [model]); + + useEffect(() => { + setLayoutDebug({ + phase: "idle", + alpha: null, + progress: 0, + currentMetrics: model.seedMetrics, + lastEvent: "seed", + zoomLevel: 0, + screenCenter: { x: 0, y: 0 }, + screenOrigin: { x: 0, y: 0 }, + screenCentroid: { x: 0, y: 0 }, + originDelta: { x: 0, y: 0 }, + centroidDelta: { x: 0, y: 0 }, + nearSpaceBoundary: false, + }); + if (cosmosRuntimeConfig.debugLayout) { + console.debug("[cosmos-layout]", { + event: "seed-applied", + seedCentroid: { + x: Number(model.seedMetrics.centroidX.toFixed(3)), + y: Number(model.seedMetrics.centroidY.toFixed(3)), + }, + bounds: { + width: Number(model.seedMetrics.width.toFixed(3)), + height: Number(model.seedMetrics.height.toFixed(3)), + maxRadius: Number(model.seedMetrics.maxRadius.toFixed(3)), + }, + }); + } + }, [model]); + + useEffect(() => { + const container = containerRef.current; + if (!container) return; + + const reheatSimulation = () => { + if (!cosmosRuntimeConfig.enableSimulation) return; + graphRef.current?.start(0.25); + }; + + const reportLayout = (event: string, phase: LayoutDebugState["phase"], alpha?: number) => { + const graph = graphRef.current; + if (!graph || !cosmosRuntimeConfig.debugLayout) return; + const currentMetrics = computeLayoutMetrics(graph.getPointPositions()); + const containerRect = container.getBoundingClientRect(); + const screenCenter = { + x: containerRect.width / 2, + y: containerRect.height / 2, + }; + const screenOriginTuple = graph.spaceToScreenPosition([0, 0]); + const screenCentroidTuple = graph.spaceToScreenPosition([ + currentMetrics.centroidX, + currentMetrics.centroidY, + ]); + const screenOrigin = { x: screenOriginTuple[0], y: screenOriginTuple[1] }; + const screenCentroid = { x: screenCentroidTuple[0], y: screenCentroidTuple[1] }; + const originDelta = { + x: screenOrigin.x - screenCenter.x, + y: screenOrigin.y - screenCenter.y, + }; + const centroidDelta 
= { + x: screenCentroid.x - screenCenter.x, + y: screenCentroid.y - screenCenter.y, + }; + const boundaryMargin = cosmosRuntimeConfig.spaceSize * 0.02; + const nearSpaceBoundary = + currentMetrics.minX <= boundaryMargin || + currentMetrics.maxX >= cosmosRuntimeConfig.spaceSize - boundaryMargin || + currentMetrics.minY <= boundaryMargin || + currentMetrics.maxY >= cosmosRuntimeConfig.spaceSize - boundaryMargin; + const now = performance.now(); + const shouldPublish = event !== "tick" || now - debugLogTimeRef.current >= 250; + const next: LayoutDebugState = { + phase, + alpha: typeof alpha === "number" ? alpha : null, + progress: graph.progress, + currentMetrics, + lastEvent: event, + zoomLevel: graph.getZoomLevel(), + screenCenter, + screenOrigin, + screenCentroid, + originDelta, + centroidDelta, + nearSpaceBoundary, + }; + if (!shouldPublish) return; + debugLogTimeRef.current = now; + setLayoutDebug(next); + console.debug("[cosmos-layout]", { + event, + phase, + alpha: next.alpha, + progress: Number(next.progress.toFixed(4)), + seedCentroid: { + x: Number(modelRef.current.seedMetrics.centroidX.toFixed(3)), + y: Number(modelRef.current.seedMetrics.centroidY.toFixed(3)), + }, + currentCentroid: { + x: Number(currentMetrics.centroidX.toFixed(3)), + y: Number(currentMetrics.centroidY.toFixed(3)), + }, + screenCenter: { + x: Number(screenCenter.x.toFixed(2)), + y: Number(screenCenter.y.toFixed(2)), + }, + screenOrigin: { + x: Number(screenOrigin.x.toFixed(2)), + y: Number(screenOrigin.y.toFixed(2)), + }, + screenCentroid: { + x: Number(screenCentroid.x.toFixed(2)), + y: Number(screenCentroid.y.toFixed(2)), + }, + originDelta: { + x: Number(originDelta.x.toFixed(2)), + y: Number(originDelta.y.toFixed(2)), + }, + centroidDelta: { + x: Number(centroidDelta.x.toFixed(2)), + y: Number(centroidDelta.y.toFixed(2)), + }, + zoomLevel: Number(next.zoomLevel.toFixed(4)), + nearSpaceBoundary, + bounds: { + width: Number(currentMetrics.width.toFixed(3)), + height: 
Number(currentMetrics.height.toFixed(3)), + maxRadius: Number(currentMetrics.maxRadius.toFixed(3)), + }, + }); + }; + + const config: GraphConfig = { + backgroundColor: "#05070a", + spaceSize: cosmosRuntimeConfig.spaceSize, + enableSimulation: cosmosRuntimeConfig.enableSimulation, + enableDrag: true, + enableZoom: true, + fitViewOnInit: false, + fitViewPadding: cosmosRuntimeConfig.fitViewPadding, + rescalePositions: false, + curvedLinks: cosmosRuntimeConfig.curvedLinks, + simulationDecay: cosmosRuntimeConfig.simulationDecay, + simulationGravity: cosmosRuntimeConfig.simulationGravity, + simulationCenter: cosmosRuntimeConfig.simulationCenter, + simulationRepulsion: cosmosRuntimeConfig.simulationRepulsion, + simulationLinkSpring: cosmosRuntimeConfig.simulationLinkSpring, + simulationLinkDistance: cosmosRuntimeConfig.simulationLinkDistance, + simulationFriction: cosmosRuntimeConfig.simulationFriction, + renderHoveredPointRing: true, + hoveredPointRingColor: "#35d6ff", + hoveredPointCursor: "pointer", + hoveredLinkCursor: "pointer", + hoveredLinkColor: "#ffd166", + hoveredLinkWidthIncrease: 2.5, + onSimulationStart: () => { + reportLayout("simulation-start", "running", 1); + }, + onSimulationTick: (alpha) => { + reportLayout("tick", "running", alpha); + }, + onSimulationEnd: () => { + reportLayout("simulation-end", "ended", 0); + }, + onPointMouseOver: (index) => { + const node = modelRef.current.nodes[index]; + if (!node) return; + setHovered({ kind: "node", node }); + }, + onPointMouseOut: () => { + setHovered((prev) => (prev?.kind === "node" ? null : prev)); + }, + onLinkMouseOver: (linkIndex) => { + const link = modelRef.current.linksMeta[linkIndex]; + if (!link) return; + setHovered({ kind: "link", link }); + }, + onLinkMouseOut: () => { + setHovered((prev) => (prev?.kind === "link" ? 
null : prev)); + }, + onPointClick: (index) => { + const node = modelRef.current.nodes[index]; + if (!node) return; + setPinned({ kind: "node", node }); + }, + onLinkClick: (linkIndex) => { + const link = modelRef.current.linksMeta[linkIndex]; + if (!link) return; + setPinned({ kind: "link", link }); + }, + onClick: (index) => { + if (typeof index === "number") return; + setPinned(null); + }, + onDragStart: () => { + reportLayout("drag-start", "running"); + reheatSimulation(); + }, + onDragEnd: () => { + reportLayout("drag-end", "running"); + reheatSimulation(); + }, + }; + + const graph = new Graph(container, config); + graphRef.current = graph; + if (cosmosRuntimeConfig.debugLayout) { + console.debug("[cosmos-layout]", { + event: "graph-created", + seedCentroid: { + x: Number(modelRef.current.seedMetrics.centroidX.toFixed(3)), + y: Number(modelRef.current.seedMetrics.centroidY.toFixed(3)), + }, + seedRadius: Number(modelRef.current.seedMetrics.maxRadius.toFixed(3)), + }); + } + + return () => { + setHovered(null); + setPinned(null); + graphRef.current = null; + graph.destroy(); + }; + }, []); + + useEffect(() => { + const graph = graphRef.current; + if (!graph) return; + setHovered(null); + setPinned(null); + applyGraphModel(graph, model); + if (cosmosRuntimeConfig.debugLayout) { + requestAnimationFrame(() => { + const positionedGraph = graphRef.current; + if (!positionedGraph) return; + const currentMetrics = computeLayoutMetrics(positionedGraph.getPointPositions()); + const origin = positionedGraph.spaceToScreenPosition([0, 0]); + const centroid = positionedGraph.spaceToScreenPosition([ + currentMetrics.centroidX, + currentMetrics.centroidY, + ]); + console.debug("[cosmos-layout]", { + event: "after-fit-requested", + screenOrigin: { x: Number(origin[0].toFixed(2)), y: Number(origin[1].toFixed(2)) }, + screenCentroid: { x: Number(centroid[0].toFixed(2)), y: Number(centroid[1].toFixed(2)) }, + }); + }); + } + }, [model]); + + useEffect(() => { + const graph = 
graphRef.current; + if (!graph) return; + graph.setConfig({ + focusedPointIndex: activeDetail?.kind === "node" ? activeDetail.node.index : undefined, + }); + }, [activeDetail]); + + return ( +
+
+ + {cosmosRuntimeConfig.debugLayout && ( +
+
+ Layout Debug +
+
+ phase: {layoutDebug.phase} · event: {layoutDebug.lastEvent} +
+
+ alpha: {formatMaybeNumber(layoutDebug.alpha)} · progress: {formatNumber(layoutDebug.progress)} +
+
zoom: {formatNumber(layoutDebug.zoomLevel)}
+
seed centroid
+
+ ({formatNumber(model.seedMetrics.centroidX)}, {formatNumber(model.seedMetrics.centroidY)}) +
+
+ bounds: {formatNumber(model.seedMetrics.width)} × {formatNumber(model.seedMetrics.height)} · r={formatNumber(model.seedMetrics.maxRadius)} +
+
current centroid
+
+ ({formatNumber(layoutDebug.currentMetrics.centroidX)}, {formatNumber(layoutDebug.currentMetrics.centroidY)}) +
+
+ bounds: {formatNumber(layoutDebug.currentMetrics.width)} × {formatNumber(layoutDebug.currentMetrics.height)} · r={formatNumber(layoutDebug.currentMetrics.maxRadius)} +
+
screen center
+
+ ({formatNumber(layoutDebug.screenCenter.x)}, {formatNumber(layoutDebug.screenCenter.y)}) +
+
screen origin
+
+ ({formatNumber(layoutDebug.screenOrigin.x)}, {formatNumber(layoutDebug.screenOrigin.y)}) d=({formatNumber(layoutDebug.originDelta.x)}, {formatNumber(layoutDebug.originDelta.y)}) +
+
screen centroid
+
+ ({formatNumber(layoutDebug.screenCentroid.x)}, {formatNumber(layoutDebug.screenCentroid.y)}) d=({formatNumber(layoutDebug.centroidDelta.x)}, {formatNumber(layoutDebug.centroidDelta.y)}) +
+
+ near space boundary: {layoutDebug.nearSpaceBoundary ? "yes" : "no"} +
+
+ )} + +
+
+ {pinned ? "Pinned details" : activeDetail ? "Hovered details" : "Inspector"} +
+ + {!activeDetail && ( +
+ Hover a node or edge to inspect it. Click a node or edge to pin its details. +
+ )} + + {activeDetail?.kind === "node" && ( + <> +
+ Node +
+
{activeDetail.node.text}
+ {typeof activeDetail.node.backendId === "number" && ( +
+ backend id: {activeDetail.node.backendId} +
+ )} + {activeDetail.node.isSelectedSource && ( +
+ selected source node +
+ )} + + )} + + {activeDetail?.kind === "link" && ( + <> +
+ Edge +
+
{activeDetail.link.predicateText}
+
from
+
{activeDetail.link.sourceText}
+
to
+
{activeDetail.link.targetText}
+ {typeof activeDetail.link.predicateId === "number" && ( +
+ predicate id: {activeDetail.link.predicateId} +
+ )} + + )} +
+
+ ); +}); + +function applyGraphModel(graph: Graph, model: TripleGraphModel): void { + graph.setPointPositions(model.pointPositions); + graph.setLinks(model.links); + graph.setPointColors(model.pointColors); + graph.setPointSizes(model.pointSizes); + graph.setLinkColors(model.linkColors); + graph.setLinkWidths(model.linkWidths); + graph.render(0); + requestAnimationFrame(() => { + graph.fitViewByPointPositions(Array.from(model.pointPositions), 0, cosmosRuntimeConfig.fitViewPadding); + if (cosmosRuntimeConfig.enableSimulation) { + graph.start(1); + } + }); +} + +function formatNumber(value: number): string { + return value.toFixed(2); +} + +function formatMaybeNumber(value: number | null): string { + return value === null ? "-" : value.toFixed(3); +} diff --git a/frontend/src/cosmos_config.ts b/frontend/src/cosmos_config.ts new file mode 100644 index 0000000..65df336 --- /dev/null +++ b/frontend/src/cosmos_config.ts @@ -0,0 +1,28 @@ +function parseBoolean(value: string | undefined, fallback: boolean): boolean { + if (value === undefined) return fallback; + const normalized = value.trim().toLowerCase(); + if (["1", "true", "yes", "on"].includes(normalized)) return true; + if (["0", "false", "no", "off"].includes(normalized)) return false; + return fallback; +} + +function parseNumber(value: string | undefined, fallback: number): number { + if (value === undefined) return fallback; + const parsed = Number(value); + return Number.isFinite(parsed) ? 
parsed : fallback; +} + +export const cosmosRuntimeConfig = { + enableSimulation: parseBoolean(import.meta.env.VITE_COSMOS_ENABLE_SIMULATION, true), + debugLayout: parseBoolean(import.meta.env.VITE_COSMOS_DEBUG_LAYOUT, false), + spaceSize: parseNumber(import.meta.env.VITE_COSMOS_SPACE_SIZE, 4096), + curvedLinks: parseBoolean(import.meta.env.VITE_COSMOS_CURVED_LINKS, true), + fitViewPadding: parseNumber(import.meta.env.VITE_COSMOS_FIT_VIEW_PADDING, 0.12), + simulationDecay: parseNumber(import.meta.env.VITE_COSMOS_SIMULATION_DECAY, 5000), + simulationGravity: parseNumber(import.meta.env.VITE_COSMOS_SIMULATION_GRAVITY, 0), + simulationCenter: parseNumber(import.meta.env.VITE_COSMOS_SIMULATION_CENTER, 0.05), + simulationRepulsion: parseNumber(import.meta.env.VITE_COSMOS_SIMULATION_REPULSION, 0.5), + simulationLinkSpring: parseNumber(import.meta.env.VITE_COSMOS_SIMULATION_LINK_SPRING, 1), + simulationLinkDistance: parseNumber(import.meta.env.VITE_COSMOS_SIMULATION_LINK_DISTANCE, 10), + simulationFriction: parseNumber(import.meta.env.VITE_COSMOS_SIMULATION_FRICTION, 0.1), +} as const; diff --git a/frontend/src/renderer.ts b/frontend/src/renderer.ts index 2c74169..b0d4465 100644 --- a/frontend/src/renderer.ts +++ b/frontend/src/renderer.ts @@ -76,6 +76,9 @@ export class Renderer { private selectedProgram: WebGLProgram; private neighborProgram: WebGLProgram; private vao: WebGLVertexArrayObject; + private nodeVbo: WebGLBuffer; + private lineVao: WebGLVertexArrayObject; + private lineVbo: WebGLBuffer; // Data private leaves: Leaf[] = []; @@ -88,6 +91,8 @@ export class Renderer { private leafEdgeStarts: Uint32Array = new Uint32Array(0); private leafEdgeCounts: Uint32Array = new Uint32Array(0); private maxPtSize = 256; + private useRawLineSegments = false; + private rawLineVertexCount = 0; // Multi-draw extension private multiDrawExt: any = null; @@ -163,15 +168,23 @@ export class Renderer { // Create VAO + VBO (empty for now) this.vao = gl.createVertexArray()!; + this.nodeVbo 
= gl.createBuffer()!; gl.bindVertexArray(this.vao); - const vbo = gl.createBuffer()!; - gl.bindBuffer(gl.ARRAY_BUFFER, vbo); + gl.bindBuffer(gl.ARRAY_BUFFER, this.nodeVbo); // We forced a_pos to location 0 in compileProgram gl.enableVertexAttribArray(0); gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 0, 0); gl.bindVertexArray(null); + this.lineVao = gl.createVertexArray()!; + this.lineVbo = gl.createBuffer()!; + gl.bindVertexArray(this.lineVao); + gl.bindBuffer(gl.ARRAY_BUFFER, this.lineVbo); + gl.enableVertexAttribArray(0); + gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 0, 0); + gl.bindVertexArray(null); + this.linesIbo = gl.createBuffer()!; this.selectionIbo = gl.createBuffer()!; this.neighborIbo = gl.createBuffer()!; @@ -192,7 +205,8 @@ export class Renderer { xs: Float32Array, ys: Float32Array, vertexIds: Uint32Array, - edges: Uint32Array + edges: Uint32Array, + routeLineVertices: Float32Array | null = null ): number { const t0 = performance.now(); const gl = this.gl; @@ -213,6 +227,7 @@ export class Renderer { // Upload sorted particles to GPU as STATIC VBO (never changes) gl.bindVertexArray(this.vao); + gl.bindBuffer(gl.ARRAY_BUFFER, this.nodeVbo); gl.bufferData(gl.ARRAY_BUFFER, sorted, gl.STATIC_DRAW); gl.bindVertexArray(null); @@ -236,6 +251,19 @@ export class Renderer { } this.vertexIdToSortedIndex = vertexIdToSortedIndex; + this.useRawLineSegments = routeLineVertices !== null && routeLineVertices.length > 0; + this.rawLineVertexCount = this.useRawLineSegments && routeLineVertices ? 
routeLineVertices.length / 2 : 0; + if (this.useRawLineSegments && routeLineVertices) { + this.edgeCount = edgeCount; + this.leafEdgeStarts = new Uint32Array(0); + this.leafEdgeCounts = new Uint32Array(0); + gl.bindVertexArray(this.lineVao); + gl.bindBuffer(gl.ARRAY_BUFFER, this.lineVbo); + gl.bufferData(gl.ARRAY_BUFFER, routeLineVertices, gl.STATIC_DRAW); + gl.bindVertexArray(null); + return performance.now() - t0; + } + // Remap edges from vertex IDs to sorted indices const lineIndices = new Uint32Array(edgeCount * 2); let validEdges = 0; @@ -572,24 +600,30 @@ export class Renderer { } // 5. Draw Lines if deeply zoomed in (< 20k total visible particles) - if (totalVisibleParticles < 20000 && visibleCount > 0) { + if (totalVisibleParticles < 20000) { gl.useProgram(this.lineProgram); gl.uniform2f(this.uCenterLine, this.cx, this.cy); gl.uniform2f(this.uScaleLine, (this.zoom * 2) / cw, (-this.zoom * 2) / ch); - gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.linesIbo); + if (this.useRawLineSegments) { + gl.bindVertexArray(this.lineVao); + gl.drawArrays(gl.LINES, 0, this.rawLineVertexCount); + gl.bindVertexArray(this.vao); + } else if (visibleCount > 0) { + gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.linesIbo); - for (let i = 0; i < visibleCount; i++) { - const leafIdx = this.visibleLeafIndices[i]; - const edgeCount = this.leafEdgeCounts[leafIdx]; - if (edgeCount === 0) continue; - // Each edge is 2 indices (1 line segment) - // Offset is in bytes: edgeStart * 2 (indices per edge) * 4 (bytes per uint32) - const edgeStart = this.leafEdgeStarts[leafIdx]; - gl.drawElements(gl.LINES, edgeCount * 2, gl.UNSIGNED_INT, edgeStart * 2 * 4); + for (let i = 0; i < visibleCount; i++) { + const leafIdx = this.visibleLeafIndices[i]; + const edgeCount = this.leafEdgeCounts[leafIdx]; + if (edgeCount === 0) continue; + // Each edge is 2 indices (1 line segment) + // Offset is in bytes: edgeStart * 2 (indices per edge) * 4 (bytes per uint32) + const edgeStart = this.leafEdgeStarts[leafIdx]; 
+ gl.drawElements(gl.LINES, edgeCount * 2, gl.UNSIGNED_INT, edgeStart * 2 * 4); + } + + gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null); } - - gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null); } // 6. Draw Neighbor Nodes (yellow) - drawn before selected so selected appears on top diff --git a/frontend/src/selection_queries/api.ts b/frontend/src/selection_queries/api.ts index 47503e4..c8422ef 100644 --- a/frontend/src/selection_queries/api.ts +++ b/frontend/src/selection_queries/api.ts @@ -1,4 +1,53 @@ -import type { GraphMeta, SelectionQueryMeta } from "./types"; +import type { + GraphMeta, + SelectionQueryMeta, + SelectionQueryResult, + SelectionTriple, + SelectionTripleResult, + SelectionTripleTerm, +} from "./types"; + +function numberArray(value: unknown): number[] { + if (!Array.isArray(value)) return []; + const out: number[] = []; + for (const item of value) { + if (typeof item === "number") out.push(item); + } + return out; +} + +function tripleTerm(value: unknown): SelectionTripleTerm | null { + if (!value || typeof value !== "object") return null; + const record = value as Record; + if (typeof record.type !== "string" || typeof record.value !== "string") return null; + return { + type: record.type, + value: record.value, + lang: typeof record.lang === "string" ? record.lang : undefined, + }; +} + +function tripleArray(value: unknown): SelectionTriple[] { + if (!Array.isArray(value)) return []; + const out: SelectionTriple[] = []; + for (const item of value) { + if (!item || typeof item !== "object") continue; + const record = item as Record; + const s = tripleTerm(record.s); + const p = tripleTerm(record.p); + const o = tripleTerm(record.o); + if (!s || !p || !o) continue; + out.push({ + s, + p, + o, + subject_id: typeof record.subject_id === "number" ? record.subject_id : undefined, + predicate_id: typeof record.predicate_id === "number" ? record.predicate_id : undefined, + object_id: typeof record.object_id === "number" ? 
record.object_id : undefined, + }); + } + return out; +} export async function fetchSelectionQueries(signal?: AbortSignal): Promise { const res = await fetch("/api/selection_queries", { signal }); @@ -12,7 +61,7 @@ export async function runSelectionQuery( selectedIds: number[], graphMeta: GraphMeta | null, signal: AbortSignal -): Promise { +): Promise { const body = { query_id: queryId, selected_ids: selectedIds, @@ -29,9 +78,40 @@ export async function runSelectionQuery( }); if (!res.ok) throw new Error(`POST /api/selection_query failed: ${res.status}`); const data = await res.json(); - const ids: unknown = data?.neighbor_ids; - if (!Array.isArray(ids)) return []; - const out: number[] = []; - for (const id of ids) if (typeof id === "number") out.push(id); - return out; + + return { + queryId: typeof data?.query_id === "string" ? data.query_id : queryId, + selectedIds: numberArray(data?.selected_ids), + neighborIds: numberArray(data?.neighbor_ids), + }; +} + +export async function runSelectionTripleQuery( + queryId: string, + selectedIds: number[], + graphMeta: GraphMeta | null, + signal: AbortSignal +): Promise { + const body = { + query_id: queryId, + selected_ids: selectedIds, + node_limit: typeof graphMeta?.node_limit === "number" ? graphMeta.node_limit : undefined, + edge_limit: typeof graphMeta?.edge_limit === "number" ? graphMeta.edge_limit : undefined, + graph_query_id: typeof graphMeta?.graph_query_id === "string" ? graphMeta.graph_query_id : undefined, + }; + + const res = await fetch("/api/selection_triples", { + method: "POST", + headers: { "content-type": "application/json" }, + body: JSON.stringify(body), + signal, + }); + if (!res.ok) throw new Error(`POST /api/selection_triples failed: ${res.status}`); + const data = await res.json(); + + return { + queryId: typeof data?.query_id === "string" ? 
data.query_id : queryId, + selectedIds: numberArray(data?.selected_ids), + triples: tripleArray(data?.triples), + }; } diff --git a/frontend/src/selection_queries/index.ts b/frontend/src/selection_queries/index.ts index b40a0d2..a89f097 100644 --- a/frontend/src/selection_queries/index.ts +++ b/frontend/src/selection_queries/index.ts @@ -1,3 +1,9 @@ -export { fetchSelectionQueries, runSelectionQuery } from "./api"; -export type { GraphMeta, SelectionQueryMeta } from "./types"; - +export { fetchSelectionQueries, runSelectionQuery, runSelectionTripleQuery } from "./api"; +export type { + GraphMeta, + GraphRoutePoint, + GraphRouteSegment, + SelectionQueryMeta, + SelectionTriple, + SelectionTripleResult, +} from "./types"; diff --git a/frontend/src/selection_queries/types.ts b/frontend/src/selection_queries/types.ts index d7eb77b..a85a998 100644 --- a/frontend/src/selection_queries/types.ts +++ b/frontend/src/selection_queries/types.ts @@ -8,9 +8,49 @@ export type GraphMeta = { edge_limit?: number; nodes?: number; edges?: number; + layout_engine?: string; + layout_root_iri?: string | null; +}; + +export type GraphRoutePoint = { + x: number; + y: number; +}; + +export type GraphRouteSegment = { + edge_index: number; + kind: string; + points: GraphRoutePoint[]; }; export type SelectionQueryMeta = { id: string; label: string; }; + +export type SelectionQueryResult = { + queryId: string; + selectedIds: number[]; + neighborIds: number[]; +}; + +export type SelectionTripleTerm = { + type: string; + value: string; + lang?: string; +}; + +export type SelectionTriple = { + s: SelectionTripleTerm; + p: SelectionTripleTerm; + o: SelectionTripleTerm; + subject_id?: number; + predicate_id?: number; + object_id?: number; +}; + +export type SelectionTripleResult = { + queryId: string; + selectedIds: number[]; + triples: SelectionTriple[]; +}; diff --git a/frontend/src/triple_graph.ts b/frontend/src/triple_graph.ts new file mode 100644 index 0000000..f316666 --- /dev/null +++ 
b/frontend/src/triple_graph.ts @@ -0,0 +1,363 @@ +import { cosmosRuntimeConfig } from "./cosmos_config"; +import type { SelectionTriple } from "./selection_queries"; + +export type TripleGraphTerm = SelectionTriple["s"]; + +export type TripleGraphNode = { + key: string; + index: number; + term: TripleGraphTerm; + text: string; + backendId?: number; + isSelectedSource: boolean; +}; + +export type TripleGraphLink = { + index: number; + sourceIndex: number; + targetIndex: number; + sourceText: string; + targetText: string; + predicate: SelectionTriple["p"]; + predicateText: string; + predicateId?: number; + triple: SelectionTriple; +}; + +export type TripleGraphModel = { + nodes: TripleGraphNode[]; + linksMeta: TripleGraphLink[]; + pointPositions: Float32Array; + seedMetrics: GraphLayoutMetrics; + pointColors: Float32Array; + pointSizes: Float32Array; + links: Float32Array; + linkColors: Float32Array; + linkWidths: Float32Array; + nodeCount: number; + edgeCount: number; +}; + +export type GraphLayoutMetrics = { + centroidX: number; + centroidY: number; + minX: number; + maxX: number; + minY: number; + maxY: number; + width: number; + height: number; + maxRadius: number; +}; + +type MutableNode = { + term: TripleGraphTerm; + text: string; + backendId?: number; + isSelectedSource: boolean; +}; + +export function buildTripleGraphModel(triples: SelectionTriple[], selectedIds: number[]): TripleGraphModel { + const selectedSet = new Set(selectedIds); + const nodeMap = new Map(); + + for (const triple of triples) { + addNode(nodeMap, triple.s, triple.subject_id, selectedSet); + addNode(nodeMap, triple.o, triple.object_id, selectedSet); + } + + const nodes = Array.from(nodeMap.entries()) + .sort(([leftKey], [rightKey]) => leftKey.localeCompare(rightKey)) + .map(([key, node], index) => ({ + key, + index, + term: node.term, + text: node.text, + backendId: node.backendId, + isSelectedSource: node.isSelectedSource, + })); + + const nodeIndexByKey = new Map(); + for (const node of 
nodes) { + nodeIndexByKey.set(node.key, node.index); + } + + const linksMeta: TripleGraphLink[] = []; + for (const triple of triples) { + const sourceIndex = nodeIndexByKey.get(termKey(triple.s)); + const targetIndex = nodeIndexByKey.get(termKey(triple.o)); + if (sourceIndex === undefined || targetIndex === undefined) continue; + linksMeta.push({ + index: linksMeta.length, + sourceIndex, + targetIndex, + sourceText: formatTermText(triple.s), + targetText: formatTermText(triple.o), + predicate: triple.p, + predicateText: formatTermText(triple.p), + predicateId: triple.predicate_id, + triple, + }); + } + + const pointPositions = buildPointPositions(nodes); + const seedMetrics = computeLayoutMetrics(pointPositions); + const pointColors = buildPointColors(nodes); + const pointSizes = buildPointSizes(nodes); + const links = buildLinks(linksMeta); + const linkColors = buildLinkColors(linksMeta); + const linkWidths = buildLinkWidths(linksMeta); + + return { + nodes, + linksMeta, + pointPositions, + seedMetrics, + pointColors, + pointSizes, + links, + linkColors, + linkWidths, + nodeCount: nodes.length, + edgeCount: linksMeta.length, + }; +} + +function addNode( + nodeMap: Map, + term: TripleGraphTerm, + backendId: number | undefined, + selectedSet: Set +): void { + const key = termKey(term); + const existing = nodeMap.get(key); + const isSelectedSource = typeof backendId === "number" && selectedSet.has(backendId); + if (existing) { + if (existing.backendId === undefined && typeof backendId === "number") { + existing.backendId = backendId; + } + if (isSelectedSource) existing.isSelectedSource = true; + return; + } + nodeMap.set(key, { + term, + text: formatTermText(term), + backendId, + isSelectedSource, + }); +} + +function termKey(term: TripleGraphTerm): string { + return `${term.type}\x00${term.value}`; +} + +function formatTermText(term: TripleGraphTerm): string { + if (term.type === "literal") { + if (term.lang) return `"${term.value}"@${term.lang}`; + return 
`"${term.value}"`; + } + return term.value; +} + +function buildPointPositions(nodes: TripleGraphNode[]): Float32Array { + const out = new Float32Array(nodes.length * 2); + const simulationSpaceCenter = cosmosRuntimeConfig.spaceSize / 2; + if (nodes.length === 0) return out; + if (nodes.length === 1) { + out[0] = simulationSpaceCenter; + out[1] = simulationSpaceCenter; + return out; + } + + for (const node of nodes) { + const primaryHash = hashString(node.key); + const secondaryHash = hashString(`${node.key}\x01`); + const angle = ((primaryHash % 3600) / 3600) * Math.PI * 2; + const radius = 80 + (((primaryHash >>> 12) % 1000) / 1000) * 70; + const jitterX = ((((secondaryHash >>> 4) % 200) / 200) - 0.5) * 18; + const jitterY = ((((secondaryHash >>> 12) % 200) / 200) - 0.5) * 18; + out[node.index * 2] = Math.cos(angle) * radius + jitterX; + out[node.index * 2 + 1] = Math.sin(angle) * radius + jitterY; + } + + recenterPointPositions(out); + offsetPointPositionsToSimulationCenter(out, simulationSpaceCenter); + return out; +} + +export function computeLayoutMetrics(pointPositions: ArrayLike): GraphLayoutMetrics { + const pairCount = Math.floor(pointPositions.length / 2); + if (pairCount === 0) { + return { + centroidX: 0, + centroidY: 0, + minX: 0, + maxX: 0, + minY: 0, + maxY: 0, + width: 0, + height: 0, + maxRadius: 0, + }; + } + + let sumX = 0; + let sumY = 0; + let minX = Number.POSITIVE_INFINITY; + let maxX = Number.NEGATIVE_INFINITY; + let minY = Number.POSITIVE_INFINITY; + let maxY = Number.NEGATIVE_INFINITY; + + for (let i = 0; i < pairCount; i++) { + const x = pointPositions[i * 2]; + const y = pointPositions[i * 2 + 1]; + sumX += x; + sumY += y; + if (x < minX) minX = x; + if (x > maxX) maxX = x; + if (y < minY) minY = y; + if (y > maxY) maxY = y; + } + + const centroidX = sumX / pairCount; + const centroidY = sumY / pairCount; + let maxRadius = 0; + for (let i = 0; i < pairCount; i++) { + const dx = pointPositions[i * 2] - centroidX; + const dy = 
pointPositions[i * 2 + 1] - centroidY; + const radius = Math.hypot(dx, dy); + if (radius > maxRadius) maxRadius = radius; + } + + return { + centroidX, + centroidY, + minX, + maxX, + minY, + maxY, + width: maxX - minX, + height: maxY - minY, + maxRadius, + }; +} + +function recenterPointPositions(pointPositions: Float32Array): void { + const metrics = computeLayoutMetrics(pointPositions); + if (metrics.centroidX === 0 && metrics.centroidY === 0) return; + const pairCount = Math.floor(pointPositions.length / 2); + for (let i = 0; i < pairCount; i++) { + pointPositions[i * 2] -= metrics.centroidX; + pointPositions[i * 2 + 1] -= metrics.centroidY; + } +} + +function offsetPointPositionsToSimulationCenter(pointPositions: Float32Array, center: number): void { + if (center === 0) return; + const pairCount = Math.floor(pointPositions.length / 2); + for (let i = 0; i < pairCount; i++) { + pointPositions[i * 2] += center; + pointPositions[i * 2 + 1] += center; + } +} + +function buildPointColors(nodes: TripleGraphNode[]): Float32Array { + const out = new Float32Array(nodes.length * 4); + for (const node of nodes) { + const offset = node.index * 4; + const color = node.isSelectedSource ? [53, 214, 255, 1] : colorFromHash(node.key, 210, 35, 58, 18, 8); + out[offset] = color[0]; + out[offset + 1] = color[1]; + out[offset + 2] = color[2]; + out[offset + 3] = color[3]; + } + return out; +} + +function buildPointSizes(nodes: TripleGraphNode[]): Float32Array { + const out = new Float32Array(nodes.length); + for (const node of nodes) { + out[node.index] = node.isSelectedSource ? 
11 : 7.5; + } + return out; +} + +function buildLinks(linksMeta: TripleGraphLink[]): Float32Array { + const out = new Float32Array(linksMeta.length * 2); + for (const link of linksMeta) { + const offset = link.index * 2; + out[offset] = link.sourceIndex; + out[offset + 1] = link.targetIndex; + } + return out; +} + +function buildLinkColors(linksMeta: TripleGraphLink[]): Float32Array { + const out = new Float32Array(linksMeta.length * 4); + for (const link of linksMeta) { + const offset = link.index * 4; + const color = colorFromHash(link.predicateText, 28, 65, 58, 32, 10); + out[offset] = color[0]; + out[offset + 1] = color[1]; + out[offset + 2] = color[2]; + out[offset + 3] = color[3]; + } + return out; +} + +function buildLinkWidths(linksMeta: TripleGraphLink[]): Float32Array { + const out = new Float32Array(linksMeta.length); + for (const link of linksMeta) { + out[link.index] = 1.8; + } + return out; +} + +function colorFromHash( + value: string, + baseHue: number, + hueRange: number, + lightness: number, + saturation: number, + lightnessRange: number +): [number, number, number, number] { + const hash = hashString(value); + const hue = (baseHue + (hash % hueRange) + 360) % 360; + const sat = saturation + ((hash >>> 10) % 10); + const light = lightness + ((hash >>> 20) % lightnessRange) - lightnessRange / 2; + const [r, g, b] = hslToRgb(hue / 360, sat / 100, light / 100); + return [r, g, b, 1]; +} + +function hslToRgb(h: number, s: number, l: number): [number, number, number] { + if (s === 0) { + const value = Math.round(l * 255); + return [value, value, value]; + } + + const q = l < 0.5 ? 
l * (1 + s) : l + s - l * s; + const p = 2 * l - q; + const r = hueToRgb(p, q, h + 1 / 3); + const g = hueToRgb(p, q, h); + const b = hueToRgb(p, q, h - 1 / 3); + return [Math.round(r * 255), Math.round(g * 255), Math.round(b * 255)]; +} + +function hueToRgb(p: number, q: number, t: number): number { + let value = t; + if (value < 0) value += 1; + if (value > 1) value -= 1; + if (value < 1 / 6) return p + (q - p) * 6 * value; + if (value < 1 / 2) return q; + if (value < 2 / 3) return p + (q - p) * (2 / 3 - value) * 6; + return p; +} + +function hashString(value: string): number { + let hash = 2166136261; + for (let i = 0; i < value.length; i++) { + hash ^= value.charCodeAt(i); + hash = Math.imul(hash, 16777619); + } + return hash >>> 0; +} diff --git a/frontend/src/vite-env.d.ts b/frontend/src/vite-env.d.ts new file mode 100644 index 0000000..99f4f74 --- /dev/null +++ b/frontend/src/vite-env.d.ts @@ -0,0 +1,21 @@ +/// + +interface ImportMetaEnv { + readonly VITE_BACKEND_URL?: string; + readonly VITE_COSMOS_ENABLE_SIMULATION?: string; + readonly VITE_COSMOS_DEBUG_LAYOUT?: string; + readonly VITE_COSMOS_SPACE_SIZE?: string; + readonly VITE_COSMOS_CURVED_LINKS?: string; + readonly VITE_COSMOS_FIT_VIEW_PADDING?: string; + readonly VITE_COSMOS_SIMULATION_DECAY?: string; + readonly VITE_COSMOS_SIMULATION_GRAVITY?: string; + readonly VITE_COSMOS_SIMULATION_CENTER?: string; + readonly VITE_COSMOS_SIMULATION_REPULSION?: string; + readonly VITE_COSMOS_SIMULATION_LINK_SPRING?: string; + readonly VITE_COSMOS_SIMULATION_LINK_DISTANCE?: string; + readonly VITE_COSMOS_SIMULATION_FRICTION?: string; +} + +interface ImportMeta { + readonly env: ImportMetaEnv; +} diff --git a/radial_sugiyama/.dockerignore b/radial_sugiyama/.dockerignore new file mode 100644 index 0000000..a4caf20 --- /dev/null +++ b/radial_sugiyama/.dockerignore @@ -0,0 +1,6 @@ +target +out +data +.env +*.pdf +VISUALIZATION_TIMELINE.md diff --git 
a/radial_sugiyama/A_Radial_Adaptation_of_the_Sugiyama_Framework_for_Visualizing_Hierarchical_Information.pdf b/radial_sugiyama/A_Radial_Adaptation_of_the_Sugiyama_Framework_for_Visualizing_Hierarchical_Information.pdf new file mode 100644 index 0000000..665a14d Binary files /dev/null and b/radial_sugiyama/A_Radial_Adaptation_of_the_Sugiyama_Framework_for_Visualizing_Hierarchical_Information.pdf differ diff --git a/radial_sugiyama/Cargo.lock b/radial_sugiyama/Cargo.lock new file mode 100644 index 0000000..8e7529c --- /dev/null +++ b/radial_sugiyama/Cargo.lock @@ -0,0 +1,286 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "dotenvy" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + +[[package]] +name = "getrandom" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasip2", +] + +[[package]] +name = "itoa" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f42a60cbdf9a97f5d2305f08a87dc4e09308d1276d28c869c684d7777685682" + +[[package]] +name = "libc" +version = "0.2.183" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5b646652bf6661599e1da8901b3b9522896f01e736bad5f723fe7a3a27f899d" + +[[package]] +name = "memchr" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" + +[[package]] +name = "oxilangtag" +version = "0.1.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "23f3f87617a86af77fa3691e6350483e7154c2ead9f1261b75130e21ca0f8acb" +dependencies = [ + "serde", +] + +[[package]] +name = "oxiri" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54b4ed3a7192fa19f5f48f99871f2755047fabefd7f222f12a1df1773796a102" + +[[package]] +name = "oxrdf" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0afd5c28e4a399c57ee2bc3accd40c7b671fdc7b6537499f14e95b265af7d7e0" +dependencies = [ + "oxilangtag", + "oxiri", + "rand", + "thiserror", +] + +[[package]] +name = "oxttl" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f03fd471bd54c23d76631c0a2677aa4bb308d905f6e491ee35dcb0732b7c5c6c" +dependencies = [ + "memchr", + "oxilangtag", + "oxiri", + "oxrdf", + "thiserror", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "proc-macro2" +version = "1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "radial_sugiyama" +version = "0.1.0" +dependencies = [ + "dotenvy", + "oxrdf", + "oxttl", + "serde", + "serde_json", + "svg", +] + +[[package]] +name = 
"rand" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.149" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" +dependencies = [ + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "svg" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "700efb40f3f559c23c18b446e8ed62b08b56b2bb3197b36d57e0470b4102779e" + +[[package]] +name = "syn" +version = "2.0.117" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "thiserror" +version = "2.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "unicode-ident" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "wasip2" +version = "1.0.2+wasi-0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wit-bindgen" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" + +[[package]] +name = "zerocopy" +version = "0.8.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2578b716f8a7a858b7f02d5bd870c14bf4ddbbcf3a4c05414ba6503640505e3" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e6cc098ea4d3bd6246687de65af3f920c430e236bee1e3bf2e441463f08a02f" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zmij" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/radial_sugiyama/Cargo.toml b/radial_sugiyama/Cargo.toml new file mode 100644 index 0000000..f30494b --- /dev/null +++ b/radial_sugiyama/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "radial_sugiyama" +version = "0.1.0" +edition = "2021" + +[lib] +path = "src/lib.rs" + +[dependencies] +dotenvy = "0.15.7" +oxrdf = "0.3.3" +oxttl = "0.2.3" +serde = { version = "1.0.228", features = ["derive"] } +serde_json = "1.0.145" +svg = "0.17.0" diff --git a/radial_sugiyama/Dockerfile b/radial_sugiyama/Dockerfile new file mode 100644 index 0000000..c745723 --- /dev/null +++ b/radial_sugiyama/Dockerfile @@ -0,0 +1,20 @@ +FROM rust:bookworm AS builder + +WORKDIR /src + +COPY Cargo.toml Cargo.lock ./ +COPY src ./src + +RUN cargo build --release + +FROM debian:bookworm-slim + +RUN apt-get update \ + && apt-get install -y --no-install-recommends ca-certificates \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /workspace + +COPY --from=builder /src/target/release/radial_sugiyama /usr/local/bin/radial_sugiyama + +CMD ["radial_sugiyama"] diff --git a/radial_sugiyama/GO_PIPELINE_INTERSECTION.md b/radial_sugiyama/GO_PIPELINE_INTERSECTION.md new file mode 100644 index 0000000..83f6b0a --- /dev/null +++ b/radial_sugiyama/GO_PIPELINE_INTERSECTION.md @@ -0,0 +1,144 @@ +# Radial Sugiyama vs Go Snapshot Pipeline + +This note delimits the algorithmic intersection between the Rust pipeline in `radial_sugiyama/` and the Go snapshot/export path in: + +- `backend_go/graph_export.go` +- `backend_go/graph_snapshot.go` + +The goal is not to describe integration mechanics yet, but to mark where the two implementations solve the same problem, where they only touch indirectly, and where they are solving different problems. + +## Scope + +The Rust pipeline is a hierarchy-specific layout pipeline: + +1. import ontology hierarchy from Turtle +2. optionally filter to a rooted descendant subtree +3. validate DAG structure +4. 
assign hierarchy levels +5. insert dummy nodes for long edges +6. reduce crossings +7. assign coordinates +8. project to radial space +9. generate routed edge artifacts +10. export SVG + +The Go path is a snapshot/materialization pipeline: + +1. query predicates and edges from SPARQL +2. accumulate nodes and edges +3. build a graph response +4. run a lightweight hierarchy layering + radial placement +5. attach labels +6. return JSON to the frontend + +Because of that, the true intersection is narrow in `graph_export.go` and broader in the layout section of `graph_snapshot.go`. + +## Legend + +- `Direct overlap`: both sides implement essentially the same algorithmic concern +- `Adjacent overlap`: one side prepares or consumes the same kind of structure, but the algorithm differs materially +- `No overlap`: the stage exists only on one side + +## Intersection with `graph_export.go` + +`graph_export.go` overlaps with the Rust pipeline only at graph materialization time. + +| Algorithmic stage | Rust pipeline | `graph_export.go` | Intersection | Notes | +| --- | --- | --- | --- | --- | +| Node identity and deduplication | `ttl.rs` maps class IRIs to stable node indices | `graphAccumulator.getOrAddNode` maps SPARQL terms to stable node IDs | Direct overlap | Both build a unique node set from repeated source records. | +| Edge materialization | `ttl.rs` emits `superclass -> subclass` edges and deduplicates repeats | `graphAccumulator.addBindings` emits `source -> target` edges from SPARQL bindings | Adjacent overlap | Both convert raw triples/bindings into an in-memory graph, but Rust is specialized to `rdfs:subClassOf` while Go is predicate-agnostic. | +| Literal / blank-node filtering | `ttl.rs` ignores blank/literal hierarchy endpoints | `getOrAddNode` skips literals and optionally keeps blank nodes | Adjacent overlap | Similar sanitation step, but not identical semantics. 
| +| Predicate preservation | Rust discards all predicates except `rdfs:subClassOf` | Go preserves predicate IDs through `PredicateDict` | No overlap | This is Go-only in the compared files. | +| Graph limits / capacity management | Rust does not enforce snapshot-style node and edge caps here | Go enforces `nodeLimit` and preallocates with edge hints | No overlap | This is an operational concern of the Go snapshot path. | + +### Boundary for `graph_export.go` + +The clean algorithmic seam is: + +- Go owns generic SPARQL binding ingestion and generic graph materialization. +- Rust owns hierarchy-specialized interpretation once a hierarchy graph has already been isolated. + +That means `graph_export.go` is not competing with the Rust layout pipeline. It is only producing the kind of node/edge structure that Rust would eventually need as input. + +## Intersection with `graph_snapshot.go` + +`graph_snapshot.go` intersects with the Rust pipeline in two different regions: + +1. graph acquisition and hierarchy preparation +2. lightweight layout assignment + +## Stage-by-stage comparison + +| Algorithmic stage | Rust pipeline | `graph_snapshot.go` | Intersection | Notes | +| --- | --- | --- | --- | --- | +| Source acquisition | `graph_from_ttl_path` parses Turtle directly | `fetchGraphSnapshot` queries SPARQL in batches | Adjacent overlap | Both acquire a graph, but from different upstream sources. | +| Hierarchy graph extraction | Rust keeps only `rdfs:subClassOf` during import | Go accepts a `graph_query_id` and accumulates whatever that query returns | Adjacent overlap | The overlap is meaningful only when the Go query is hierarchy-like. | +| Rooted subtree filtering | `filter_graph_to_descendants` keeps one configured root and its descendants | No equivalent in these two Go files | No overlap | This is currently Rust-only. 
| +| Cycle detection / DAG validation | `compute_hierarchy_levels` rejects cyclic graphs | `levelSynchronousKahnLayers` returns a `CycleError` if not all nodes are processed | Direct overlap | Both need a DAG to continue with hierarchy layout. | +| Level assignment | Rust computes longest-path hierarchy levels | Go computes level-synchronous Kahn layers | Direct overlap | Same problem, different algorithm. Both assign ring depth from DAG structure. | +| Per-level ordering | Rust later optimizes order for crossings | Go sorts each layer lexicographically by IRI | Adjacent overlap | Both define an order inside a level, but Go is a simple deterministic ordering while Rust is layout-driven. | +| Radial node placement | Rust projects coordinates to rings after Sugiyama coordinate assignment | Go uses `radialPositionsFromLayers` to place each layer on a ring | Direct overlap | Same output shape, very different sophistication. | +| Coordinate shifting / scaling controls | Rust has configurable radius, spacing, borders, and positive-coordinate shifting | Go uses a fixed `maxR = 5000.0` radial envelope | Adjacent overlap | Both map levels to 2D coordinates, but only Rust exposes tuned geometry controls. | +| Label enrichment | Rust keeps node labels as imported IRIs | Go fetches `rdfs:label` after layout | Adjacent overlap | Both carry node naming, but the enrichment algorithm is currently Go-only. | +| Response packaging | Rust writes SVG and layout artifacts | Go returns `GraphResponse` JSON plus metadata | No overlap | Same graph, different downstream consumers. 
| + +## Rust-only algorithms with no counterpart in the compared Go files + +These parts of the Rust pipeline do not currently intersect with `graph_export.go` or `graph_snapshot.go`: + +- rooted descendant filtering +- dummy-node insertion for long edges +- crossing reduction / sifting +- coordinate assignment before radial projection +- adaptive / packed / distributed ring projection modes +- routed edge generation +- layout artifact generation +- SVG rendering and export + +These are the parts that make the Rust pipeline a true Sugiyama-style layout engine rather than a simple radial snapshot placer. + +## Go-only algorithms with no counterpart in the Rust pipeline + +These parts of the compared Go files do not currently exist in Rust: + +- predicate dictionary construction from SPARQL results +- batched SPARQL edge fetching with memory management +- snapshot limits and backend metadata packaging +- `rdfs:label` lookup through SPARQL +- generic graph export over arbitrary predicate sets + +These are acquisition and serving concerns rather than layout concerns. + +## Algorithmic ownership boundary + +If the future integration wants a clean division of responsibility, the strongest ownership boundary is: + +### Go-owned stages + +- query execution against AnzoGraph / SPARQL +- predicate-aware graph accumulation +- generic graph snapshot materialization +- label fetching and API response orchestration + +### Rust-owned stages + +- hierarchy-specific filtering +- hierarchy-level assignment +- Sugiyama expansion with dummy nodes +- crossing minimization +- coordinate assignment +- radial projection and route generation +- layout artifact production + +## Most important practical conclusion + +At algorithm granularity, the Rust pipeline intersects only lightly with `graph_export.go`, but it intersects substantially with the hierarchy-layout portion of `graph_snapshot.go`. 
+ +The main replacement candidates in a future integration are therefore not the generic export/materialization routines in `graph_export.go`, but these hierarchy-layout steps currently performed by `graph_snapshot.go`: + +1. DAG validation / cycle detection +2. layer assignment +3. per-layer ordering +4. radial coordinate generation + +Everything after that depends on how much of the Rust layout artifact model the future integration wants to expose to the frontend. diff --git a/radial_sugiyama/VISUALIZATION_TIMELINE.md b/radial_sugiyama/VISUALIZATION_TIMELINE.md new file mode 100644 index 0000000..9040603 --- /dev/null +++ b/radial_sugiyama/VISUALIZATION_TIMELINE.md @@ -0,0 +1,141 @@ +# Graph Visualization Improvement Timeline + +This document records the main ways the graph visualization pipeline has been refined during the current Rust migration and tuning work. + +## 2026-03-16 — Baseline migration and pipeline setup + +- Ported the radial Sugiyama-style layout pipeline from the Java implementation into the Rust crate `radial_sugiyama`. 
+- Kept the overall structure: + - hierarchy leveling + - dummy-node insertion + - crossing reduction + - coordinate assignment + - radial projection +- Changed the leveling source on purpose for the target use case: + - instead of centrality-based levels, levels are computed as hierarchy rings for a DAG + - this guarantees superclass/interface nodes are placed on inner rings and subclasses on outer rings + +## 2026-03-16 — Improve parity with the Java implementation + +- Closed Java → Rust gaps in crossing reduction: + - restored horizontal crossing counting + - restored mixed horizontal/vertical crossing counting + - aligned the sifting stage more closely with the active Java implementation +- Added richer layout artifacts so the pipeline outputs not only node coordinates, but also: + - edge offsets + - routed edge shapes + - routed node information + - layout center +- Ported route generation logic for: + - spiral inter-level edges + - intra-level edges + - straight root-level edges + +## 2026-03-16 — Add ontology input through Turtle + +- Added a Turtle import layer using `oxttl`. +- Imported only `rdfs:subClassOf` triples. +- Mapped ontology class IRIs to graph nodes. +- Preserved edge direction as: + - `superclass -> subclass` +- This made the layout pipeline usable directly from ontology data instead of requiring manual graph construction. + +## 2026-03-16 — Add environment-based execution and layout controls + +- Added a `.env`-driven runner so the pipeline can be configured without recompiling. +- Moved the main geometric drawing constants into env-backed config: + - input file location + - output location + - minimum radius + - level spacing + - positive-coordinate shifting + - spiral sampling quality + - border and node-distance scaling +- This was the first step toward making the visualization tunable instead of fixed. + +## 2026-03-16 — Add SVG export as the final output step + +- Added SVG generation after layout execution. 
+- Reused the computed graph geometry instead of inventing a separate renderer: + - node coordinates become SVG circles + - routed edge points become SVG paths + - ring levels become background circles + - labels are drawn from node IRIs +- This made the pipeline produce a directly inspectable visual artifact. + +## 2026-03-16 — Investigate readability problems in the first SVG output + +- Observed two major problems in the rendered output: + - many nodes on the same level were visually packed into a small arc of the ring + - some edges wrapped around the center with very long spiral paths +- Determined that these were not only SVG issues: + - node clustering came from the current radial projection rule + - edge wrapping came from the routed edge model and its offset-based spiral construction +- Compared this behavior with the paper and confirmed: + - the paper intentionally allows packed angular spans + - the paper intentionally allows winding spiral edges + - but these choices may be undesirable for the current ontology-navigation use case + +## 2026-03-16 — Add an SVG straight-edge mode for experimentation + +- Added `RADIAL_SVG_SHORTEST_EDGES` so the SVG renderer could ignore routed edge directions/offsets and draw direct shortest node-to-node segments instead. 
+- This improved edge length visually, but it introduced a conceptual mismatch: + - crossing reduction had optimized the graph for wrapped spiral edges + - rendering direct shortest segments reintroduced many crossings +- Result: + - useful as a diagnostic/preview mode + - not a principled replacement for the original routing objective + +## 2026-03-16 — Add configurable ring distribution mode + +- Added `RADIAL_RING_DISTRIBUTION` with two modes: + - `packed` + - `distributed` +- `packed` keeps the paper/Java-style projection: + - one global width is used to derive angular positions + - narrower levels may occupy only part of the circle +- `distributed` changes only the projection step: + - nodes on the same level are spread around the full ring + - ring order is preserved, but the level fills the full `2π` +- This was introduced specifically to improve readability when the packed projection makes ontology branches appear collapsed. + +## 2026-03-16 — Restrict the ontology view to the BFO `entity` subtree + +- Added `RADIAL_ROOT_CLASS_IRI`. +- Defaulted it to: + - `http://purl.obolibrary.org/obo/BFO_0000001` +- Added a preprocessing filter step that: + - imports the full `subClassOf` graph + - finds the configured root class by exact IRI + - keeps only the root and its descendants + - discards unrelated ontology branches before layout +- This makes the visualization more focused and reduces clutter for the target ontology exploration workflow. 
+ +## Current visualization controls + +The current pipeline now supports these major readability/behavior controls through `.env`: + +- `RADIAL_ROOT_CLASS_IRI` — choose the ontology subtree root +- `RADIAL_RING_DISTRIBUTION` — choose packed vs distributed ring projection +- `RADIAL_SVG_SHORTEST_EDGES` — choose routed edges vs direct shortest SVG segments +- `RADIAL_MIN_RADIUS` +- `RADIAL_LEVEL_DISTANCE` +- `RADIAL_NODE_DISTANCE` +- `RADIAL_SPIRAL_QUALITY` + +## Current tradeoffs + +- `packed` rings are closer to the paper and Java behavior, but can visually cluster nodes. +- `distributed` rings are more readable, but deviate from the original projection philosophy. +- routed spiral edges are more consistent with the crossing-reduction objective, but can look long and unintuitive. +- shortest SVG edges are visually direct, but may contradict the layout’s crossing-minimization assumptions. +- subtree filtering around BFO `entity` improves focus, but intentionally hides unrelated ontology regions. + +## Current direction of improvement + +The visualization work is moving toward a readable ontology-browser layout rather than strict reproduction of the original paper. The main current themes are: + +1. keep the hierarchical ring semantics +2. reduce clutter by filtering to a meaningful ontology root +3. make projection and rendering behavior configurable +4. 
improve readability without discarding the useful parts of the original radial Sugiyama pipeline diff --git a/radial_sugiyama/out/layout.svg b/radial_sugiyama/out/layout.svg new file mode 100644 index 0000000..2235cb8 --- /dev/null +++ b/radial_sugiyama/out/layout.svg @@ -0,0 +1,985 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/radial_sugiyama/src/bin/radial_sugiyama_go_bridge.rs b/radial_sugiyama/src/bin/radial_sugiyama_go_bridge.rs new file mode 100644 index 0000000..7785a21 --- /dev/null +++ b/radial_sugiyama/src/bin/radial_sugiyama_go_bridge.rs @@ -0,0 +1,38 @@ +use std::io::{self, Read, Write}; +use std::process::ExitCode; + +use radial_sugiyama::{ + process_go_bridge_request_with_options, BridgeRuntimeConfig, EnvConfig, GoBridgeRequest, +}; + +fn main() -> ExitCode { + if let Err(error) = run() { + eprintln!("{error}"); + return ExitCode::FAILURE; + } + ExitCode::SUCCESS +} + +fn run() -> Result<(), Box> { + let env_config = EnvConfig::from_env()?; + let mut input = String::new(); + io::stdin().read_to_string(&mut input)?; + + let request: GoBridgeRequest = serde_json::from_str(&input)?; + let response = process_go_bridge_request_with_options( + request, + BridgeRuntimeConfig { + layout: env_config.layout, + svg: env_config.svg, + svg_output_path: Some(env_config.output_path()), + canonicalize_input: true, + }, + )?; + + let stdout = io::stdout(); + let mut handle = stdout.lock(); + serde_json::to_writer(&mut handle, &response)?; + handle.write_all(b"\n")?; + + Ok(()) +} diff --git a/radial_sugiyama/src/bridge.rs b/radial_sugiyama/src/bridge.rs new file mode 100644 index 0000000..52a9418 --- /dev/null +++ b/radial_sugiyama/src/bridge.rs @@ -0,0 +1,784 @@ +use std::collections::{HashMap, HashSet, VecDeque}; +use std::error::Error; +use std::fmt::{Display, Formatter}; +use std::fs::create_dir_all; +use std::path::PathBuf; + +use serde::{Deserialize, Serialize}; + +use crate::{ + layout_radial_hierarchy_with_artifacts, write_svg_path_with_options, Edge, EdgeRouteKind, + Graph, 
LayoutArtifacts, LayoutConfig, LayoutError, Node, SvgConfig, SvgExportError, +}; + +#[derive(Debug)] +pub enum BridgeError { + DuplicateNodeId { + node_id: u32, + }, + DuplicateEdgeIndex { + edge_index: usize, + }, + MissingNodeRef { + edge_index: usize, + node_id: u32, + }, + RootNotFound { + root_iri: String, + }, + NoDescendants { + root_iri: String, + }, + CreateOutputDir { + path: PathBuf, + source: std::io::Error, + }, + SvgExport(SvgExportError), + Layout(LayoutError), +} + +impl Display for BridgeError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + BridgeError::DuplicateNodeId { node_id } => { + write!(f, "bridge request contains duplicate node_id {node_id}") + } + BridgeError::DuplicateEdgeIndex { edge_index } => { + write!( + f, + "bridge request contains duplicate edge_index {edge_index}" + ) + } + BridgeError::MissingNodeRef { + edge_index, + node_id, + } => { + write!( + f, + "bridge request edge {edge_index} references unknown node_id {node_id}" + ) + } + BridgeError::RootNotFound { root_iri } => { + write!( + f, + "root class IRI {root_iri} was not found in the bridge graph" + ) + } + BridgeError::NoDescendants { root_iri } => { + write!( + f, + "root class IRI {root_iri} has no subclass descendants in the bridge graph" + ) + } + BridgeError::CreateOutputDir { path, source } => write!( + f, + "failed to create SVG output directory {}: {source}", + path.display() + ), + BridgeError::SvgExport(error) => Display::fmt(error, f), + BridgeError::Layout(error) => Display::fmt(error, f), + } + } +} + +impl Error for BridgeError { + fn source(&self) -> Option<&(dyn Error + 'static)> { + match self { + BridgeError::CreateOutputDir { source, .. 
} => Some(source), + BridgeError::SvgExport(error) => Some(error), + BridgeError::Layout(error) => Some(error), + _ => None, + } + } +} + +impl From for BridgeError { + fn from(value: LayoutError) -> Self { + Self::Layout(value) + } +} + +impl From for BridgeError { + fn from(value: SvgExportError) -> Self { + Self::SvgExport(value) + } +} + +#[derive(Debug, Clone, PartialEq)] +pub struct BridgeRuntimeConfig { + pub layout: LayoutConfig, + pub svg: SvgConfig, + pub svg_output_path: Option, + pub canonicalize_input: bool, +} + +impl BridgeRuntimeConfig { + pub fn json_only(layout: LayoutConfig) -> Self { + Self { + layout, + svg: SvgConfig::default(), + svg_output_path: None, + canonicalize_input: true, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct GoBridgeRequest { + pub root_iri: String, + pub nodes: Vec, + pub edges: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct GoBridgeNode { + pub node_id: u32, + pub iri: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct GoBridgeEdge { + pub edge_index: usize, + pub parent_id: u32, + pub child_id: u32, + #[serde(default)] + pub predicate_iri: Option, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct GoBridgeResponse { + pub nodes: Vec, + pub route_segments: Vec, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct GoBridgeRoutedNode { + pub node_id: u32, + pub x: f64, + pub y: f64, + pub level: usize, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct GoBridgeRouteSegment { + pub edge_index: usize, + pub kind: String, + pub points: Vec, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct GoBridgePoint { + pub x: f64, + pub y: f64, +} + +struct BridgeGraph { + root_iri: String, + graph: Graph, + node_ids: Vec, + edge_indices: Vec, +} + +pub fn process_go_bridge_request( + request: GoBridgeRequest, + config: 
LayoutConfig, +) -> Result { + process_go_bridge_request_with_options(request, BridgeRuntimeConfig::json_only(config)) +} + +pub fn process_go_bridge_request_with_options( + request: GoBridgeRequest, + config: BridgeRuntimeConfig, +) -> Result { + let bridge_graph = build_bridge_graph(request)?; + let mut filtered = filter_bridge_graph_to_descendants(bridge_graph)?; + if config.canonicalize_input { + filtered = canonicalize_bridge_graph(filtered); + } + + let artifacts = layout_radial_hierarchy_with_artifacts(&mut filtered.graph, config.layout)?; + write_debug_svg_if_configured(&filtered.graph, &artifacts, &config)?; + + Ok(build_bridge_response(&filtered, &artifacts)) +} + +fn build_bridge_graph(request: GoBridgeRequest) -> Result { + let mut node_id_to_index = HashMap::with_capacity(request.nodes.len()); + let mut nodes = Vec::with_capacity(request.nodes.len()); + let mut node_ids = Vec::with_capacity(request.nodes.len()); + + for node in request.nodes { + if node_id_to_index.insert(node.node_id, nodes.len()).is_some() { + return Err(BridgeError::DuplicateNodeId { + node_id: node.node_id, + }); + } + nodes.push(Node { + label: Some(node.iri), + ..Node::default() + }); + node_ids.push(node.node_id); + } + + let mut seen_edge_indices = HashSet::with_capacity(request.edges.len()); + let mut edges = Vec::with_capacity(request.edges.len()); + let mut edge_indices = Vec::with_capacity(request.edges.len()); + for edge in request.edges { + if !seen_edge_indices.insert(edge.edge_index) { + return Err(BridgeError::DuplicateEdgeIndex { + edge_index: edge.edge_index, + }); + } + + let Some(&source) = node_id_to_index.get(&edge.parent_id) else { + return Err(BridgeError::MissingNodeRef { + edge_index: edge.edge_index, + node_id: edge.parent_id, + }); + }; + let Some(&target) = node_id_to_index.get(&edge.child_id) else { + return Err(BridgeError::MissingNodeRef { + edge_index: edge.edge_index, + node_id: edge.child_id, + }); + }; + + edges.push(Edge::new(source, target)); + 
edge_indices.push(edge.edge_index); + } + + Ok(BridgeGraph { + root_iri: request.root_iri, + graph: Graph::new(nodes, edges), + node_ids, + edge_indices, + }) +} + +fn filter_bridge_graph_to_descendants( + bridge_graph: BridgeGraph, +) -> Result { + let BridgeGraph { + root_iri, + graph, + node_ids, + edge_indices, + } = bridge_graph; + + let Some(root_index) = graph + .nodes + .iter() + .position(|node| node.label.as_deref() == Some(root_iri.as_str())) + else { + return Err(BridgeError::RootNotFound { root_iri }); + }; + + let mut adjacency = vec![Vec::new(); graph.nodes.len()]; + for edge in &graph.edges { + adjacency[edge.source].push(edge.target); + } + + let mut visited = HashSet::from([root_index]); + let mut queue = VecDeque::from([root_index]); + while let Some(node) = queue.pop_front() { + for &child in &adjacency[node] { + if visited.insert(child) { + queue.push_back(child); + } + } + } + + if visited.len() <= 1 { + return Err(BridgeError::NoDescendants { root_iri }); + } + + let mut reindex = HashMap::with_capacity(visited.len()); + let mut filtered_nodes = Vec::with_capacity(visited.len()); + let mut filtered_node_ids = Vec::with_capacity(visited.len()); + for (old_index, node) in graph.nodes.iter().enumerate() { + if !visited.contains(&old_index) { + continue; + } + let new_index = filtered_nodes.len(); + reindex.insert(old_index, new_index); + filtered_nodes.push(node.clone()); + filtered_node_ids.push(node_ids[old_index]); + } + + let mut filtered_edges = Vec::new(); + let mut filtered_edge_indices = Vec::new(); + for (old_edge_index, edge) in graph.edges.iter().enumerate() { + if !visited.contains(&edge.source) || !visited.contains(&edge.target) { + continue; + } + filtered_edges.push(Edge::new(reindex[&edge.source], reindex[&edge.target])); + filtered_edge_indices.push(edge_indices[old_edge_index]); + } + + Ok(BridgeGraph { + root_iri, + graph: Graph::new(filtered_nodes, filtered_edges), + node_ids: filtered_node_ids, + edge_indices: 
filtered_edge_indices, + }) +} + +fn canonicalize_bridge_graph(bridge_graph: BridgeGraph) -> BridgeGraph { + let BridgeGraph { + root_iri, + graph, + node_ids, + edge_indices, + } = bridge_graph; + + let mut node_order = (0..graph.nodes.len()).collect::>(); + node_order.sort_by(|left, right| { + graph.nodes[*left] + .label + .as_deref() + .unwrap_or("") + .cmp(graph.nodes[*right].label.as_deref().unwrap_or("")) + .then(node_ids[*left].cmp(&node_ids[*right])) + }); + + let mut reindex = vec![0usize; graph.nodes.len()]; + let mut nodes = Vec::with_capacity(graph.nodes.len()); + let mut canonical_node_ids = Vec::with_capacity(node_ids.len()); + for (new_index, old_index) in node_order.into_iter().enumerate() { + reindex[old_index] = new_index; + nodes.push(graph.nodes[old_index].clone()); + canonical_node_ids.push(node_ids[old_index]); + } + + let mut edge_order = (0..graph.edges.len()).collect::>(); + edge_order.sort_by(|left, right| { + let left_edge = graph.edges[*left]; + let right_edge = graph.edges[*right]; + + graph.nodes[left_edge.source] + .label + .as_deref() + .unwrap_or("") + .cmp( + graph.nodes[right_edge.source] + .label + .as_deref() + .unwrap_or(""), + ) + .then( + graph.nodes[left_edge.target] + .label + .as_deref() + .unwrap_or("") + .cmp( + graph.nodes[right_edge.target] + .label + .as_deref() + .unwrap_or(""), + ), + ) + .then(edge_indices[*left].cmp(&edge_indices[*right])) + }); + + let mut edges = Vec::with_capacity(graph.edges.len()); + let mut canonical_edge_indices = Vec::with_capacity(edge_indices.len()); + for old_edge_index in edge_order { + let edge = graph.edges[old_edge_index]; + edges.push(Edge::new(reindex[edge.source], reindex[edge.target])); + canonical_edge_indices.push(edge_indices[old_edge_index]); + } + + BridgeGraph { + root_iri, + graph: Graph::new(nodes, edges), + node_ids: canonical_node_ids, + edge_indices: canonical_edge_indices, + } +} + +fn write_debug_svg_if_configured( + graph: &Graph, + artifacts: &LayoutArtifacts, + 
config: &BridgeRuntimeConfig, +) -> Result<(), BridgeError> { + let Some(path) = &config.svg_output_path else { + return Ok(()); + }; + + if let Some(parent) = path.parent() { + create_dir_all(parent).map_err(|source| BridgeError::CreateOutputDir { + path: parent.to_path_buf(), + source, + })?; + } + + write_svg_path_with_options(path, graph, artifacts, config.layout, config.svg)?; + Ok(()) +} + +fn build_bridge_response(graph: &BridgeGraph, artifacts: &LayoutArtifacts) -> GoBridgeResponse { + let nodes = graph + .graph + .nodes + .iter() + .enumerate() + .map(|(node_index, node)| GoBridgeRoutedNode { + node_id: graph.node_ids[node_index], + x: node.x, + y: node.y, + level: artifacts.node_levels[node_index], + }) + .collect::>(); + + let route_segments = artifacts + .edge_routes + .iter() + .map(|route| GoBridgeRouteSegment { + edge_index: graph.edge_indices[route.original_edge_index], + kind: route_kind_name(route.kind).to_owned(), + points: route + .points + .iter() + .map(|point| GoBridgePoint { + x: point.x, + y: point.y, + }) + .collect(), + }) + .collect::>(); + + GoBridgeResponse { + nodes, + route_segments, + } +} + +fn route_kind_name(kind: EdgeRouteKind) -> &'static str { + match kind { + EdgeRouteKind::Straight => "straight", + EdgeRouteKind::Spiral => "spiral", + EdgeRouteKind::IntraLevel => "intra_level", + } +} + +#[cfg(test)] +mod tests { + use std::fs; + use std::time::{SystemTime, UNIX_EPOCH}; + + use super::*; + use crate::{filter_graph_to_descendants, RingDistribution}; + + fn node(node_id: u32, iri: &str) -> GoBridgeNode { + GoBridgeNode { + node_id, + iri: iri.to_owned(), + } + } + + fn edge(edge_index: usize, parent_id: u32, child_id: u32) -> GoBridgeEdge { + GoBridgeEdge { + edge_index, + parent_id, + child_id, + predicate_iri: None, + } + } + + fn runtime_config() -> BridgeRuntimeConfig { + BridgeRuntimeConfig { + layout: LayoutConfig { + ring_distribution: RingDistribution::Adaptive, + ..LayoutConfig::default() + }, + svg: SvgConfig { + 
shortest_edges: false, + show_labels: false, + }, + svg_output_path: None, + canonicalize_input: true, + } + } + + fn sorted_nodes(mut nodes: Vec) -> Vec<(u32, usize, i64, i64)> { + nodes.sort_by_key(|node| node.node_id); + nodes + .into_iter() + .map(|node| { + ( + node.node_id, + node.level, + (node.x * 1_000_000.0).round() as i64, + (node.y * 1_000_000.0).round() as i64, + ) + }) + .collect() + } + + fn sorted_segments( + mut segments: Vec, + ) -> Vec<(usize, String, Vec<(i64, i64)>)> { + segments.sort_by(|left, right| { + left.edge_index + .cmp(&right.edge_index) + .then(left.kind.cmp(&right.kind)) + .then(left.points.len().cmp(&right.points.len())) + }); + segments + .into_iter() + .map(|segment| { + ( + segment.edge_index, + segment.kind, + segment + .points + .into_iter() + .map(|point| { + ( + (point.x * 1_000_000.0).round() as i64, + (point.y * 1_000_000.0).round() as i64, + ) + }) + .collect(), + ) + }) + .collect() + } + + #[test] + fn filters_to_root_descendants_and_preserves_node_ids() { + let response = process_go_bridge_request_with_options( + GoBridgeRequest { + root_iri: "root".to_owned(), + nodes: vec![ + node(10, "root"), + node(11, "child"), + node(12, "leaf"), + node(13, "other"), + ], + edges: vec![edge(0, 10, 11), edge(1, 11, 12), edge(2, 13, 12)], + }, + runtime_config(), + ) + .unwrap(); + + let mut kept_ids = response + .nodes + .iter() + .map(|node| node.node_id) + .collect::>(); + kept_ids.sort(); + assert_eq!(kept_ids, vec![10, 11, 12]); + assert!(response + .route_segments + .iter() + .all(|segment| segment.edge_index == 0 || segment.edge_index == 1)); + } + + #[test] + fn returns_multiple_route_segments_for_long_edges_with_dummies() { + let response = process_go_bridge_request_with_options( + GoBridgeRequest { + root_iri: "root".to_owned(), + nodes: vec![node(1, "root"), node(2, "child"), node(3, "leaf")], + edges: vec![edge(10, 1, 2), edge(11, 2, 3), edge(12, 1, 3)], + }, + runtime_config(), + ) + .unwrap(); + + let long_edge_routes 
= response + .route_segments + .iter() + .filter(|segment| segment.edge_index == 12) + .count(); + assert!(long_edge_routes >= 2); + } + + #[test] + fn returns_error_when_root_is_missing() { + let error = process_go_bridge_request_with_options( + GoBridgeRequest { + root_iri: "root".to_owned(), + nodes: vec![node(1, "other")], + edges: vec![], + }, + runtime_config(), + ) + .unwrap_err(); + + assert!(matches!( + error, + BridgeError::RootNotFound { root_iri } if root_iri == "root" + )); + } + + #[test] + fn returns_error_when_root_has_no_descendants() { + let error = process_go_bridge_request_with_options( + GoBridgeRequest { + root_iri: "root".to_owned(), + nodes: vec![node(1, "root"), node(2, "other")], + edges: vec![edge(0, 2, 1)], + }, + runtime_config(), + ) + .unwrap_err(); + + assert!(matches!( + error, + BridgeError::NoDescendants { root_iri } if root_iri == "root" + )); + } + + #[test] + fn bridge_matches_direct_layout_for_same_graph_and_config() { + let request = GoBridgeRequest { + root_iri: "root".to_owned(), + nodes: vec![ + node(5, "leaf"), + node(1, "root"), + node(4, "sibling"), + node(2, "child"), + node(3, "grandchild"), + ], + edges: vec![ + edge(12, 2, 3), + edge(10, 1, 2), + edge(11, 1, 4), + edge(13, 1, 5), + ], + }; + let config = runtime_config(); + + let response = + process_go_bridge_request_with_options(request.clone(), config.clone()).unwrap(); + + let node_index_by_id = request + .nodes + .iter() + .enumerate() + .map(|(index, node)| (node.node_id, index)) + .collect::>(); + let direct_edges = request + .edges + .iter() + .map(|edge| { + Edge::new( + *node_index_by_id.get(&edge.parent_id).unwrap(), + *node_index_by_id.get(&edge.child_id).unwrap(), + ) + }) + .collect::>(); + let direct_nodes = request + .nodes + .iter() + .map(|node| Node { + label: Some(node.iri.clone()), + ..Node::default() + }) + .collect::>(); + let direct_graph = Graph::new(direct_nodes, direct_edges); + let filtered_direct = + 
filter_graph_to_descendants(&direct_graph, &request.root_iri).unwrap(); + + let mut filtered = + filter_bridge_graph_to_descendants(build_bridge_graph(request).unwrap()).unwrap(); + filtered = canonicalize_bridge_graph(filtered); + let mut direct_expected_iris = filtered_direct + .nodes + .iter() + .filter_map(|node| node.label.clone()) + .collect::>(); + let mut actual_iris = filtered + .graph + .nodes + .iter() + .filter_map(|node| node.label.clone()) + .collect::>(); + direct_expected_iris.sort(); + actual_iris.sort(); + assert_eq!(actual_iris, direct_expected_iris); + + let artifacts = + layout_radial_hierarchy_with_artifacts(&mut filtered.graph, config.layout).unwrap(); + let expected = build_bridge_response(&filtered, &artifacts); + + assert_eq!(sorted_nodes(response.nodes), sorted_nodes(expected.nodes)); + assert_eq!( + sorted_segments(response.route_segments), + sorted_segments(expected.route_segments) + ); + } + + #[test] + fn canonicalization_makes_bridge_positions_independent_of_input_order() { + let config = runtime_config(); + let request_a = GoBridgeRequest { + root_iri: "root".to_owned(), + nodes: vec![ + node(1, "root"), + node(2, "child"), + node(3, "leaf"), + node(4, "sibling"), + ], + edges: vec![edge(0, 1, 2), edge(1, 2, 3), edge(2, 1, 4)], + }; + let request_b = GoBridgeRequest { + root_iri: "root".to_owned(), + nodes: vec![ + node(4, "sibling"), + node(3, "leaf"), + node(2, "child"), + node(1, "root"), + ], + edges: vec![edge(2, 1, 4), edge(1, 2, 3), edge(0, 1, 2)], + }; + + let response_a = process_go_bridge_request_with_options(request_a, config.clone()).unwrap(); + let response_b = process_go_bridge_request_with_options(request_b, config).unwrap(); + + assert_eq!( + sorted_nodes(response_a.nodes), + sorted_nodes(response_b.nodes) + ); + assert_eq!( + sorted_segments(response_a.route_segments), + sorted_segments(response_b.route_segments) + ); + } + + #[test] + fn writes_debug_svg_to_configured_output_path() { + let unique = 
SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_nanos(); + let dir = std::env::temp_dir().join(format!( + "radial_sugiyama_bridge_svg_{}_{}", + std::process::id(), + unique + )); + let path = dir.join("layout.svg"); + let mut config = runtime_config(); + config.svg_output_path = Some(path.clone()); + + let response = process_go_bridge_request_with_options( + GoBridgeRequest { + root_iri: "root".to_owned(), + nodes: vec![node(1, "root"), node(2, "child"), node(3, "leaf")], + edges: vec![edge(0, 1, 2), edge(1, 2, 3)], + }, + config, + ) + .unwrap(); + + assert_eq!(response.nodes.len(), 3); + let svg = fs::read_to_string(&path).unwrap(); + assert!(svg.contains(" Result { + let _ = dotenv(); + Self::from_lookup(|key| env::var(key).ok()) + } + + pub fn input_path(&self) -> PathBuf { + self.input_dir.join(&self.input_file) + } + + pub fn output_path(&self) -> PathBuf { + self.output_dir.join(&self.output_file) + } + + fn from_lookup(mut lookup: F) -> Result + where + F: FnMut(&str) -> Option, + { + let defaults = LayoutConfig::default(); + let input_dir = PathBuf::from(require_var(&mut lookup, INPUT_DIR_KEY)?); + let input_file = PathBuf::from(require_var(&mut lookup, INPUT_FILE_KEY)?); + let root_class_iri = + lookup(ROOT_CLASS_IRI_KEY).unwrap_or_else(|| DEFAULT_ROOT_CLASS_IRI.to_owned()); + let output_dir = + PathBuf::from(lookup(OUTPUT_DIR_KEY).unwrap_or_else(|| DEFAULT_OUTPUT_DIR.to_owned())); + let output_file = PathBuf::from( + lookup(OUTPUT_FILE_KEY).unwrap_or_else(|| DEFAULT_OUTPUT_FILE.to_owned()), + ); + let layout = LayoutConfig { + min_radius: parse_f64(&mut lookup, MIN_RADIUS_KEY, defaults.min_radius)?, + level_distance: parse_f64(&mut lookup, LEVEL_DISTANCE_KEY, defaults.level_distance)?, + align_positive_coords: parse_bool( + &mut lookup, + ALIGN_POSITIVE_KEY, + defaults.align_positive_coords, + )?, + spiral_quality: parse_usize(&mut lookup, SPIRAL_QUALITY_KEY, defaults.spiral_quality)?, + left_border: parse_f64(&mut lookup, 
LEFT_BORDER_KEY, defaults.left_border)?, + upper_border: parse_f64(&mut lookup, UPPER_BORDER_KEY, defaults.upper_border)?, + node_distance: parse_f64(&mut lookup, NODE_DISTANCE_KEY, defaults.node_distance)?, + ring_distribution: parse_ring_distribution( + &mut lookup, + RING_DISTRIBUTION_KEY, + defaults.ring_distribution, + )?, + }; + let svg = SvgConfig { + shortest_edges: parse_bool( + &mut lookup, + SVG_SHORTEST_EDGES_KEY, + SvgConfig::default().shortest_edges, + )?, + show_labels: parse_bool( + &mut lookup, + SVG_SHOW_LABELS_KEY, + SvgConfig::default().show_labels, + )?, + }; + + Ok(Self { + input_dir, + input_file, + root_class_iri, + output_dir, + output_file, + layout, + svg, + }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum EnvConfigError { + MissingVar(&'static str), + InvalidFloat { key: &'static str, value: String }, + InvalidUsize { key: &'static str, value: String }, + InvalidBool { key: &'static str, value: String }, + InvalidRingDistribution { key: &'static str, value: String }, +} + +impl Display for EnvConfigError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + EnvConfigError::MissingVar(key) => write!(f, "missing required environment variable {key}"), + EnvConfigError::InvalidFloat { key, value } => { + write!(f, "environment variable {key} must be a float, got {value}") + } + EnvConfigError::InvalidUsize { key, value } => { + write!(f, "environment variable {key} must be a non-negative integer, got {value}") + } + EnvConfigError::InvalidBool { key, value } => { + write!(f, "environment variable {key} must be a boolean, got {value}") + } + EnvConfigError::InvalidRingDistribution { key, value } => write!( + f, + "environment variable {key} must be 'packed', 'distributed', or 'adaptive', got {value}" + ), + } + } +} + +impl Error for EnvConfigError {} + +fn require_var(lookup: &mut F, key: &'static str) -> Result +where + F: FnMut(&str) -> Option, +{ + lookup(key).ok_or(EnvConfigError::MissingVar(key)) 
+} + +fn parse_f64(lookup: &mut F, key: &'static str, default: f64) -> Result +where + F: FnMut(&str) -> Option, +{ + match lookup(key) { + Some(value) => value + .parse::() + .map_err(|_| EnvConfigError::InvalidFloat { key, value }), + None => Ok(default), + } +} + +fn parse_usize( + lookup: &mut F, + key: &'static str, + default: usize, +) -> Result +where + F: FnMut(&str) -> Option, +{ + match lookup(key) { + Some(value) => value + .parse::() + .map_err(|_| EnvConfigError::InvalidUsize { key, value }), + None => Ok(default), + } +} + +fn parse_bool(lookup: &mut F, key: &'static str, default: bool) -> Result +where + F: FnMut(&str) -> Option, +{ + match lookup(key) { + Some(value) => match value.to_ascii_lowercase().as_str() { + "true" | "1" | "yes" | "on" => Ok(true), + "false" | "0" | "no" | "off" => Ok(false), + _ => Err(EnvConfigError::InvalidBool { key, value }), + }, + None => Ok(default), + } +} + +fn parse_ring_distribution( + lookup: &mut F, + key: &'static str, + default: RingDistribution, +) -> Result +where + F: FnMut(&str) -> Option, +{ + match lookup(key) { + Some(value) => match value.to_ascii_lowercase().as_str() { + "packed" => Ok(RingDistribution::Packed), + "distributed" => Ok(RingDistribution::Distributed), + "adaptive" => Ok(RingDistribution::Adaptive), + _ => Err(EnvConfigError::InvalidRingDistribution { key, value }), + }, + None => Ok(default), + } +} + +#[cfg(test)] +mod tests { + use super::*; + + fn config_from_map(entries: &[(&str, &str)]) -> Result { + let vars = entries + .iter() + .map(|(key, value)| ((*key).to_owned(), (*value).to_owned())) + .collect::>(); + EnvConfig::from_lookup(|key| vars.get(key).cloned()) + } + + #[test] + fn parses_env_config_and_path() { + let config = config_from_map(&[ + (INPUT_DIR_KEY, "./ttl"), + (INPUT_FILE_KEY, "ontology.ttl"), + (ROOT_CLASS_IRI_KEY, "http://example.com/root"), + (OUTPUT_DIR_KEY, "./svg"), + (OUTPUT_FILE_KEY, "graph.svg"), + (SVG_SHORTEST_EDGES_KEY, "true"), + (SVG_SHOW_LABELS_KEY, 
"false"), + (MIN_RADIUS_KEY, "2.5"), + (LEVEL_DISTANCE_KEY, "3.0"), + (ALIGN_POSITIVE_KEY, "false"), + (SPIRAL_QUALITY_KEY, "800"), + (LEFT_BORDER_KEY, "120.0"), + (UPPER_BORDER_KEY, "140.0"), + (NODE_DISTANCE_KEY, "90.0"), + (RING_DISTRIBUTION_KEY, "adaptive"), + ]) + .unwrap(); + + assert_eq!( + config.input_path(), + PathBuf::from("./ttl").join("ontology.ttl") + ); + assert_eq!(config.root_class_iri, "http://example.com/root"); + assert_eq!( + config.output_path(), + PathBuf::from("./svg").join("graph.svg") + ); + assert_eq!(config.layout.min_radius, 2.5); + assert_eq!(config.layout.level_distance, 3.0); + assert!(!config.layout.align_positive_coords); + assert_eq!(config.layout.spiral_quality, 800); + assert_eq!(config.layout.left_border, 120.0); + assert_eq!(config.layout.upper_border, 140.0); + assert_eq!(config.layout.node_distance, 90.0); + assert_eq!(config.layout.ring_distribution, RingDistribution::Adaptive); + assert!(config.svg.shortest_edges); + assert!(!config.svg.show_labels); + } + + #[test] + fn missing_input_file_is_reported() { + let error = config_from_map(&[(INPUT_DIR_KEY, "./ttl")]).unwrap_err(); + assert_eq!(error, EnvConfigError::MissingVar(INPUT_FILE_KEY)); + } + + #[test] + fn invalid_boolean_is_reported() { + let error = config_from_map(&[ + (INPUT_DIR_KEY, "./ttl"), + (INPUT_FILE_KEY, "ontology.ttl"), + (ALIGN_POSITIVE_KEY, "maybe"), + ]) + .unwrap_err(); + + assert_eq!( + error, + EnvConfigError::InvalidBool { + key: ALIGN_POSITIVE_KEY, + value: "maybe".to_owned(), + } + ); + } + + #[test] + fn uses_default_output_location_when_not_provided() { + let config = + config_from_map(&[(INPUT_DIR_KEY, "./ttl"), (INPUT_FILE_KEY, "ontology.ttl")]).unwrap(); + + assert_eq!( + config.root_class_iri, + "http://purl.obolibrary.org/obo/BFO_0000001" + ); + assert_eq!( + config.output_path(), + PathBuf::from("./out").join("layout.svg") + ); + assert!(!config.svg.shortest_edges); + assert!(config.svg.show_labels); + 
assert_eq!(config.layout.ring_distribution, RingDistribution::Packed); + } + + #[test] + fn invalid_ring_distribution_is_reported() { + let error = config_from_map(&[ + (INPUT_DIR_KEY, "./ttl"), + (INPUT_FILE_KEY, "ontology.ttl"), + (RING_DISTRIBUTION_KEY, "arc"), + ]) + .unwrap_err(); + + assert_eq!( + error, + EnvConfigError::InvalidRingDistribution { + key: RING_DISTRIBUTION_KEY, + value: "arc".to_owned(), + } + ); + } +} diff --git a/radial_sugiyama/src/error.rs b/radial_sugiyama/src/error.rs new file mode 100644 index 0000000..b97b371 --- /dev/null +++ b/radial_sugiyama/src/error.rs @@ -0,0 +1,70 @@ +use std::error::Error; +use std::fmt::{Display, Formatter}; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum LayoutError { + InvalidNodeIndex { + edge_index: usize, + node_index: usize, + node_count: usize, + }, + SelfLoop { + edge_index: usize, + node: usize, + }, + DuplicateEdge { + edge_index: usize, + source: usize, + target: usize, + }, + CycleDetected, + InvalidHierarchyEdge { + edge_index: usize, + source: usize, + target: usize, + source_level: usize, + target_level: usize, + }, +} + +impl Display for LayoutError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + LayoutError::InvalidNodeIndex { + edge_index, + node_index, + node_count, + } => write!( + f, + "edge {} references node {} but graph only has {} nodes", + edge_index, node_index, node_count + ), + LayoutError::SelfLoop { edge_index, node } => { + write!(f, "edge {} is a self-loop on node {}", edge_index, node) + } + LayoutError::DuplicateEdge { + edge_index, + source, + target, + } => write!( + f, + "edge {} duplicates existing directed edge {} -> {}", + edge_index, source, target + ), + LayoutError::CycleDetected => write!(f, "graph must be a directed acyclic graph"), + LayoutError::InvalidHierarchyEdge { + edge_index, + source, + target, + source_level, + target_level, + } => write!( + f, + "edge {} ({} -> {}) violates hierarchy levels {} -> {}", + edge_index, 
source, target, source_level, target_level + ), + } + } +} + +impl Error for LayoutError {} diff --git a/radial_sugiyama/src/filter.rs b/radial_sugiyama/src/filter.rs new file mode 100644 index 0000000..28da632 --- /dev/null +++ b/radial_sugiyama/src/filter.rs @@ -0,0 +1,159 @@ +use std::collections::{HashMap, HashSet, VecDeque}; +use std::error::Error; +use std::fmt::{Display, Formatter}; + +use crate::model::{Edge, Graph}; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum GraphFilterError { + RootNotFound { root_iri: String }, + NoDescendants { root_iri: String }, +} + +impl Display for GraphFilterError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + GraphFilterError::RootNotFound { root_iri } => { + write!( + f, + "root class IRI {root_iri} was not found in the imported graph" + ) + } + GraphFilterError::NoDescendants { root_iri } => { + write!( + f, + "root class IRI {root_iri} has no subclass descendants in the imported graph" + ) + } + } + } +} + +impl Error for GraphFilterError {} + +pub fn filter_graph_to_descendants( + graph: &Graph, + root_iri: &str, +) -> Result { + let Some(root_index) = graph + .nodes + .iter() + .position(|node| node.label.as_deref() == Some(root_iri)) + else { + return Err(GraphFilterError::RootNotFound { + root_iri: root_iri.to_owned(), + }); + }; + + let mut adjacency = vec![Vec::new(); graph.nodes.len()]; + for edge in &graph.edges { + adjacency[edge.source].push(edge.target); + } + + let mut visited = HashSet::from([root_index]); + let mut queue = VecDeque::from([root_index]); + while let Some(node) = queue.pop_front() { + for &child in &adjacency[node] { + if visited.insert(child) { + queue.push_back(child); + } + } + } + + if visited.len() <= 1 { + return Err(GraphFilterError::NoDescendants { + root_iri: root_iri.to_owned(), + }); + } + + let mut reindex = HashMap::new(); + let mut nodes = Vec::new(); + for (old_index, node) in graph.nodes.iter().enumerate() { + if visited.contains(&old_index) 
{ + let new_index = nodes.len(); + reindex.insert(old_index, new_index); + nodes.push(node.clone()); + } + } + + let mut edges = Vec::new(); + for edge in &graph.edges { + if visited.contains(&edge.source) && visited.contains(&edge.target) { + edges.push(Edge::new(reindex[&edge.source], reindex[&edge.target])); + } + } + + Ok(Graph::new(nodes, edges)) +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::model::Node; + + fn node(label: &str) -> Node { + Node { + label: Some(label.to_owned()), + ..Node::default() + } + } + + #[test] + fn keeps_root_and_all_descendants() { + let graph = Graph::new( + vec![ + node("root"), + node("child"), + node("grandchild"), + node("other"), + node("other_child"), + ], + vec![Edge::new(0, 1), Edge::new(1, 2), Edge::new(3, 4)], + ); + + let filtered = filter_graph_to_descendants(&graph, "root").unwrap(); + + assert_eq!(filtered.nodes.len(), 3); + assert_eq!( + filtered + .nodes + .iter() + .map(|node| node.label.clone()) + .collect::>(), + vec![ + Some("root".to_owned()), + Some("child".to_owned()), + Some("grandchild".to_owned()), + ] + ); + assert_eq!(filtered.edges, vec![Edge::new(0, 1), Edge::new(1, 2)]); + } + + #[test] + fn returns_error_when_root_is_missing() { + let graph = Graph::new(vec![node("other")], vec![]); + + let error = filter_graph_to_descendants(&graph, "root").unwrap_err(); + + assert_eq!( + error, + GraphFilterError::RootNotFound { + root_iri: "root".to_owned(), + } + ); + } + + #[test] + fn returns_error_when_root_has_no_descendants() { + let graph = Graph::new(vec![node("root"), node("other")], vec![Edge::new(1, 0)]); + + let error = filter_graph_to_descendants(&graph, "root").unwrap_err(); + + assert_eq!( + error, + GraphFilterError::NoDescendants { + root_iri: "root".to_owned(), + } + ); + } +} diff --git a/radial_sugiyama/src/layering.rs b/radial_sugiyama/src/layering.rs new file mode 100644 index 0000000..48853bb --- /dev/null +++ b/radial_sugiyama/src/layering.rs @@ -0,0 +1,88 @@ +use 
std::collections::{HashSet, VecDeque}; + +use crate::error::LayoutError; +use crate::model::Graph; + +pub fn compute_hierarchy_levels(graph: &Graph) -> Result, LayoutError> { + validate_simple_dag(graph)?; + + let node_count = graph.nodes.len(); + if node_count == 0 { + return Ok(Vec::new()); + } + + let mut indegree = vec![0usize; node_count]; + let mut outgoing = vec![Vec::new(); node_count]; + + for edge in &graph.edges { + indegree[edge.target] += 1; + outgoing[edge.source].push(edge.target); + } + + let mut queue = VecDeque::new(); + for (node_index, degree) in indegree.iter().enumerate() { + if *degree == 0 { + queue.push_back(node_index); + } + } + + let mut levels = vec![0usize; node_count]; + let mut visited = 0usize; + + while let Some(node) = queue.pop_front() { + visited += 1; + let next_level = levels[node] + 1; + for &child in &outgoing[node] { + if levels[child] < next_level { + levels[child] = next_level; + } + indegree[child] -= 1; + if indegree[child] == 0 { + queue.push_back(child); + } + } + } + + if visited != node_count { + return Err(LayoutError::CycleDetected); + } + + Ok(levels) +} + +pub(crate) fn validate_simple_dag(graph: &Graph) -> Result<(), LayoutError> { + let node_count = graph.nodes.len(); + let mut seen_edges = HashSet::new(); + + for (edge_index, edge) in graph.edges.iter().enumerate() { + if edge.source >= node_count { + return Err(LayoutError::InvalidNodeIndex { + edge_index, + node_index: edge.source, + node_count, + }); + } + if edge.target >= node_count { + return Err(LayoutError::InvalidNodeIndex { + edge_index, + node_index: edge.target, + node_count, + }); + } + if edge.source == edge.target { + return Err(LayoutError::SelfLoop { + edge_index, + node: edge.source, + }); + } + if !seen_edges.insert((edge.source, edge.target)) { + return Err(LayoutError::DuplicateEdge { + edge_index, + source: edge.source, + target: edge.target, + }); + } + } + + Ok(()) +} diff --git a/radial_sugiyama/src/layout.rs 
b/radial_sugiyama/src/layout.rs new file mode 100644 index 0000000..987b30e --- /dev/null +++ b/radial_sugiyama/src/layout.rs @@ -0,0 +1,2888 @@ +use std::cmp::Ordering; +use std::collections::{HashMap, HashSet}; +use std::f64::consts::PI; + +use crate::error::LayoutError; +use crate::layering::compute_hierarchy_levels; +use crate::model::{ + EdgeRoute, EdgeRouteKind, Graph, LayoutArtifacts, LayoutConfig, Point, RingDistribution, + RoutedNode, +}; + +const MIN_DIST: i32 = 1; +const MAX_SIFTING_ROUNDS: usize = 10; +const INTRA_LEVEL_ALPHA: f64 = 10.0; +const INTRA_LEVEL_SEGMENT_ANGLE: f64 = PI / 18.0; +const ARC_SAMPLE_STEP: f64 = PI / 36.0; +const QUADRATIC_SAMPLES: usize = 12; +const EPSILON: f64 = 1e-9; +const ANGLE_EPSILON: f64 = 1e-6; + +pub fn layout_radial_hierarchy(graph: &mut Graph, config: LayoutConfig) -> Result<(), LayoutError> { + layout_radial_hierarchy_with_artifacts(graph, config).map(|_| ()) +} + +pub fn layout_radial_hierarchy_with_artifacts( + graph: &mut Graph, + config: LayoutConfig, +) -> Result { + if graph.nodes.is_empty() { + return Ok(LayoutArtifacts { + node_levels: Vec::new(), + edge_offsets: Vec::new(), + edge_routes: Vec::new(), + routed_nodes: Vec::new(), + center: Point::new(config.left_border, config.upper_border), + }); + } + + let levels = compute_hierarchy_levels(graph)?; + let (working_graph, dummies, order) = build_working_graph(graph, &levels)?; + let (order, offset) = CrossingReduction::new(&working_graph, order).minimize_crossings(); + let coordinates = CoordinateAssignment::new(&working_graph, &order, &dummies).get_coordinates(); + let projection = project_radial(&working_graph, &order, &coordinates, config); + + for (work_node_index, work_node) in working_graph.nodes.iter().enumerate() { + if let WorkNodeKind::Original(original_index) = work_node.kind { + let point = projection.points[work_node_index]; + graph.nodes[original_index].x = point.x; + graph.nodes[original_index].y = point.y; + } + } + + 
Ok(build_layout_artifacts( + &working_graph, + &order, + &offset, + &projection, + &levels, + config, + )) +} + +#[derive(Debug, Clone)] +struct WorkGraph { + nodes: Vec, + edges: Vec, + incident_edges: Vec>, + edge_lookup: HashMap<(usize, usize), usize>, +} + +impl WorkGraph { + fn new(original_count: usize) -> Self { + let nodes = (0..original_count) + .map(|node_index| WorkNode { + kind: WorkNodeKind::Original(node_index), + }) + .collect(); + + Self { + nodes, + edges: Vec::new(), + incident_edges: vec![Vec::new(); original_count], + edge_lookup: HashMap::new(), + } + } + + fn add_dummy(&mut self) -> usize { + let node_index = self.nodes.len(); + self.nodes.push(WorkNode { + kind: WorkNodeKind::Dummy, + }); + self.incident_edges.push(Vec::new()); + node_index + } + + fn add_edge( + &mut self, + source: usize, + target: usize, + original_edge: usize, + ) -> Result { + let key = canonical_pair(source, target); + if self.edge_lookup.contains_key(&key) { + return Err(LayoutError::DuplicateEdge { + edge_index: original_edge, + source, + target, + }); + } + + let edge_index = self.edges.len(); + self.edges.push(WorkEdge { + source, + target, + original_edge, + }); + self.edge_lookup.insert(key, edge_index); + self.incident_edges[source].push(edge_index); + self.incident_edges[target].push(edge_index); + Ok(edge_index) + } + + fn node_count(&self) -> usize { + self.nodes.len() + } + + fn edge_between(&self, left: usize, right: usize) -> Option { + self.edge_lookup.get(&canonical_pair(left, right)).copied() + } + + fn edges_to_node_from_level(&self, level_nodes: &[usize], node: usize) -> Vec { + let mut edges = Vec::new(); + for &other in level_nodes { + if let Some(edge_index) = self.edge_between(other, node) { + edges.push(edge_index); + } + } + edges + } + + fn vertical_edges(&self, level1: &[usize], level2: &[usize]) -> Vec { + let mut edges = Vec::new(); + for &lower in level2 { + for &upper in level1 { + if let Some(edge_index) = self.edge_between(upper, lower) { 
+ edges.push(edge_index); + } + } + } + edges + } + + fn horizontal_edges(&self, level: &[usize]) -> Vec { + let mut edges = Vec::new(); + for &left in level { + for &right in level { + if let Some(edge_index) = self.edge_between(left, right) { + if !edges.contains(&edge_index) { + edges.push(edge_index); + } + } + } + } + edges + } + + fn adjacent_node(&self, edge_index: usize, node: usize) -> Option { + let edge = self.edges[edge_index]; + if edge.source == node { + Some(edge.target) + } else if edge.target == node { + Some(edge.source) + } else { + None + } + } + + fn connects_nodes(&self, edge_index: usize, left: usize, right: usize) -> bool { + let edge = self.edges[edge_index]; + (edge.source == left && edge.target == right) + || (edge.source == right && edge.target == left) + } +} + +#[derive(Debug, Clone)] +struct WorkNode { + kind: WorkNodeKind, +} + +#[derive(Debug, Clone, Copy)] +enum WorkNodeKind { + Original(usize), + Dummy, +} + +#[derive(Debug, Clone, Copy)] +struct WorkEdge { + source: usize, + target: usize, + original_edge: usize, +} + +#[derive(Debug, Clone)] +struct ProjectionResult { + points: Vec, + center: Point, + node_levels: Vec, +} + +fn build_working_graph( + graph: &Graph, + levels: &[usize], +) -> Result<(WorkGraph, HashSet, Vec>), LayoutError> { + let level_count = levels + .iter() + .copied() + .max() + .map_or(0, |max_level| max_level + 1); + let mut order = vec![Vec::new(); level_count]; + let mut working_graph = WorkGraph::new(graph.nodes.len()); + let mut dummies = HashSet::new(); + + for (node_index, &level) in levels.iter().enumerate() { + order[level].push(node_index); + } + + for (edge_index, edge) in graph.edges.iter().enumerate() { + let source_level = levels[edge.source]; + let target_level = levels[edge.target]; + if target_level < source_level { + return Err(LayoutError::InvalidHierarchyEdge { + edge_index, + source: edge.source, + target: edge.target, + source_level, + target_level, + }); + } + + if target_level == 
source_level { + working_graph.add_edge(edge.source, edge.target, edge_index)?; + continue; + } + + let mut current = edge.source; + for level in (source_level + 1)..target_level { + let dummy = working_graph.add_dummy(); + order[level].push(dummy); + dummies.insert(dummy); + working_graph.add_edge(current, dummy, edge_index)?; + current = dummy; + } + + working_graph.add_edge(current, edge.target, edge_index)?; + } + + Ok((working_graph, dummies, order)) +} + +fn project_radial( + working_graph: &WorkGraph, + order: &[Vec], + coordinates: &[i32], + config: LayoutConfig, +) -> ProjectionResult { + if order.is_empty() { + return ProjectionResult { + points: Vec::new(), + center: Point::new(config.left_border, config.upper_border), + node_levels: Vec::new(), + }; + } + + let mut node_levels = vec![0usize; working_graph.node_count()]; + for (level_index, level) in order.iter().enumerate() { + for &node in level { + node_levels[node] = level_index; + } + } + + let mut z = 0i32; + for level in order { + if level.is_empty() { + continue; + } + let min = level + .iter() + .map(|&node| coordinates[node]) + .min() + .unwrap_or(0); + let max = level + .iter() + .map(|&node| coordinates[node]) + .max() + .unwrap_or(0); + z = z.max(max - min + 1); + } + if z <= 0 { + z = 1; + } + + let mut raw_x = vec![0.0; working_graph.node_count()]; + let mut raw_y = vec![0.0; working_graph.node_count()]; + + let start = if order.first().map_or(false, |level| level.len() == 1) { + 1 + } else { + 0 + }; + + let mut radius = config.min_radius; + let mut adaptive_min_gap = None; + let mut node_angles = vec![None; working_graph.node_count()]; + let mut previous_level = if start > 0 { + Some(order[start - 1].as_slice()) + } else { + None + }; + + if start == 1 { + node_angles[order[0][0]] = Some(0.0); + } + + for level in order.iter().skip(start) { + let (angles, next_min_gap) = project_level_angles( + working_graph, + level, + previous_level, + &node_angles, + coordinates, + z, + 
config.ring_distribution, + adaptive_min_gap, + ); + adaptive_min_gap = next_min_gap; + + for (&node, angle) in level.iter().zip(angles.into_iter()) { + node_angles[node] = Some(angle); + raw_x[node] = radius * angle.cos(); + raw_y[node] = radius * angle.sin(); + } + previous_level = Some(level.as_slice()); + radius += config.level_distance; + } + + if order[0].len() == 1 { + let center_node = order[0][0]; + raw_x[center_node] = 0.0; + raw_y[center_node] = 0.0; + } + + let (shift_x, shift_y, center) = if config.align_positive_coords { + let min_x = raw_x.iter().copied().fold(f64::INFINITY, f64::min); + let min_y = raw_y.iter().copied().fold(f64::INFINITY, f64::min); + ( + -min_x, + -min_y, + Point::new( + config.left_border - min_x * config.node_distance, + config.upper_border - min_y * config.node_distance, + ), + ) + } else { + ( + 0.0, + 0.0, + Point::new(config.left_border, config.upper_border), + ) + }; + + let mut points = vec![Point::new(0.0, 0.0); working_graph.node_count()]; + for node in 0..working_graph.node_count() { + points[node] = Point::new( + config.left_border + (raw_x[node] + shift_x) * config.node_distance, + config.upper_border + (raw_y[node] + shift_y) * config.node_distance, + ); + } + + ProjectionResult { + points, + center, + node_levels, + } +} + +fn project_level_angles( + working_graph: &WorkGraph, + level: &[usize], + previous_level: Option<&[usize]>, + node_angles: &[Option], + coordinates: &[i32], + z: i32, + distribution: RingDistribution, + previous_min_gap: Option, +) -> (Vec, Option) { + match distribution { + RingDistribution::Packed => ( + level + .iter() + .map(|&node| packed_angle(z, coordinates[node])) + .collect(), + previous_min_gap, + ), + RingDistribution::Distributed => ( + (0..level.len()) + .map(|index| distributed_angle(level.len(), index)) + .collect(), + previous_min_gap, + ), + RingDistribution::Adaptive => adaptive_level_angles( + working_graph, + level, + previous_level, + node_angles, + coordinates, + z, + 
previous_min_gap, + ), + } +} + +fn adaptive_level_angles( + working_graph: &WorkGraph, + level: &[usize], + previous_level: Option<&[usize]>, + node_angles: &[Option], + coordinates: &[i32], + z: i32, + previous_min_gap: Option, +) -> (Vec, Option) { + if level.is_empty() { + return (Vec::new(), previous_min_gap); + } + + if previous_min_gap.is_none() { + let angles = (0..level.len()) + .map(|index| distributed_angle(level.len(), index)) + .collect::>(); + return ( + angles.clone(), + Some(positive_gap(min_circular_gap(&angles))), + ); + } + + let preferred_angles = preferred_level_angles( + working_graph, + level, + previous_level, + node_angles, + coordinates, + z, + ); + + if level.len() == 1 { + let angle = normalize_angle(preferred_angles[0]); + let next_min_gap = Some(positive_gap(previous_min_gap.unwrap_or(2.0 * PI))); + return (vec![angle], next_min_gap); + } + + let minimum_gap = positive_gap( + previous_min_gap + .unwrap_or(2.0 * PI) + .min(2.0 * PI / level.len() as f64), + ); + let normalized_angles = solve_cyclic_adaptive_angles(&preferred_angles, minimum_gap); + let next_min_gap = Some(positive_gap( + previous_min_gap + .unwrap_or(2.0 * PI) + .min(min_circular_gap(&normalized_angles)), + )); + + (normalized_angles, next_min_gap) +} + +fn solve_cyclic_adaptive_angles(preferred_angles: &[f64], minimum_gap: f64) -> Vec { + debug_assert!(preferred_angles.len() >= 2); + + let mut best_solution = Vec::new(); + let mut best_error = f64::INFINITY; + let mut best_cut = usize::MAX; + + for cut in 0..preferred_angles.len() { + let rotated_preferred = rotate_values(preferred_angles, cut); + let unwrapped_preferred = unwrap_angles(&rotated_preferred); + let placed_unwrapped = solve_unwrapped_level_placement(&unwrapped_preferred, minimum_gap); + let error = squared_error(&placed_unwrapped, &unwrapped_preferred); + + if error < best_error - EPSILON || (approx_eq(error, best_error) && cut < best_cut) { + best_error = error; + best_cut = cut; + best_solution = 
inverse_rotate_values( + &placed_unwrapped + .into_iter() + .map(normalize_angle) + .collect::>(), + cut, + ); + } + } + + best_solution +} + +fn solve_unwrapped_level_placement(unwrapped_preferred: &[f64], minimum_gap: f64) -> Vec { + let count = unwrapped_preferred.len(); + let slack = (2.0 * PI - count as f64 * minimum_gap).max(0.0); + let targets = unwrapped_preferred + .iter() + .enumerate() + .map(|(index, angle)| angle - index as f64 * minimum_gap) + .collect::>(); + let free_positions = bounded_isotonic_regression(&targets, 0.0, slack); + let relative_positions = free_positions + .iter() + .enumerate() + .map(|(index, position)| position + index as f64 * minimum_gap) + .collect::>(); + let shift = unwrapped_preferred + .iter() + .zip(&relative_positions) + .map(|(preferred, placed)| preferred - placed) + .sum::() + / count as f64; + + relative_positions + .into_iter() + .map(|position| position + shift) + .collect() +} + +fn positive_gap(gap: f64) -> f64 { + gap.max(ANGLE_EPSILON) +} + +fn rotate_values(values: &[f64], start: usize) -> Vec { + values[start..] 
+ .iter() + .chain(values[..start].iter()) + .copied() + .collect() +} + +fn inverse_rotate_values(values: &[f64], start: usize) -> Vec { + let mut restored = vec![0.0; values.len()]; + for (index, value) in values.iter().copied().enumerate() { + restored[(start + index) % values.len()] = value; + } + restored +} + +fn squared_error(left: &[f64], right: &[f64]) -> f64 { + left.iter() + .zip(right) + .map(|(left, right)| (left - right).powi(2)) + .sum() +} + +fn bounded_isotonic_regression(values: &[f64], lower: f64, upper: f64) -> Vec { + if values.is_empty() { + return Vec::new(); + } + + let mut result = vec![0.0; values.len()]; + let mut start = 0usize; + let mut end = values.len(); + + while start < end { + let regression = isotonic_regression(&values[start..end]); + if let Some(last_below) = regression + .iter() + .rposition(|value| *value < lower - EPSILON) + { + result[start..=start + last_below].fill(lower); + start += last_below + 1; + continue; + } + + if let Some(first_above) = regression.iter().position(|value| *value > upper + EPSILON) { + result[start + first_above..end].fill(upper); + end = start + first_above; + continue; + } + + result[start..end].copy_from_slice(®ression); + return result; + } + + result +} + +fn isotonic_regression(values: &[f64]) -> Vec { + #[derive(Clone, Copy)] + struct Block { + start: usize, + end: usize, + mean: f64, + count: usize, + } + + let mut blocks = Vec::::new(); + for (index, value) in values.iter().copied().enumerate() { + blocks.push(Block { + start: index, + end: index + 1, + mean: value, + count: 1, + }); + + while blocks.len() >= 2 { + let right = blocks[blocks.len() - 1]; + let left = blocks[blocks.len() - 2]; + if left.mean <= right.mean + EPSILON { + break; + } + + let merged_count = left.count + right.count; + let merged_mean = (left.mean * left.count as f64 + right.mean * right.count as f64) + / merged_count as f64; + blocks.pop(); + blocks.pop(); + blocks.push(Block { + start: left.start, + end: 
right.end, + mean: merged_mean, + count: merged_count, + }); + } + } + + let mut result = vec![0.0; values.len()]; + for block in blocks { + result[block.start..block.end].fill(block.mean); + } + result +} + +fn preferred_level_angles( + working_graph: &WorkGraph, + level: &[usize], + previous_level: Option<&[usize]>, + node_angles: &[Option], + coordinates: &[i32], + z: i32, +) -> Vec { + level + .iter() + .map(|&node| { + preferred_angle_for_node( + working_graph, + previous_level, + node_angles, + node, + coordinates[node], + z, + ) + }) + .collect() +} + +fn preferred_angle_for_node( + working_graph: &WorkGraph, + previous_level: Option<&[usize]>, + node_angles: &[Option], + node: usize, + coordinate: i32, + z: i32, +) -> f64 { + let Some(previous_level) = previous_level else { + return packed_angle(z, coordinate); + }; + + let parent_angles = working_graph + .edges_to_node_from_level(previous_level, node) + .into_iter() + .filter_map(|edge_index| { + working_graph + .adjacent_node(edge_index, node) + .and_then(|adjacent| node_angles[adjacent]) + }) + .collect::>(); + + if parent_angles.is_empty() { + packed_angle(z, coordinate) + } else { + circular_mean_angle(&parent_angles) + } +} + +fn build_layout_artifacts( + working_graph: &WorkGraph, + order: &[Vec], + offsets: &[i32], + projection: &ProjectionResult, + original_levels: &[usize], + config: LayoutConfig, +) -> LayoutArtifacts { + let routed_nodes = working_graph + .nodes + .iter() + .enumerate() + .map(|(node_index, node)| RoutedNode { + original_index: match node.kind { + WorkNodeKind::Original(original_index) => Some(original_index), + WorkNodeKind::Dummy => None, + }, + level: projection.node_levels[node_index], + point: projection.points[node_index], + }) + .collect::>(); + + let edge_routes = working_graph + .edges + .iter() + .enumerate() + .map(|(edge_index, edge)| { + let kind = classify_route(edge, &projection.node_levels, order); + let points = build_route_points( + edge, + kind, + 
offsets[edge_index], + &projection.points, + &projection.node_levels, + projection.center, + config, + order, + ); + + EdgeRoute { + original_edge_index: edge.original_edge, + source: edge.source, + target: edge.target, + kind, + points, + } + }) + .collect(); + + LayoutArtifacts { + node_levels: original_levels.to_vec(), + edge_offsets: offsets.to_vec(), + edge_routes, + routed_nodes, + center: projection.center, + } +} + +fn classify_route(edge: &WorkEdge, node_levels: &[usize], order: &[Vec]) -> EdgeRouteKind { + let source_level = node_levels[edge.source]; + let target_level = node_levels[edge.target]; + + if order.first().map_or(false, |level| level.len() == 1) + && (source_level == 0 || target_level == 0) + { + EdgeRouteKind::Straight + } else if source_level != target_level { + EdgeRouteKind::Spiral + } else if source_level == 0 { + EdgeRouteKind::Straight + } else { + EdgeRouteKind::IntraLevel + } +} + +fn build_route_points( + edge: &WorkEdge, + kind: EdgeRouteKind, + offset: i32, + points: &[Point], + node_levels: &[usize], + center: Point, + config: LayoutConfig, + order: &[Vec], +) -> Vec { + match kind { + EdgeRouteKind::Straight => vec![points[edge.source], points[edge.target]], + EdgeRouteKind::Spiral => build_spiral_route( + edge, + points, + node_levels, + center, + offset, + config.spiral_quality.max(1), + ), + EdgeRouteKind::IntraLevel => build_intra_level_route( + points[edge.source], + points[edge.target], + center, + config.level_distance, + config.node_distance, + order.first().map_or(0, Vec::len), + ), + } +} + +fn build_spiral_route( + edge: &WorkEdge, + points: &[Point], + node_levels: &[usize], + center: Point, + offset: i32, + quality: usize, +) -> Vec { + let source_level = node_levels[edge.source]; + let target_level = node_levels[edge.target]; + + let (start_node, end_node, reverse_output) = if source_level <= target_level { + (edge.source, edge.target, false) + } else { + (edge.target, edge.source, true) + }; + + let start = 
points[start_node]; + let end = points[end_node]; + let start_radius = distance(start, center); + let end_radius = distance(end, center); + let start_angle = polar_angle(start, center); + let end_angle = effective_spiral_end_angle(start_angle, polar_angle(end, center), offset); + + if approx_eq(start_angle, end_angle) { + let mut straight = vec![start, end]; + if reverse_output { + straight.reverse(); + } + return straight; + } + + let outer_radius = start_radius.max(end_radius); + let flatness = outer_radius / quality as f64; + let angle_direction = if start_angle < end_angle { 1.0 } else { -1.0 }; + let mut angle = start_angle; + let mut radius = start_radius; + let mut route = vec![start]; + + loop { + route.push(Point::new( + center.x + radius * angle.cos(), + center.y - radius * angle.sin(), + )); + + let x = if radius.abs() < EPSILON { + f64::INFINITY + } else { + flatness / radius + }; + if x.is_nan() || x > 0.1 { + angle += PI / 4.0 * angle_direction; + } else { + let y = (2.0 * x * x - 4.0 * x + 1.0).clamp(-1.0, 1.0); + angle += y.acos() * angle_direction; + } + + if (angle - end_angle) * angle_direction > 0.0 { + break; + } + + let denominator = end_angle - start_angle; + let fraction_complete = if denominator.abs() < EPSILON { + 1.0 + } else { + (angle - start_angle) / denominator + }; + radius = start_radius + (end_radius - start_radius) * fraction_complete; + } + + route.push(end); + dedup_points(&mut route); + + if reverse_output { + route.reverse(); + } + + route +} + +fn effective_spiral_end_angle(start_angle: f64, raw_end_angle: f64, offset: i32) -> f64 { + let original_end_angle = raw_end_angle + (-(offset as f64)) * 2.0 * PI; + let base_turns = ((start_angle - original_end_angle) / (2.0 * PI)).round() as i32; + + let mut best_angle = original_end_angle; + let mut best_span = (best_angle - start_angle).abs(); + let mut best_distance_from_original = (best_angle - original_end_angle).abs(); + + for turns in (base_turns - 2)..=(base_turns + 2) { + 
let candidate = original_end_angle + turns as f64 * 2.0 * PI; + let span = (candidate - start_angle).abs(); + let distance_from_original = (candidate - original_end_angle).abs(); + + if span < best_span - EPSILON + || (approx_eq(span, best_span) + && distance_from_original < best_distance_from_original - EPSILON) + { + best_angle = candidate; + best_span = span; + best_distance_from_original = distance_from_original; + } + } + + best_angle +} + +fn build_intra_level_route( + start: Point, + end: Point, + center: Point, + level_distance: f64, + node_distance: f64, + _inner_level_size: usize, +) -> Vec { + let start_angle = polar_angle(start, center); + let end_angle = polar_angle(end, center); + + let mid_angle = if (start_angle - end_angle).abs() <= PI { + (start_angle + end_angle) / 2.0 + } else { + (start_angle + end_angle) / 2.0 - PI + }; + + let dist_center_edge = distance(center, start) + - (level_distance * node_distance / INTRA_LEVEL_ALPHA + * ((start_angle - end_angle) / PI).abs()); + let edge_point = Point::new( + center.x + dist_center_edge * mid_angle.cos(), + center.y - dist_center_edge * mid_angle.sin(), + ); + + if let Some(circle_center) = compute_circle_center(start, edge_point, end) { + let radius = distance(circle_center, edge_point); + let start_circle_angle = polar_angle(start, circle_center); + let end_circle_angle = polar_angle(end, circle_center); + + let (first, first_angle, last, last_angle, reverse_output) = + normalize_circle_arc(start, start_circle_angle, end, end_circle_angle); + + let mut route = + if ((start_circle_angle - end_circle_angle).abs() / PI) < INTRA_LEVEL_SEGMENT_ANGLE { + sample_quadratic(first, edge_point, last, QUADRATIC_SAMPLES) + } else { + sample_circle_arc(circle_center, radius, first, first_angle, last, last_angle) + }; + + if reverse_output { + route.reverse(); + } + dedup_points(&mut route); + route + } else { + sample_quadratic(start, edge_point, end, QUADRATIC_SAMPLES) + } +} + +fn sample_circle_arc( + center: 
Point, + radius: f64, + first: Point, + first_angle: f64, + last: Point, + last_angle: f64, +) -> Vec { + let mut route = vec![first]; + let mut angle = first_angle + ARC_SAMPLE_STEP; + while angle < last_angle - EPSILON { + route.push(Point::new( + center.x + radius * angle.cos(), + center.y - radius * angle.sin(), + )); + angle += ARC_SAMPLE_STEP; + } + route.push(last); + route +} + +fn sample_quadratic(start: Point, control: Point, end: Point, samples: usize) -> Vec { + let mut route = Vec::with_capacity(samples + 1); + for step in 0..=samples { + let t = step as f64 / samples as f64; + let one_minus_t = 1.0 - t; + let x = + one_minus_t * one_minus_t * start.x + 2.0 * one_minus_t * t * control.x + t * t * end.x; + let y = + one_minus_t * one_minus_t * start.y + 2.0 * one_minus_t * t * control.y + t * t * end.y; + route.push(Point::new(x, y)); + } + dedup_points(&mut route); + route +} + +fn normalize_circle_arc( + start: Point, + start_angle: f64, + end: Point, + end_angle: f64, +) -> (Point, f64, Point, f64, bool) { + if start_angle < end_angle { + if (start_angle - end_angle).abs() < PI { + (start, start_angle, end, end_angle, false) + } else { + (end, end_angle, start, start_angle + 2.0 * PI, true) + } + } else if (start_angle - end_angle).abs() < PI { + (end, end_angle, start, start_angle, true) + } else { + (start, start_angle, end, end_angle + 2.0 * PI, false) + } +} + +fn compute_circle_center(p1: Point, p2: Point, p3: Point) -> Option { + let p = [[p1.x, p1.y], [p2.x, p2.y], [p3.x, p3.y]]; + + let m11 = determinant([ + [p[0][0], p[0][1], 1.0], + [p[1][0], p[1][1], 1.0], + [p[2][0], p[2][1], 1.0], + ]); + if approx_eq(m11, 0.0) { + return None; + } + + let m12 = determinant([ + [p[0][0] * p[0][0] + p[0][1] * p[0][1], p[0][1], 1.0], + [p[1][0] * p[1][0] + p[1][1] * p[1][1], p[1][1], 1.0], + [p[2][0] * p[2][0] + p[2][1] * p[2][1], p[2][1], 1.0], + ]); + let m13 = determinant([ + [p[0][0] * p[0][0] + p[0][1] * p[0][1], p[0][0], 1.0], + [p[1][0] * p[1][0] + 
p[1][1] * p[1][1], p[1][0], 1.0], + [p[2][0] * p[2][0] + p[2][1] * p[2][1], p[2][0], 1.0], + ]); + + Some(Point::new(0.5 * m12 / m11, -0.5 * m13 / m11)) +} + +fn determinant(a: [[f64; 3]; 3]) -> f64 { + a[0][0] * (a[1][1] * a[2][2] - a[2][1] * a[1][2]) + - a[0][1] * (a[1][0] * a[2][2] - a[2][0] * a[1][2]) + + a[0][2] * (a[1][0] * a[2][1] - a[2][0] * a[1][1]) +} + +struct CrossingReduction<'a> { + graph: &'a WorkGraph, + order: Vec>, + offset: Vec, + hl_edges: Vec>, + hr_edges: Vec>, + v_edges: Vec>, +} + +impl<'a> CrossingReduction<'a> { + fn new(graph: &'a WorkGraph, order: Vec>) -> Self { + let node_count = graph.node_count(); + Self { + graph, + order, + offset: vec![0; graph.edges.len()], + hl_edges: vec![Vec::new(); node_count], + hr_edges: vec![Vec::new(); node_count], + v_edges: vec![Vec::new(); node_count], + } + } + + fn minimize_crossings(mut self) -> (Vec>, Vec) { + let mut counter = 0usize; + + loop { + counter += 1; + let order_snapshot = self.order.clone(); + let temp_crossings = self.count_all_crossings(&order_snapshot); + let mut best_crossings = temp_crossings; + let mut best_order = self.order.clone(); + let mut best_offset = self.offset.clone(); + + for level in 0..self.order.len().saturating_sub(1) { + let fixed = self.order[level].clone(); + let movable = self.order[level + 1].clone(); + self.order[level + 1] = self.sifting(&fixed, movable); + } + + let order_snapshot = self.order.clone(); + let current_crossings = self.count_all_crossings(&order_snapshot); + if current_crossings < best_crossings { + best_crossings = current_crossings; + best_order = self.order.clone(); + best_offset = self.offset.clone(); + } + + for level in (1..self.order.len()).rev() { + if level >= 2 { + let upper = self.order[level - 2].clone(); + let lower = self.order[level - 1].clone(); + self.store_adjacent_vertical_edges(&upper, &lower); + } else { + for &node in &self.order[level - 1] { + self.v_edges[node].clear(); + } + } + + let fixed = self.order[level].clone(); 
+ let movable = self.order[level - 1].clone(); + self.order[level - 1] = self.sifting(&fixed, movable); + } + + let order_snapshot = self.order.clone(); + let current_crossings = self.count_all_crossings(&order_snapshot); + if current_crossings < best_crossings { + best_crossings = current_crossings; + best_order = self.order.clone(); + best_offset = self.offset.clone(); + } + + self.order = best_order; + self.offset = best_offset; + + if temp_crossings <= best_crossings || counter == MAX_SIFTING_ROUNDS { + break; + } + } + + self.minimize_edge_lengths(); + (self.order, self.offset) + } + + fn sifting(&mut self, level1: &[usize], mut level2: Vec) -> Vec { + let vertical_edges = self.graph.vertical_edges(level1, &level2); + let horizontal_edges = self.graph.horizontal_edges(&level2); + let level2_snapshot = level2.clone(); + + for v in level2_snapshot { + let edges_to_v = self.graph.edges_to_node_from_level(level1, v); + self.store_adjacent_horizontal_edges(&level2, &horizontal_edges); + move_to_front(&mut level2, v); + + for &edge_index in &edges_to_v { + self.offset[edge_index] = 1; + } + + let mut best_position = 0usize; + let mut current_offset = 0i32; + let mut best_offset = 0i32; + let mut parting = 0usize; + let mut best_parting = 0usize; + let mut crossing_delta = 0i32; + let mut best_crossing_delta = 0i32; + + for i in 0..level2.len().saturating_sub(1) { + let v2 = level2[i + 1]; + let edges_to_v2 = self.graph.edges_to_node_from_level(level1, v2); + + crossing_delta -= + self.count_vertical_crossings_lists(&edges_to_v, &edges_to_v2, level1, &level2); + swap_nodes(&mut level2, v, v2); + crossing_delta += + self.count_vertical_crossings_lists(&edges_to_v, &edges_to_v2, level1, &level2); + + let mut before = 0i32; + let mut after = 0i32; + + loop { + if edges_to_v.is_empty() + || edges_to_v2.is_empty() + || (edges_to_v.len() == 1 + && vertical_edges.len() == 1 + && edges_to_v == vertical_edges) + { + break; + } + + let edge_index = edges_to_v[parting]; + 
before = self.count_vertical_crossings_edge( + edge_index, + &vertical_edges, + level1, + &level2, + ); + self.offset[edge_index] = current_offset; + after = self.count_vertical_crossings_edge( + edge_index, + &vertical_edges, + level1, + &level2, + ); + + if after <= before { + crossing_delta = crossing_delta - before + after; + parting += 1; + if parting == edges_to_v.len() { + current_offset -= 1; + parting = 0; + } + } else { + self.offset[edge_index] = current_offset + 1; + break; + } + } + + let x = self.update_h_adj(&level2, v, v2); + crossing_delta += self.sifting_swap_h(v, v2, &level2); + crossing_delta += self.sifting_swap_hv(v, v2, x); + + if crossing_delta < best_crossing_delta { + best_position = i + 1; + best_offset = current_offset; + best_parting = parting; + best_crossing_delta = crossing_delta; + } + + if after > before && !edges_to_v.is_empty() { + continue; + } + } + + for (index, &edge_index) in edges_to_v.iter().enumerate() { + self.offset[edge_index] = if index < best_parting { + best_offset + } else { + best_offset + 1 + }; + } + + move_to_position(&mut level2, v, best_position); + } + + level2 + } + + fn sifting_swap_h(&self, v: usize, v2: usize, nodes: &[usize]) -> i32 { + let mut hv = self.hr_edges[v].clone(); + hv.extend(self.hl_edges[v].iter().copied()); + let mut hv2 = self.hr_edges[v2].clone(); + hv2.extend(self.hl_edges[v2].iter().copied()); + + let mut v_adj = Vec::new(); + let mut v2_adj = Vec::new(); + + for edge_index in hv { + if let Some(adjacent) = self.graph.adjacent_node(edge_index, v) { + if adjacent != v2 { + v_adj.push(adjacent); + } + } + } + + for edge_index in hv2 { + if let Some(adjacent) = self.graph.adjacent_node(edge_index, v2) { + if adjacent != v { + v2_adj.push(adjacent); + } + } + } + + let r = v_adj.len(); + let s = v2_adj.len(); + let n = nodes.len(); + let mut i = 0usize; + let mut j = 0usize; + let mut c = 0i32; + + while i < r && j < s { + let left = (position(nodes, v_adj[i]) + n - position(nodes, v2)) % 
n; + let right = (position(nodes, v2_adj[j]) + n - position(nodes, v2)) % n; + + match left.cmp(&right) { + Ordering::Less => { + c -= (s - j) as i32; + i += 1; + } + Ordering::Greater => { + c += (r - i) as i32; + j += 1; + } + Ordering::Equal => { + c += (r - i) as i32 - (s - j) as i32; + i += 1; + j += 1; + } + } + } + + c + } + + fn sifting_swap_hv(&self, v: usize, v2: usize, x: i32) -> i32 { + let hr_adj_v = self.hr_edges[v].len() as i32; + let hl_adj_v = self.hl_edges[v].len() as i32; + let hr_adj_v2 = self.hr_edges[v2].len() as i32; + let hl_adj_v2 = self.hl_edges[v2].len() as i32; + let v_adj_v = self.v_edges[v].len() as i32; + let v_adj_v2 = self.v_edges[v2].len() as i32; + + ((hr_adj_v2 - x) - hl_adj_v2) * v_adj_v + ((hl_adj_v - x) - hr_adj_v) * v_adj_v2 + } + + fn store_adjacent_horizontal_edges(&mut self, level2: &[usize], horizontal_edges: &[usize]) { + for &v in level2 { + let mut hl = Vec::new(); + let mut hr = Vec::new(); + let n = level2.len(); + let s = position(level2, v); + + for &edge_index in horizontal_edges { + let Some(u) = self.graph.adjacent_node(edge_index, v) else { + continue; + }; + + let list = if ((position(level2, u) + n - s) % n) <= n / 2 { + &mut hr + } else { + &mut hl + }; + + let mut inserted = false; + for cursor in 0..list.len() { + let w = self.graph.adjacent_node(list[cursor], v).unwrap(); + let u_pos = (position(level2, u) + n - s) % n; + let w_pos = (position(level2, w) + n - s) % n; + if u_pos < w_pos { + list.insert(cursor, edge_index); + inserted = true; + break; + } + } + if !inserted { + list.push(edge_index); + } + } + + self.hl_edges[v] = hl; + self.hr_edges[v] = hr; + } + } + + fn store_adjacent_vertical_edges(&mut self, level1: &[usize], level2: &[usize]) { + for &v in level2 { + self.v_edges[v] = self.graph.edges_to_node_from_level(level1, v); + } + } + + fn update_h_adj(&mut self, level2: &[usize], v: usize, v2: usize) -> i32 { + let mut hr_v = self.hr_edges[v].clone(); + let mut hl_v = 
self.hl_edges[v].clone(); + + if let Some(&edge_index) = hl_v.first() { + if let Some(u) = self.graph.adjacent_node(edge_index, v) { + let n = level2.len(); + if ((position(level2, u) + n - position(level2, v)) % n) < n / 2 { + hl_v.remove(0); + hr_v.push(edge_index); + } + } + } + + if let Some(&edge_index) = hr_v.first() { + if self.graph.connects_nodes(edge_index, v, v2) { + let moved = hr_v.remove(0); + hl_v.push(moved); + self.hr_edges[v] = hr_v; + self.hl_edges[v] = hl_v; + return 1; + } + } + + self.hr_edges[v] = hr_v; + self.hl_edges[v] = hl_v; + 0 + } + + fn count_all_crossings(&mut self, order: &[Vec]) -> i32 { + self.count_horizontal_crossings(order) + + self.count_all_vertical_crossings(order) + + self.count_hv_crossings(order) + } + + fn count_horizontal_crossings(&self, order: &[Vec]) -> i32 { + let mut c = 0; + + for level in order { + let n = level.len(); + let edges = self.graph.horizontal_edges(level); + + for first in 0..edges.len().saturating_sub(1) { + for second in (first + 1)..edges.len() { + let e1 = self.graph.edges[edges[first]]; + let e2 = self.graph.edges[edges[second]]; + + let (u1, v1) = ordered_pair(level, e1.source, e1.target); + let (u2, v2) = ordered_pair(level, e2.source, e2.target); + + if ((u2 + n - u1) % n) < ((v1 + n - u1) % n) + && ((v1 + n - u1) % n) < ((v2 + n - u1) % n) + && ((u2 + n - u1) % n) > 0 + { + c += 1; + } + } + } + } + + c + } + + fn count_hv_crossings(&mut self, order: &[Vec]) -> i32 { + let mut c = 0; + + for level in 0..order.len().saturating_sub(1) { + self.store_adjacent_vertical_edges(&order[level], &order[level + 1]); + let horizontal_edges = self.graph.horizontal_edges(&order[level + 1]); + + for edge_index in horizontal_edges { + let edge = self.graph.edges[edge_index]; + let mut i1 = position(&order[level + 1], edge.target); + let mut i2 = position(&order[level + 1], edge.source); + if i2 < i1 { + i1 = position(&order[level + 1], edge.source); + i2 = position(&order[level + 1], edge.target); + } + + 
let n = order[level + 1].len(); + let (start, end) = if ((i1 + n - i2) % n) < ((i2 + n - i1) % n) { + if i1 > i2 { + (i1, i2) + } else { + (i2, i1) + } + } else if ((i1 + n - i2) % n) > ((i2 + n - i1) % n) { + if i2 > i1 { + (i1, i2) + } else { + (i2, i1) + } + } else { + (i1, i2) + }; + + if start < end { + for index in (start + 1)..end { + let node = order[level + 1][index]; + c += self.v_edges[node].len() as i32; + } + } else { + for index in (start + 1)..n { + let node = order[level + 1][index]; + c += self.v_edges[node].len() as i32; + } + for index in 0..end { + let node = order[level + 1][index]; + c += self.v_edges[node].len() as i32; + } + } + } + } + + c + } + + fn count_all_vertical_crossings(&self, order: &[Vec]) -> i32 { + let mut c = 0; + + for level in 0..order.len().saturating_sub(1) { + let edges = self.graph.vertical_edges(&order[level], &order[level + 1]); + for first in 0..edges.len().saturating_sub(1) { + for second in (first + 1)..edges.len() { + c += self.crossings_between_two_edges( + edges[first], + edges[second], + &order[level], + &order[level + 1], + ); + } + } + } + + c + } + + fn count_vertical_crossings_edge( + &self, + edge: usize, + edges: &[usize], + level1: &[usize], + level2: &[usize], + ) -> i32 { + let mut crossings = 0; + for &other in edges { + crossings += self.crossings_between_two_edges(other, edge, level1, level2); + } + crossings + } + + fn count_vertical_crossings_lists( + &self, + first_edges: &[usize], + second_edges: &[usize], + level1: &[usize], + level2: &[usize], + ) -> i32 { + let mut crossings = 0; + for &first in first_edges { + for &second in second_edges { + crossings += self.crossings_between_two_edges(first, second, level1, level2); + } + } + crossings + } + + fn crossings_between_two_edges( + &self, + first_edge: usize, + second_edge: usize, + level1: &[usize], + level2: &[usize], + ) -> i32 { + let (u1, v1) = self.endpoints_for_levels(first_edge, level1); + let (u2, v2) = 
self.endpoints_for_levels(second_edge, level1); + + let a = sign(position(level1, u2) as i32 - position(level1, u1) as i32); + let b = sign(position(level2, v2) as i32 - position(level2, v1) as i32); + + let value = (self.offset[second_edge] - self.offset[first_edge] + (b - a) / 2).abs() + + (a.abs() + b.abs()) / 2 + - 1; + value.max(0) + } + + fn endpoints_for_levels(&self, edge_index: usize, level1: &[usize]) -> (usize, usize) { + let edge = self.graph.edges[edge_index]; + if level1.contains(&edge.source) { + (edge.source, edge.target) + } else { + (edge.target, edge.source) + } + } + + fn minimize_edge_lengths(&mut self) { + for level in 0..self.order.len().saturating_sub(1) { + if self.order[level].is_empty() || self.order[level + 1].is_empty() { + continue; + } + + let inner_increment = 2.0 * PI / self.order[level].len() as f64; + let outer_increment = 2.0 * PI / self.order[level + 1].len() as f64; + let mut average_span = 0.0; + let mut outer_angle = 0.0; + let mut edge_count = 0usize; + + for &outer_node in &self.order[level + 1] { + for &inner_node in &self.order[level] { + if let Some(edge_index) = self.graph.edge_between(outer_node, inner_node) { + edge_count += 1; + let inner_angle = + position(&self.order[level], inner_node) as f64 * inner_increment; + average_span += (inner_angle - outer_angle) + + (-(self.offset[edge_index] as f64) * 2.0 * PI); + } + } + outer_angle += outer_increment; + } + + if edge_count == 0 { + continue; + } + + average_span /= edge_count as f64; + let rotation = (average_span / outer_increment).round() as isize; + self.rotate(level + 1, rotation); + + if self.order[level + 1].len() == 1 { + let node = self.order[level + 1][0]; + for &edge_index in &self.graph.incident_edges[node] { + self.offset[edge_index] = 0; + } + } + } + } + + fn rotate(&mut self, level: usize, rotation: isize) { + if rotation > 0 { + for _ in 0..rotation { + let node = self.order[level].pop().unwrap(); + self.order[level].insert(0, node); + + if level + 1 
< self.order.len() { + for &neighbour in &self.order[level + 1] { + if let Some(edge_index) = self.graph.edge_between(node, neighbour) { + self.offset[edge_index] -= 1; + } + } + } + + for &neighbour in &self.order[level - 1] { + if let Some(edge_index) = self.graph.edge_between(node, neighbour) { + self.offset[edge_index] += 1; + } + } + } + } else if rotation < 0 { + for _ in rotation..0 { + let node = self.order[level].remove(0); + self.order[level].push(node); + + if level + 1 < self.order.len() { + for &neighbour in &self.order[level + 1] { + if let Some(edge_index) = self.graph.edge_between(node, neighbour) { + self.offset[edge_index] += 1; + } + } + } + + for &neighbour in &self.order[level - 1] { + if let Some(edge_index) = self.graph.edge_between(node, neighbour) { + self.offset[edge_index] -= 1; + } + } + } + } + } +} + +struct CoordinateAssignment<'a> { + graph: &'a WorkGraph, + order: &'a [Vec], + dummies: &'a HashSet, +} + +impl<'a> CoordinateAssignment<'a> { + fn new(graph: &'a WorkGraph, order: &'a [Vec], dummies: &'a HashSet) -> Self { + Self { + graph, + order, + dummies, + } + } + + fn get_coordinates(&self) -> Vec { + if self.graph.node_count() == 0 { + return Vec::new(); + } + + let mut assignments = Vec::with_capacity(4); + for direction in 0..4 { + let new_order = self.update_order(direction); + let mut bk = BrandesKoepf::new(self.graph); + let relative = bk.compute_relative_coordinates(&new_order, self.dummies); + let mut absolute = bk.get_absolute_coordinates(&relative); + + if direction == 2 || direction == 3 { + let max_coordinate = absolute.iter().copied().max().unwrap_or(0); + for value in &mut absolute { + *value = max_coordinate - *value; + } + } + + assignments.push(absolute); + } + + self.balance(&mut assignments) + } + + fn update_order(&self, direction: usize) -> Vec> { + match direction { + 0 => self.order.to_vec(), + 1 => { + let mut new_order = vec![Vec::new(); self.order.len()]; + for (index, level) in 
self.order.iter().enumerate() { + new_order[self.order.len() - 1 - index] = level.clone(); + } + new_order + } + 2 => self + .order + .iter() + .map(|level| level.iter().rev().copied().collect()) + .collect(), + 3 => { + let mut new_order = vec![Vec::new(); self.order.len()]; + for (index, level) in self.order.iter().enumerate() { + new_order[self.order.len() - 1 - index] = level.iter().rev().copied().collect(); + } + new_order + } + _ => self.order.to_vec(), + } + } + + fn balance(&self, assignments: &mut [Vec]) -> Vec { + let mut min_width = i32::MAX; + let mut smallest_layout = 0usize; + let mut min_values = [i32::MAX; 4]; + let mut max_values = [0i32; 4]; + + for (index, coordinates) in assignments.iter().enumerate() { + for &x in coordinates { + min_values[index] = min_values[index].min(x); + max_values[index] = max_values[index].max(x); + } + let width = max_values[index] - min_values[index]; + if width < min_width { + min_width = width; + smallest_layout = index; + } + } + + for (index, coordinates) in assignments.iter_mut().enumerate() { + if index == smallest_layout { + continue; + } + + let diff = if index == 0 || index == 1 { + min_values[index] - min_values[smallest_layout] + } else { + max_values[index] - max_values[smallest_layout] + }; + for value in coordinates { + *value -= diff; + } + } + + let min_value = assignments + .iter() + .flat_map(|coordinates| coordinates.iter()) + .copied() + .min() + .unwrap_or(0); + if min_value != 0 { + for coordinates in &mut *assignments { + for value in coordinates { + *value -= min_value; + } + } + } + + let mut coordinates = vec![0; self.graph.node_count()]; + for node in 0..self.graph.node_count() { + let mut values = [ + assignments[0][node], + assignments[1][node], + assignments[2][node], + assignments[3][node], + ]; + values.sort(); + coordinates[node] = (values[1] + values[2]) / 2; + } + + let min_coordinate = coordinates.iter().copied().min().unwrap_or(0); + if min_coordinate != 0 { + for value in &mut 
coordinates { + *value -= min_coordinate; + } + } + + coordinates + } +} + +struct BrandesKoepf<'a> { + graph: &'a WorkGraph, + order: Vec>, + dummies: HashSet, + marked: HashSet, + root: Vec, + align: Vec, + x: Vec>, + sink: Vec, + shift: Vec, + level: Vec, + positions: Vec, +} + +impl<'a> BrandesKoepf<'a> { + fn new(graph: &'a WorkGraph) -> Self { + let node_count = graph.node_count(); + Self { + graph, + order: Vec::new(), + dummies: HashSet::new(), + marked: HashSet::new(), + root: (0..node_count).collect(), + align: (0..node_count).collect(), + x: vec![None; node_count], + sink: (0..node_count).collect(), + shift: vec![i32::MAX; node_count], + level: vec![0; node_count], + positions: vec![0; node_count], + } + } + + fn compute_relative_coordinates( + &mut self, + order: &[Vec], + dummies: &HashSet, + ) -> Vec { + self.order = order.to_vec(); + self.dummies = dummies.clone(); + self.marked.clear(); + + for (level_index, level) in self.order.iter().enumerate() { + for (position_index, &node) in level.iter().enumerate() { + self.level[node] = level_index; + self.positions[node] = position_index; + } + } + + self.mark_conflicts(); + self.init(); + self.vertical_alignment(); + self.horizontal_compaction(); + + self.x.iter().map(|value| value.unwrap_or(0)).collect() + } + + fn mark_conflicts(&mut self) { + self.marked.clear(); + for level in 1..self.order.len().saturating_sub(2) { + for lower_index in 0..self.order[level + 1].len() { + let current = self.order[level + 1][lower_index]; + let neighbours = self.get_neighbours(current, level); + let incident_dummy = neighbours + .iter() + .copied() + .find(|node| self.dummies.contains(node)); + + if self.dummies.contains(¤t) { + if let Some(incident) = incident_dummy { + let incident_position = self.positions[incident]; + + for left_index in 0..lower_index { + let left = self.order[level + 1][left_index]; + let left_neighbours = self.get_neighbours(left, level); + for neighbour in left_neighbours { + if 
self.positions[neighbour] > incident_position + && !(self.dummies.contains(&left) + && self.dummies.contains(&neighbour)) + { + if let Some(edge_index) = + self.graph.edge_between(left, neighbour) + { + self.marked.insert(edge_index); + } + } + } + } + + for right_index in lower_index..self.order[level + 1].len() { + let right = self.order[level + 1][right_index]; + let right_neighbours = self.get_neighbours(right, level); + for neighbour in right_neighbours { + if incident_position > self.positions[neighbour] + && !(self.dummies.contains(&right) + && self.dummies.contains(&neighbour)) + { + if let Some(edge_index) = + self.graph.edge_between(right, neighbour) + { + self.marked.insert(edge_index); + } + } + } + } + } + } + } + } + } + + fn init(&mut self) { + let node_count = self.graph.node_count(); + self.root = (0..node_count).collect(); + self.align = (0..node_count).collect(); + self.sink = (0..node_count).collect(); + self.shift = vec![i32::MAX; node_count]; + self.x = vec![None; node_count]; + } + + fn vertical_alignment(&mut self) { + for level in 1..self.order.len() { + let mut barrier = -1i32; + for current_index in 0..self.order[level].len() { + let current = self.order[level][current_index]; + let neighbours = self.get_neighbours(current, level - 1); + let degree = neighbours.len(); + if degree == 0 { + continue; + } + + for median_index in ((degree - 1) / 2)..=(degree / 2) { + if self.align[current] != current { + continue; + } + + let median = neighbours[median_index]; + if let Some(edge_index) = self.graph.edge_between(current, median) { + let median_position = self.positions[median] as i32; + if !self.marked.contains(&edge_index) && barrier < median_position { + self.align[median] = current; + self.root[current] = self.root[median]; + self.align[current] = self.root[current]; + barrier = median_position; + } + } + } + } + } + } + + fn horizontal_compaction(&mut self) { + for node in 0..self.graph.node_count() { + if self.root[node] == node { + 
self.place_block(node); + } + } + } + + fn place_block(&mut self, node: usize) { + if self.x[node].is_some() { + return; + } + + self.x[node] = Some(0); + let mut current = node; + + loop { + let level = self.level[current]; + let position = self.positions[current]; + + if position > 0 { + let neighbour = self.order[level][position - 1]; + let root = self.root[neighbour]; + self.place_block(root); + + if self.sink[node] == node { + self.sink[node] = self.sink[root]; + } + + if self.sink[node] != self.sink[root] { + let candidate = self.x[node].unwrap() - self.x[root].unwrap() - MIN_DIST; + let sink = self.sink[root]; + self.shift[sink] = self.shift[sink].min(candidate); + } else { + let candidate = self.x[root].unwrap() + MIN_DIST; + self.x[node] = Some(self.x[node].unwrap().max(candidate)); + } + } + + current = self.align[current]; + if current == node { + break; + } + } + } + + fn get_absolute_coordinates(&self, relative: &[i32]) -> Vec { + let mut coordinates = relative.to_vec(); + for node in 0..self.graph.node_count() { + coordinates[node] = coordinates[self.root[node]]; + let current_shift = self.shift[self.sink[self.root[node]]]; + if current_shift < i32::MAX { + coordinates[node] += current_shift; + } + } + coordinates + } + + fn get_neighbours(&self, node: usize, level: usize) -> Vec { + let mut neighbours = Vec::new(); + for &other in &self.order[level] { + if self.graph.edge_between(node, other).is_some() { + neighbours.push(other); + } + } + neighbours + } +} + +fn canonical_pair(left: usize, right: usize) -> (usize, usize) { + if left < right { + (left, right) + } else { + (right, left) + } +} + +fn ordered_pair(level: &[usize], left: usize, right: usize) -> (usize, usize) { + let left_index = position(level, left); + let right_index = position(level, right); + if left_index < right_index { + (left_index, right_index) + } else { + (right_index, left_index) + } +} + +fn position(nodes: &[usize], node: usize) -> usize { + nodes.iter().position(|¤t| 
current == node).unwrap() +} + +fn sign(value: i32) -> i32 { + match value.cmp(&0) { + Ordering::Less => -1, + Ordering::Equal => 0, + Ordering::Greater => 1, + } +} + +fn move_to_front(nodes: &mut Vec, node: usize) { + move_to_position(nodes, node, 0); +} + +fn move_to_position(nodes: &mut Vec, node: usize, position_index: usize) { + let current_index = position(nodes, node); + let node = nodes.remove(current_index); + nodes.insert(position_index, node); +} + +fn swap_nodes(nodes: &mut [usize], first: usize, second: usize) { + let first_index = position(nodes, first); + let second_index = position(nodes, second); + nodes.swap(first_index, second_index); +} + +fn approx_eq(left: f64, right: f64) -> bool { + (left - right).abs() <= EPSILON +} + +fn distance(left: Point, right: Point) -> f64 { + ((left.x - right.x).powi(2) + (left.y - right.y).powi(2)).sqrt() +} + +fn polar_angle(point: Point, center: Point) -> f64 { + let radius = distance(point, center); + if radius.abs() < EPSILON { + return 0.0; + } + + if point.y - center.y < 0.0 { + ((point.x - center.x) / radius).clamp(-1.0, 1.0).acos() + } else { + (-(point.x - center.x) / radius).clamp(-1.0, 1.0).acos() + PI + } +} + +fn dedup_points(points: &mut Vec) { + points.dedup_by(|left, right| approx_eq(left.x, right.x) && approx_eq(left.y, right.y)); +} + +fn normalize_angle(angle: f64) -> f64 { + angle.rem_euclid(2.0 * PI) +} + +fn circular_mean_angle(angles: &[f64]) -> f64 { + if angles.is_empty() { + return 0.0; + } + + let sin_sum = angles.iter().map(|angle| angle.sin()).sum::(); + let cos_sum = angles.iter().map(|angle| angle.cos()).sum::(); + + if approx_eq(sin_sum, 0.0) && approx_eq(cos_sum, 0.0) { + return normalize_angle(angles[0]); + } + + normalize_angle(sin_sum.atan2(cos_sum)) +} + +fn unwrap_angles(angles: &[f64]) -> Vec { + if angles.is_empty() { + return Vec::new(); + } + + let mut unwrapped = Vec::with_capacity(angles.len()); + unwrapped.push(normalize_angle(angles[0])); + for &angle in 
angles.iter().skip(1) { + let mut candidate = normalize_angle(angle); + while candidate < *unwrapped.last().unwrap() { + candidate += 2.0 * PI; + } + unwrapped.push(candidate); + } + unwrapped +} + +fn min_circular_gap(angles: &[f64]) -> f64 { + if angles.len() <= 1 { + return 2.0 * PI; + } + + let mut sorted = angles + .iter() + .copied() + .map(normalize_angle) + .collect::>(); + sorted.sort_by(f64::total_cmp); + + sorted + .windows(2) + .map(|pair| pair[1] - pair[0]) + .chain(std::iter::once( + 2.0 * PI - sorted.last().copied().unwrap_or(0.0) + sorted[0], + )) + .fold(f64::INFINITY, f64::min) +} + +#[cfg(test)] +fn count_rendered_crossings( + working_graph: &WorkGraph, + node_levels: &[usize], + points: &[Point], +) -> usize { + let inter_level_edges = working_graph + .edges + .iter() + .enumerate() + .filter(|(_, edge)| node_levels[edge.source] != node_levels[edge.target]) + .collect::>(); + + let mut crossings = 0usize; + for left_index in 0..inter_level_edges.len() { + for right_index in (left_index + 1)..inter_level_edges.len() { + let (_, left_edge) = inter_level_edges[left_index]; + let (_, right_edge) = inter_level_edges[right_index]; + + if left_edge.source == right_edge.source + || left_edge.source == right_edge.target + || left_edge.target == right_edge.source + || left_edge.target == right_edge.target + { + continue; + } + + if segments_cross( + points[left_edge.source], + points[left_edge.target], + points[right_edge.source], + points[right_edge.target], + ) { + crossings += 1; + } + } + } + + crossings +} + +#[cfg(test)] +fn segments_cross(start_a: Point, end_a: Point, start_b: Point, end_b: Point) -> bool { + let orientation1 = orientation(start_a, end_a, start_b); + let orientation2 = orientation(start_a, end_a, end_b); + let orientation3 = orientation(start_b, end_b, start_a); + let orientation4 = orientation(start_b, end_b, end_a); + + sign_f64(orientation1) != sign_f64(orientation2) + && sign_f64(orientation3) != sign_f64(orientation4) +} + 
+#[cfg(test)] +fn orientation(start: Point, end: Point, point: Point) -> f64 { + (end.x - start.x) * (point.y - start.y) - (end.y - start.y) * (point.x - start.x) +} + +#[cfg(test)] +fn sign_f64(value: f64) -> i32 { + if value > EPSILON { + 1 + } else if value < -EPSILON { + -1 + } else { + 0 + } +} + +fn packed_angle(z: i32, coordinate: i32) -> f64 { + 2.0 * PI / f64::from(z) * f64::from(coordinate) +} + +fn distributed_angle(level_size: usize, index: usize) -> f64 { + if level_size <= 1 { + 0.0 + } else { + 2.0 * PI * index as f64 / level_size as f64 + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::filter_graph_to_descendants; + use crate::graph_from_ttl_path; + use crate::layering::compute_hierarchy_levels; + use crate::model::{Edge, Graph, Node}; + use crate::LayoutError; + use std::path::PathBuf; + + fn blank_nodes(count: usize) -> Vec { + (0..count).map(|_| Node::default()).collect() + } + + fn manual_work_graph(node_count: usize, edges: &[(usize, usize)]) -> WorkGraph { + let mut graph = WorkGraph::new(node_count); + for (index, &(source, target)) in edges.iter().enumerate() { + graph.add_edge(source, target, index).unwrap(); + } + graph + } + + fn circular_distance(left: f64, right: f64) -> f64 { + let difference = (normalize_angle(left) - normalize_angle(right)).abs(); + difference.min(2.0 * PI - difference) + } + + fn unique_point_count(points: &[Point]) -> usize { + let mut unique = Vec::::new(); + for point in points { + if !unique + .iter() + .any(|candidate| approx_eq(candidate.x, point.x) && approx_eq(candidate.y, point.y)) + { + unique.push(*point); + } + } + unique.len() + } + + fn legacy_spiral_route( + start: Point, + end: Point, + center: Point, + offset: i32, + quality: usize, + ) -> Vec { + let start_radius = distance(start, center); + let end_radius = distance(end, center); + let start_angle = polar_angle(start, center); + let end_angle = polar_angle(end, center) + (-(offset as f64)) * 2.0 * PI; + + if approx_eq(start_angle, 
end_angle) && offset == 0 { + return vec![start, end]; + } + + let outer_radius = start_radius.max(end_radius); + let flatness = outer_radius / quality as f64; + let angle_direction = if start_angle < end_angle { 1.0 } else { -1.0 }; + let mut angle = start_angle; + let mut radius = start_radius; + let mut route = vec![start]; + + loop { + route.push(Point::new( + center.x + radius * angle.cos(), + center.y - radius * angle.sin(), + )); + + let x = if radius.abs() < EPSILON { + f64::INFINITY + } else { + flatness / radius + }; + if x.is_nan() || x > 0.1 { + angle += PI / 4.0 * angle_direction; + } else { + let y = (2.0 * x * x - 4.0 * x + 1.0).clamp(-1.0, 1.0); + angle += y.acos() * angle_direction; + } + + if (angle - end_angle) * angle_direction > 0.0 { + break; + } + + let denominator = end_angle - start_angle; + let fraction_complete = if denominator.abs() < EPSILON { + 1.0 + } else { + (angle - start_angle) / denominator + }; + radius = start_radius + (end_radius - start_radius) * fraction_complete; + } + + route.push(end); + dedup_points(&mut route); + route + } + + fn route_angular_span(route: &[Point], center: Point) -> f64 { + if route.len() <= 1 { + return 0.0; + } + + let mut unwrapped = Vec::with_capacity(route.len()); + unwrapped.push(polar_angle(route[0], center)); + for point in route.iter().skip(1) { + let angle = polar_angle(*point, center); + let previous = *unwrapped.last().unwrap(); + let mut best_candidate = angle; + let mut best_delta = (angle - previous).abs(); + + for turns in -1..=1 { + let candidate = angle + turns as f64 * 2.0 * PI; + let delta = (candidate - previous).abs(); + if delta < best_delta { + best_candidate = candidate; + best_delta = delta; + } + } + + unwrapped.push(best_candidate); + } + + (unwrapped.last().unwrap() - unwrapped[0]).abs() + } + + fn count_polyline_crossings(routes: &[Vec]) -> usize { + let mut crossings = 0usize; + for left_route_index in 0..routes.len() { + for right_route_index in (left_route_index + 
1)..routes.len() { + let left = &routes[left_route_index]; + let right = &routes[right_route_index]; + for left_segment in left.windows(2) { + for right_segment in right.windows(2) { + if left_segment.iter().any(|left_point| { + right_segment.iter().any(|right_point| { + approx_eq(left_point.x, right_point.x) + && approx_eq(left_point.y, right_point.y) + }) + }) { + continue; + } + + if segments_cross( + left_segment[0], + left_segment[1], + right_segment[0], + right_segment[1], + ) { + crossings += 1; + } + } + } + } + } + crossings + } + + #[test] + fn longest_path_levels_match_hierarchy_rings() { + let graph = Graph::new( + blank_nodes(4), + vec![ + Edge::new(0, 1), + Edge::new(0, 2), + Edge::new(1, 3), + Edge::new(2, 3), + ], + ); + + let levels = compute_hierarchy_levels(&graph).unwrap(); + assert_eq!(levels, vec![0, 1, 1, 2]); + } + + #[test] + fn working_graph_inserts_dummy_nodes_for_long_edges() { + let graph = Graph::new( + blank_nodes(3), + vec![Edge::new(0, 1), Edge::new(1, 2), Edge::new(0, 2)], + ); + let levels = compute_hierarchy_levels(&graph).unwrap(); + let (work_graph, dummies, order) = build_working_graph(&graph, &levels).unwrap(); + + assert_eq!(levels, vec![0, 1, 2]); + assert_eq!(dummies.len(), 1); + assert_eq!(work_graph.node_count(), 4); + assert_eq!(order[1].len(), 2); + } + + #[test] + fn count_horizontal_crossings_matches_java_behavior() { + let work_graph = manual_work_graph(4, &[(0, 2), (1, 3)]); + let order = vec![vec![0, 1, 2, 3]]; + let reducer = CrossingReduction::new(&work_graph, order.clone()); + + assert_eq!(reducer.count_horizontal_crossings(&order), 1); + } + + #[test] + fn count_mixed_crossings_matches_java_behavior() { + let work_graph = manual_work_graph(6, &[(0, 3), (2, 4)]); + let order = vec![vec![0, 1], vec![2, 3, 4, 5]]; + let mut reducer = CrossingReduction::new(&work_graph, order.clone()); + + assert_eq!(reducer.count_hv_crossings(&order), 1); + } + + #[test] + fn sifting_reorders_crossing_two_level_graph() { + let 
work_graph = manual_work_graph(4, &[(0, 3), (1, 2)]); + let order = vec![vec![0, 1], vec![2, 3]]; + let (final_order, _) = CrossingReduction::new(&work_graph, order).minimize_crossings(); + + assert_eq!(final_order[1], vec![3, 2]); + } + + #[test] + fn layout_sets_coordinates_in_place() { + let mut graph = Graph::new( + blank_nodes(4), + vec![ + Edge::new(0, 1), + Edge::new(0, 2), + Edge::new(1, 3), + Edge::new(2, 3), + ], + ); + + layout_radial_hierarchy(&mut graph, LayoutConfig::default()).unwrap(); + + for node in &graph.nodes { + assert!(node.x.is_finite()); + assert!(node.y.is_finite()); + } + } + + #[test] + fn layout_wrapper_discards_artifacts_without_changing_edges() { + let graph = Graph::new(blank_nodes(3), vec![Edge::new(0, 1), Edge::new(1, 2)]); + let mut first = graph.clone(); + let mut second = graph.clone(); + + let artifacts = + layout_radial_hierarchy_with_artifacts(&mut first, LayoutConfig::default()).unwrap(); + layout_radial_hierarchy(&mut second, LayoutConfig::default()).unwrap(); + + assert_eq!(first.edges, second.edges); + assert_eq!(first.nodes, second.nodes); + assert!(!artifacts.edge_routes.is_empty()); + } + + #[test] + fn layout_artifacts_include_offsets_for_routed_edges() { + let mut graph = Graph::new( + blank_nodes(3), + vec![Edge::new(0, 1), Edge::new(1, 2), Edge::new(0, 2)], + ); + + let artifacts = + layout_radial_hierarchy_with_artifacts(&mut graph, LayoutConfig::default()).unwrap(); + + assert_eq!(artifacts.edge_offsets.len(), artifacts.edge_routes.len()); + assert!(artifacts + .edge_routes + .iter() + .all(|route| !route.points.is_empty())); + } + + #[test] + fn layout_artifacts_include_straight_route_for_single_root_edge() { + let mut graph = Graph::new(blank_nodes(2), vec![Edge::new(0, 1)]); + + let artifacts = + layout_radial_hierarchy_with_artifacts(&mut graph, LayoutConfig::default()).unwrap(); + + assert_eq!(artifacts.edge_routes[0].kind, EdgeRouteKind::Straight); + assert_eq!(artifacts.edge_routes[0].points.len(), 2); + } 
+ + #[test] + fn layout_artifacts_include_spiral_route_for_non_root_interlevel_edge() { + let mut graph = Graph::new(blank_nodes(3), vec![Edge::new(0, 1), Edge::new(1, 2)]); + + let artifacts = + layout_radial_hierarchy_with_artifacts(&mut graph, LayoutConfig::default()).unwrap(); + + assert!(artifacts + .edge_routes + .iter() + .any(|route| route.original_edge_index == 1 && route.kind == EdgeRouteKind::Spiral)); + } + + #[test] + fn intra_level_route_builder_returns_finite_points() { + let center = Point::new(200.0, 200.0); + let start = Point::new(300.0, 200.0); + let end = Point::new(200.0, 300.0); + let route = build_intra_level_route(start, end, center, 1.0, 80.0, 3); + + assert!(route.len() >= 2); + assert!(route + .iter() + .all(|point| point.x.is_finite() && point.y.is_finite())); + assert_eq!(route.first().copied(), Some(start)); + assert_eq!(route.last().copied(), Some(end)); + } + + #[test] + fn layout_is_deterministic() { + let graph = Graph::new( + blank_nodes(5), + vec![ + Edge::new(0, 2), + Edge::new(1, 2), + Edge::new(1, 3), + Edge::new(2, 4), + Edge::new(3, 4), + ], + ); + + let mut first = graph.clone(); + let mut second = graph; + let first_artifacts = + layout_radial_hierarchy_with_artifacts(&mut first, LayoutConfig::default()).unwrap(); + let second_artifacts = + layout_radial_hierarchy_with_artifacts(&mut second, LayoutConfig::default()).unwrap(); + + assert_eq!(first.nodes, second.nodes); + assert_eq!(first_artifacts.edge_offsets, second_artifacts.edge_offsets); + assert_eq!(first_artifacts.edge_routes, second_artifacts.edge_routes); + } + + #[test] + fn project_radial_places_single_root_on_center() { + let work_graph = manual_work_graph(2, &[(0, 1)]); + let order = vec![vec![0], vec![1]]; + let projection = project_radial(&work_graph, &order, &[0, 0], LayoutConfig::default()); + + assert_eq!(projection.points[0], projection.center); + } + + #[test] + fn distributed_ring_projection_spreads_nodes_around_full_circle() { + let work_graph = 
manual_work_graph(15, &[]); + let order = vec![ + vec![0], + vec![1, 2, 3, 4], + vec![5, 6, 7, 8, 9, 10, 11, 12, 13, 14], + ]; + let coordinates = vec![0, 0, 1, 2, 3, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9]; + let config = LayoutConfig { + ring_distribution: RingDistribution::Distributed, + ..LayoutConfig::default() + }; + + let projection = project_radial(&work_graph, &order, &coordinates, config); + let level_points = [ + projection.points[1], + projection.points[2], + projection.points[3], + projection.points[4], + ]; + + let center = projection.center; + let mut angles = level_points + .iter() + .map(|point| polar_angle(*point, center)) + .collect::>(); + angles.sort_by(f64::total_cmp); + + let gaps = angles + .windows(2) + .map(|pair| pair[1] - pair[0]) + .chain(std::iter::once(2.0 * PI - angles[3] + angles[0])) + .collect::>(); + + assert!(gaps.iter().all(|gap| (*gap - PI / 2.0).abs() < 1e-6)); + } + + #[test] + fn adaptive_ring_projection_caps_sparse_outer_rings_by_previous_density() { + let work_graph = manual_work_graph(7, &[(0, 1), (0, 2), (0, 3), (0, 4), (1, 5), (2, 6)]); + let order = vec![vec![0], vec![1, 2, 3, 4], vec![5, 6]]; + let coordinates = vec![0, 0, 1, 2, 3, 0, 1]; + let config = LayoutConfig { + ring_distribution: RingDistribution::Adaptive, + ..LayoutConfig::default() + }; + + let projection = project_radial(&work_graph, &order, &coordinates, config); + let center = projection.center; + let parent_angles = [ + polar_angle(projection.points[1], center), + polar_angle(projection.points[2], center), + ]; + let child_angles = [ + polar_angle(projection.points[5], center), + polar_angle(projection.points[6], center), + ]; + let child_gap = min_circular_gap(&child_angles); + + assert!(circular_distance(child_angles[0], parent_angles[0]) < 1e-6); + assert!(circular_distance(child_angles[1], parent_angles[1]) < 1e-6); + assert!(child_gap >= PI / 2.0 - 1e-6); + assert!(child_gap < PI - 1e-6); + } + + #[test] + fn 
adaptive_ring_projection_avoids_duplicates_across_wrap_boundary() { + let work_graph = manual_work_graph(9, &[(4, 5), (1, 6), (4, 7), (1, 8)]); + let order = vec![vec![0], vec![1, 2, 3, 4], vec![5, 6, 7, 8]]; + let coordinates = vec![0, 0, 1, 2, 3, 0, 1, 2, 3]; + let config = LayoutConfig { + ring_distribution: RingDistribution::Adaptive, + ..LayoutConfig::default() + }; + + let projection = project_radial(&work_graph, &order, &coordinates, config); + let center = projection.center; + let child_points = [ + projection.points[5], + projection.points[6], + projection.points[7], + projection.points[8], + ]; + let child_angles = child_points + .iter() + .map(|point| polar_angle(*point, center)) + .collect::>(); + + assert_eq!(unique_point_count(&child_points), child_points.len()); + assert!(min_circular_gap(&child_angles) >= ANGLE_EPSILON); + } + + #[test] + fn adaptive_ring_projection_keeps_propagated_gap_positive() { + let work_graph = manual_work_graph(9, &[(4, 5), (1, 6), (4, 7), (1, 8)]); + let order = vec![vec![0], vec![1, 2, 3, 4], vec![5, 6, 7, 8]]; + let coordinates = vec![0, 0, 1, 2, 3, 0, 1, 2, 3]; + + let (first_angles, first_gap) = project_level_angles( + &work_graph, + &order[1], + Some(order[0].as_slice()), + &vec![Some(0.0); work_graph.node_count()], + &coordinates, + 4, + RingDistribution::Adaptive, + None, + ); + let mut node_angles = vec![None; work_graph.node_count()]; + for (&node, angle) in order[1].iter().zip(first_angles.iter().copied()) { + node_angles[node] = Some(angle); + } + + let (_, second_gap) = project_level_angles( + &work_graph, + &order[2], + Some(order[1].as_slice()), + &node_angles, + &coordinates, + 4, + RingDistribution::Adaptive, + first_gap, + ); + + assert!(first_gap.unwrap() >= ANGLE_EPSILON); + assert!(second_gap.unwrap() >= ANGLE_EPSILON); + } + + #[test] + fn adaptive_ring_projection_uses_full_circle_when_a_later_ring_needs_more_space() { + let work_graph = manual_work_graph(14, &[]); + let order = vec![ + vec![0], + 
vec![1, 2, 3, 4], + vec![5, 6], + vec![7, 8, 9, 10, 11, 12, 13], + ]; + let coordinates = vec![0, 0, 1, 2, 3, 0, 1, 0, 1, 2, 3, 4, 5, 6]; + let config = LayoutConfig { + ring_distribution: RingDistribution::Adaptive, + ..LayoutConfig::default() + }; + + let projection = project_radial(&work_graph, &order, &coordinates, config); + let center = projection.center; + let outer_angles = [ + polar_angle(projection.points[7], center), + polar_angle(projection.points[8], center), + polar_angle(projection.points[9], center), + polar_angle(projection.points[10], center), + polar_angle(projection.points[11], center), + polar_angle(projection.points[12], center), + polar_angle(projection.points[13], center), + ]; + let outer_gap = min_circular_gap(&outer_angles); + + assert!((outer_gap - (2.0 * PI / 7.0)).abs() < 1e-6); + } + + #[test] + fn adaptive_projection_does_not_increase_rendered_crossings_on_boundary_fixture() { + let work_graph = manual_work_graph(9, &[(1, 5), (2, 6), (4, 7), (1, 8), (4, 6)]); + let order = vec![vec![0], vec![1, 2, 3, 4], vec![5, 6, 7, 8]]; + let coordinates = vec![0, 0, 1, 2, 3, 0, 1, 2, 3]; + + let packed = project_radial( + &work_graph, + &order, + &coordinates, + LayoutConfig { + ring_distribution: RingDistribution::Packed, + ..LayoutConfig::default() + }, + ); + let distributed = project_radial( + &work_graph, + &order, + &coordinates, + LayoutConfig { + ring_distribution: RingDistribution::Distributed, + ..LayoutConfig::default() + }, + ); + let adaptive = project_radial( + &work_graph, + &order, + &coordinates, + LayoutConfig { + ring_distribution: RingDistribution::Adaptive, + ..LayoutConfig::default() + }, + ); + + let packed_crossings = + count_rendered_crossings(&work_graph, &packed.node_levels, &packed.points); + let distributed_crossings = + count_rendered_crossings(&work_graph, &distributed.node_levels, &distributed.points); + let adaptive_crossings = + count_rendered_crossings(&work_graph, &adaptive.node_levels, &adaptive.points); + + 
#[test]
fn seam_spiral_route_uses_short_arc() {
    // Endpoints straddle the 0/2π angular seam; the route must not take the
    // long way around the circle.
    let center = Point::new(200.0, 200.0);
    let polar = |radius: f64, angle: f64| {
        Point::new(
            center.x + radius * angle.cos(),
            center.y - radius * angle.sin(),
        )
    };
    let start = polar(100.0, 11.0 * PI / 6.0);
    let end = polar(200.0, PI / 6.0);

    let points = vec![start, end];
    let node_levels = vec![0, 1];
    let edge = WorkEdge {
        source: 0,
        target: 1,
        original_edge: 0,
    };

    let route = build_spiral_route(&edge, &points, &node_levels, center, 0, 128);

    // Endpoints are preserved and the swept angle stays well under a half turn.
    assert_eq!(route.first().copied(), Some(start));
    assert_eq!(route.last().copied(), Some(end));
    assert!(route_angular_span(&route, center) < PI / 2.0);
}
* PI / 6.0).sin(), + ); + let right_outer = Point::new( + center.x + 200.0 * (5.0 * PI / 6.0).cos(), + center.y - 200.0 * (5.0 * PI / 6.0).sin(), + ); + + let left_edge = WorkEdge { + source: 0, + target: 1, + original_edge: 0, + }; + let right_edge = WorkEdge { + source: 2, + target: 3, + original_edge: 1, + }; + let points = vec![left_inner, left_outer, right_inner, right_outer]; + let node_levels = vec![0, 1, 0, 1]; + + let new_routes = vec![ + build_spiral_route(&left_edge, &points, &node_levels, center, 0, 128), + build_spiral_route(&right_edge, &points, &node_levels, center, 0, 128), + ]; + let legacy_routes = vec![ + legacy_spiral_route(left_inner, left_outer, center, 0, 128), + legacy_spiral_route(right_inner, right_outer, center, 0, 128), + ]; + + assert!(count_polyline_crossings(&new_routes) <= count_polyline_crossings(&legacy_routes)); + } + + #[test] + fn real_bfo_subtree_has_no_duplicate_nodes_or_long_seam_spirals() { + let ttl_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("data") + .join("vkg_full.ttl"); + if !ttl_path.exists() { + return; + } + + let imported = graph_from_ttl_path(&ttl_path).unwrap(); + let mut graph = + filter_graph_to_descendants(&imported, "http://purl.obolibrary.org/obo/BFO_0000001") + .unwrap(); + let artifacts = layout_radial_hierarchy_with_artifacts( + &mut graph, + LayoutConfig { + spiral_quality: 80, + ring_distribution: RingDistribution::Adaptive, + ..LayoutConfig::default() + }, + ) + .unwrap(); + + assert_eq!( + unique_point_count( + &graph + .nodes + .iter() + .map(|node| Point::new(node.x, node.y)) + .collect::>() + ), + graph.nodes.len() + ); + + let suspicious_routes = artifacts + .edge_routes + .iter() + .filter(|route| route.kind == EdgeRouteKind::Spiral && route.points.len() >= 2) + .filter(|route| { + let start_angle = polar_angle(route.points[0], artifacts.center); + let end_angle = polar_angle(*route.points.last().unwrap(), artifacts.center); + let endpoint_gap = circular_distance(start_angle, 
#[test]
fn rejects_invalid_graph_shapes() {
    // Each malformed shape must map to its own dedicated error variant.
    let duplicate_edges = Graph::new(blank_nodes(2), vec![Edge::new(0, 1), Edge::new(0, 1)]);
    assert!(matches!(
        compute_hierarchy_levels(&duplicate_edges),
        Err(LayoutError::DuplicateEdge { .. })
    ));

    let two_cycle = Graph::new(blank_nodes(2), vec![Edge::new(0, 1), Edge::new(1, 0)]);
    assert!(matches!(
        compute_hierarchy_levels(&two_cycle),
        Err(LayoutError::CycleDetected)
    ));

    let looping_node = Graph::new(blank_nodes(1), vec![Edge::new(0, 0)]);
    assert!(matches!(
        compute_hierarchy_levels(&looping_node),
        Err(LayoutError::SelfLoop { .. })
    ));
}
// Crate-internal modules; everything public is re-exported below so that
// downstream code (the Go bridge binary, the CLI in main.rs) only imports
// from the crate root.
mod bridge;
mod env_config;
mod error;
mod filter;
mod layering;
mod layout;
mod model;
mod svg_export;
mod ttl;

// Go-bridge entry points and wire types (JSON request/response shapes).
pub use bridge::{
    process_go_bridge_request, process_go_bridge_request_with_options, BridgeError,
    BridgeRuntimeConfig, GoBridgeEdge, GoBridgeNode, GoBridgePoint, GoBridgeRequest,
    GoBridgeResponse, GoBridgeRouteSegment, GoBridgeRoutedNode,
};
// Environment-driven configuration for the standalone binary.
pub use env_config::{EnvConfig, EnvConfigError};
pub use error::LayoutError;
// Subtree filtering by root-class IRI.
pub use filter::{filter_graph_to_descendants, GraphFilterError};
// Layering (level assignment) and the layout passes proper.
pub use layering::compute_hierarchy_levels;
pub use layout::{layout_radial_hierarchy, layout_radial_hierarchy_with_artifacts};
// Core data model shared by every stage.
pub use model::{
    Edge, EdgeRoute, EdgeRouteKind, Graph, LayoutArtifacts, LayoutConfig, Node, Point,
    RingDistribution, RoutedNode, SvgConfig,
};
// SVG rendering to string or file.
pub use svg_export::{
    render_svg_string, render_svg_string_with_options, write_svg_path, write_svg_path_with_options,
    SvgExportError,
};
// Turtle (rdfs:subClassOf) import.
pub use ttl::{graph_from_ttl_path, graph_from_ttl_reader, TtlImportError};
/// A graph node: an optional display label plus layout coordinates.
///
/// `x`/`y` start at the origin and are overwritten by the layout pass.
/// The TTL importer stores the class IRI in `label`.
///
/// The previous hand-written `Default` impl produced exactly what
/// `#[derive(Default)]` does (`None`, `0.0`, `0.0`), so it is replaced
/// by the derive.
#[derive(Debug, Clone, PartialEq, Default)]
pub struct Node {
    // Human-readable identifier; `None` for synthetic/blank nodes.
    pub label: Option<String>,
    // Layout output coordinates, filled in by `layout_radial_hierarchy`.
    pub x: f64,
    pub y: f64,
}
/// Tunable parameters for the radial hierarchy layout.
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct LayoutConfig {
    // Radius of the innermost ring, in abstract radial units.
    pub min_radius: f64,
    // Radial-unit gap between consecutive hierarchy levels.
    pub level_distance: f64,
    // Whether to translate the result into positive coordinate space
    // (NOTE(review): presumably so exported output starts at the origin —
    // confirm in layout.rs).
    pub align_positive_coords: bool,
    // Number of segments used when tessellating spiral edge routes;
    // higher values give smoother curves.
    pub spiral_quality: usize,
    // Margins applied on the left/top edges when aligning to positive
    // coordinates (NOTE(review): assumed output units — confirm in layout.rs).
    pub left_border: f64,
    pub upper_border: f64,
    // Scale factor converting radial units to output coordinates
    // (ring_radii multiplies radial units by this value).
    pub node_distance: f64,
    // Strategy for spreading each level's nodes around its ring.
    pub ring_distribution: RingDistribution,
}

impl Default for LayoutConfig {
    /// Defaults: unit ring spacing, 500-segment spirals, 80-unit borders and
    /// node spacing, packed ring distribution.
    fn default() -> Self {
        Self {
            min_radius: 1.0,
            level_distance: 1.0,
            align_positive_coords: true,
            spiral_quality: 500,
            left_border: 80.0,
            upper_border: 80.0,
            node_distance: 80.0,
            ring_distribution: RingDistribution::Packed,
        }
    }
}
+pub enum SvgExportError { + Io(std::io::Error), +} + +impl Display for SvgExportError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + SvgExportError::Io(error) => write!(f, "failed to write SVG output: {error}"), + } + } +} + +impl Error for SvgExportError { + fn source(&self) -> Option<&(dyn Error + 'static)> { + match self { + SvgExportError::Io(error) => Some(error), + } + } +} + +impl From for SvgExportError { + fn from(error: std::io::Error) -> Self { + Self::Io(error) + } +} + +pub fn write_svg_path( + path: impl AsRef, + graph: &Graph, + artifacts: &LayoutArtifacts, + layout: LayoutConfig, +) -> Result<(), SvgExportError> { + write_svg_path_with_options(path, graph, artifacts, layout, SvgConfig::default()) +} + +pub fn render_svg_string( + graph: &Graph, + artifacts: &LayoutArtifacts, + layout: LayoutConfig, +) -> String { + render_svg_string_with_options(graph, artifacts, layout, SvgConfig::default()) +} + +pub fn write_svg_path_with_options( + path: impl AsRef, + graph: &Graph, + artifacts: &LayoutArtifacts, + layout: LayoutConfig, + svg_config: SvgConfig, +) -> Result<(), SvgExportError> { + svg::save(path, &build_document(graph, artifacts, layout, svg_config)).map_err(Into::into) +} + +pub fn render_svg_string_with_options( + graph: &Graph, + artifacts: &LayoutArtifacts, + layout: LayoutConfig, + svg_config: SvgConfig, +) -> String { + build_document(graph, artifacts, layout, svg_config).to_string() +} + +fn build_document( + graph: &Graph, + artifacts: &LayoutArtifacts, + layout: LayoutConfig, + svg_config: SvgConfig, +) -> Document { + let bounds = compute_bounds(graph, artifacts, layout, svg_config); + let width = (bounds.max_x - bounds.min_x).max(1.0); + let height = (bounds.max_y - bounds.min_y).max(1.0); + + let mut document = Document::new() + .set("viewBox", (bounds.min_x, bounds.min_y, width, height)) + .set("width", width) + .set("height", height); + + document = document.add( + Rectangle::new() + .set("x", 
bounds.min_x) + .set("y", bounds.min_y) + .set("width", width) + .set("height", height) + .set("fill", BACKGROUND_COLOR), + ); + + for radius in ring_radii(artifacts, layout) { + document = document.add( + Circle::new() + .set("cx", artifacts.center.x) + .set("cy", artifacts.center.y) + .set("r", radius) + .set("fill", "none") + .set("stroke", RING_COLOR) + .set("stroke-width", RING_STROKE_WIDTH), + ); + } + + for data in edge_paths(graph, artifacts, svg_config) { + document = document.add( + SvgPathElement::new() + .set("fill", "none") + .set("stroke", EDGE_COLOR) + .set("stroke-width", EDGE_STROKE_WIDTH) + .set("stroke-linecap", "round") + .set("stroke-linejoin", "round") + .set("d", data), + ); + } + + for node in &graph.nodes { + document = document.add( + Circle::new() + .set("cx", node.x) + .set("cy", node.y) + .set("r", NODE_RADIUS) + .set("fill", NODE_FILL_COLOR) + .set("stroke", NODE_STROKE_COLOR) + .set("stroke-width", 1.0), + ); + } + + if svg_config.show_labels { + for node in &graph.nodes { + if let Some(label) = &node.label { + document = document.add( + SvgText::new(label.clone()) + .set("x", node.x + LABEL_X_OFFSET) + .set("y", node.y - LABEL_Y_OFFSET) + .set("fill", LABEL_COLOR) + .set("font-size", LABEL_FONT_SIZE) + .set("font-family", "Arial, Helvetica, sans-serif"), + ); + } + } + } + + document +} + +fn edge_paths(graph: &Graph, artifacts: &LayoutArtifacts, svg_config: SvgConfig) -> Vec { + if svg_config.shortest_edges { + graph + .edges + .iter() + .map(|edge| { + let source = &graph.nodes[edge.source]; + let target = &graph.nodes[edge.target]; + Data::new() + .move_to((source.x, source.y)) + .line_to((target.x, target.y)) + }) + .collect() + } else { + artifacts + .edge_routes + .iter() + .filter_map(|route| { + if route.points.len() < 2 { + return None; + } + + let mut data = Data::new().move_to((route.points[0].x, route.points[0].y)); + for point in route.points.iter().skip(1) { + data = data.line_to((point.x, point.y)); + } + Some(data) + 
/// Radii (in output coordinates) of the concentric guide rings to draw.
///
/// Returns one radius per hierarchy level, except that when exactly one node
/// sits at level 0 (a single root rendered at the center) the level-0 ring is
/// skipped — a ring through the lone center node would be degenerate.
fn ring_radii(artifacts: &LayoutArtifacts, layout: LayoutConfig) -> Vec<f64> {
    // No nodes at all -> no rings.
    let Some(max_level) = artifacts.node_levels.iter().copied().max() else {
        return Vec::new();
    };

    // A lone level-0 node is the center point; start drawing at level 1.
    let center_only_level = artifacts
        .node_levels
        .iter()
        .filter(|&&level| level == 0)
        .count()
        == 1;
    let start_level = if center_only_level { 1 } else { 0 };

    if start_level > max_level {
        return Vec::new();
    }

    (start_level..=max_level)
        .map(|level| {
            // Radial units grow linearly per level; node_distance converts
            // abstract radial units into output coordinates.
            let radial_units = layout.min_radius
                + (level.saturating_sub(start_level) as f64 * layout.level_distance);
            radial_units * layout.node_distance
        })
        .collect()
}
include_label_bounds(&mut bounds, node, label); + } + } + } + + for route in &artifacts.edge_routes { + for &point in &route.points { + bounds.include_point(point); + } + } + + bounds.expand(VIEWBOX_HORIZONTAL_MARGIN, VIEWBOX_VERTICAL_MARGIN); + bounds +} + +fn include_label_bounds(bounds: &mut Bounds, node: &crate::model::Node, label: &str) { + let start_x = node.x + LABEL_X_OFFSET; + let baseline_y = node.y - LABEL_Y_OFFSET; + let width = estimate_label_width(label); + let ascent = LABEL_FONT_SIZE as f64; + let descent = LABEL_FONT_SIZE as f64 * 0.3; + + bounds.include_point(Point::new(start_x, baseline_y - ascent)); + bounds.include_point(Point::new(start_x + width, baseline_y + descent)); +} + +fn estimate_label_width(label: &str) -> f64 { + label.chars().count() as f64 * LABEL_FONT_SIZE as f64 * LABEL_WIDTH_FACTOR +} + +#[cfg(test)] +mod tests { + use std::fs; + use std::time::{SystemTime, UNIX_EPOCH}; + + use super::*; + use crate::{graph_from_ttl_reader, layout_radial_hierarchy_with_artifacts, Edge, Graph, Node}; + + fn simple_graph() -> (Graph, LayoutArtifacts, LayoutConfig) { + let mut graph = Graph::new( + vec![ + Node { + label: Some("Root".to_owned()), + ..Node::default() + }, + Node { + label: Some("Child".to_owned()), + ..Node::default() + }, + Node { + label: Some("Leaf".to_owned()), + ..Node::default() + }, + ], + vec![Edge::new(0, 1), Edge::new(1, 2)], + ); + let layout = LayoutConfig::default(); + let artifacts = layout_radial_hierarchy_with_artifacts(&mut graph, layout).unwrap(); + (graph, artifacts, layout) + } + + #[test] + fn render_svg_contains_svg_root_and_paths() { + let (graph, artifacts, layout) = simple_graph(); + let svg = render_svg_string(&graph, &artifacts, layout); + + assert!(svg.contains(" 300.0); + assert!(bounds.max_y > 100.0); + } + + #[test] + fn render_svg_omits_labels_when_disabled() { + let (graph, artifacts, layout) = simple_graph(); + let svg = render_svg_string_with_options( + &graph, + &artifacts, + layout, + SvgConfig 
/// Build a [`Graph`] from Turtle data, keeping only `rdfs:subClassOf` triples.
///
/// Nodes are created lazily — one per distinct IRI, labelled with the IRI —
/// and edges point from superclass to subclass, matching the layout's
/// root-first hierarchy direction. Repeated triples are deduplicated.
///
/// # Errors
/// - [`TtlImportError::Parse`] when the Turtle input is malformed.
/// - [`TtlImportError::NoSubclassTriples`] when no usable subclass triple exists.
pub fn graph_from_ttl_reader<R: Read>(reader: R) -> Result<Graph, TtlImportError> {
    let mut nodes = Vec::new();
    let mut node_indices = HashMap::new();
    let mut edges = Vec::new();
    let mut seen_edges = HashSet::new();

    for triple in TurtleParser::new().for_reader(reader) {
        let triple = triple?;
        // Only the subclass hierarchy matters; skip every other predicate.
        if triple.predicate.as_ref() != rdfs::SUB_CLASS_OF {
            continue;
        }

        // Blank-node subjects and literal/blank objects cannot name a class;
        // silently skip them (a test pins this behaviour).
        let NamedOrBlankNode::NamedNode(subject) = triple.subject else {
            continue;
        };
        let Term::NamedNode(object) = triple.object else {
            continue;
        };

        let subclass = get_or_insert_node(&mut nodes, &mut node_indices, subject.as_str());
        let superclass = get_or_insert_node(&mut nodes, &mut node_indices, object.as_str());
        // Edge direction: superclass -> subclass; seen_edges deduplicates repeats.
        if seen_edges.insert((superclass, subclass)) {
            edges.push(Edge::new(superclass, subclass));
        }
    }

    // A file with triples but no subclass statements is a hard error so the
    // caller gets a clear message instead of an empty layout.
    if edges.is_empty() {
        return Err(TtlImportError::NoSubclassTriples);
    }

    Ok(Graph::new(nodes, edges))
}
#[test]
fn deduplicates_repeated_subclass_triples() {
    // The same subclass statement twice must still yield exactly one edge.
    let ttl = format!(
        "{TTL_PREFIXES}ex:A rdfs:subClassOf ex:B .\nex:A rdfs:subClassOf ex:B .\n"
    );

    let graph = graph_from_ttl_reader(ttl.as_bytes()).unwrap();

    assert_eq!(graph.edges, vec![Edge::new(1, 0)]);
}
#[test]
fn returns_clear_error_when_no_subclass_triples_exist() {
    // Valid Turtle without a single rdfs:subClassOf statement is a hard error.
    let ttl = format!("{TTL_PREFIXES}ex:A schema:name \"Alpha\" .\n");

    let outcome = graph_from_ttl_reader(ttl.as_bytes());

    assert!(matches!(outcome, Err(TtlImportError::NoSubclassTriples)));
}