# File: visualizador_instanciados/backend/app/main.py
# Snapshot captured 2026-03-04 13:49:14 -03:00 — 173 lines, 5.9 KiB, Python.
from __future__ import annotations
from contextlib import asynccontextmanager
import logging
import asyncio
from fastapi import FastAPI, HTTPException, Query
from fastapi.middleware.cors import CORSMiddleware
from .models import (
EdgesResponse,
GraphResponse,
NeighborsRequest,
NeighborsResponse,
NodesResponse,
SparqlQueryRequest,
StatsResponse,
)
from .pipelines.layout_dag_radial import CycleError
from .pipelines.owl_imports_combiner import (
build_combined_graph,
output_location_to_path,
resolve_output_location,
serialize_graph_to_ttl,
)
from .pipelines.selection_neighbors import fetch_neighbor_ids_for_selection
from .pipelines.snapshot_service import GraphSnapshotService
from .rdf_store import RDFStore
from .sparql_engine import RdflibEngine, SparqlEngine, create_sparql_engine
from .settings import Settings
# Application-wide configuration object; field values come from Settings'
# own sources (environment / defaults — see .settings for details).
settings = Settings()
# Module-level logger; handler/level configuration is left to the host process.
logger = logging.getLogger(__name__)
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application startup/shutdown hook wired into FastAPI.

    Startup, in order:
      1. Optionally combine owl:imports into a single TTL file (skipped when
         the output already exists and ``combine_force`` is unset).
      2. Create and start the SPARQL engine, reusing the in-memory combined
         graph when one was just built.
      3. In rdflib mode only, build the in-memory node/edge tables used by
         the paging endpoints (/api/nodes, /api/edges).

    Shutdown: closes the SPARQL engine after the ``yield``.
    """
    rdflib_preloaded_graph = None
    if settings.combine_owl_imports_on_start:
        # Entry TTL for the combine step; falls back to the main ttl_path.
        entry_location = settings.combine_entry_location or settings.ttl_path
        output_location = resolve_output_location(
            entry_location,
            output_location=settings.combine_output_location,
            output_name=settings.combine_output_name,
        )
        output_path = output_location_to_path(output_location)
        if output_path.exists() and not settings.combine_force:
            logger.info("Skipping combine step (output exists): %s", output_location)
        else:
            # Combining and serializing are blocking; run off the event loop.
            rdflib_preloaded_graph = await asyncio.to_thread(build_combined_graph, entry_location)
            logger.info("Finished combining imports; serializing to: %s", output_location)
            await asyncio.to_thread(serialize_graph_to_ttl, rdflib_preloaded_graph, output_location)
        if settings.graph_backend == "rdflib":
            # Point the rdflib backend at the combined output (whether it was
            # freshly written or reused from a previous run).
            settings.ttl_path = str(output_path)
    # When a graph was combined above, the engine reuses it instead of re-parsing.
    sparql: SparqlEngine = create_sparql_engine(settings, rdflib_graph=rdflib_preloaded_graph)
    await sparql.startup()
    app.state.sparql = sparql
    app.state.snapshot_service = GraphSnapshotService(sparql=sparql, settings=settings)
    # Only build node/edge tables when running in rdflib mode.
    if settings.graph_backend == "rdflib":
        assert isinstance(sparql, RdflibEngine)
        if sparql.graph is None:
            raise RuntimeError("rdflib graph failed to load")
        store = RDFStore(
            ttl_path=settings.ttl_path,
            include_bnodes=settings.include_bnodes,
            max_triples=settings.max_triples,
        )
        store.load(sparql.graph)
        app.state.store = store
    yield
    await sparql.shutdown()
# FastAPI application; startup/shutdown wiring happens in ``lifespan`` above.
app = FastAPI(title="visualizador_instanciados backend", lifespan=lifespan)
# Allowed browser origins come from settings; credentials are deliberately
# disabled, so ``*`` origins remain valid under the CORS spec if configured.
cors_origins = settings.cors_origin_list()
app.add_middleware(
    CORSMiddleware,
    allow_origins=cors_origins,
    allow_credentials=False,
    allow_methods=["*"],
    allow_headers=["*"],
)
@app.get("/api/health")
def health() -> dict[str, str]:
    """Liveness probe: always reports the service as up."""
    payload = {"status": "ok"}
    return payload
@app.get("/api/stats", response_model=StatsResponse)
async def stats() -> StatsResponse:
    """Summarize exactly what /api/graph would serve, not the global graph size.

    Raises:
        HTTPException: 422 when snapshot layout detects a cycle.
    """
    service: GraphSnapshotService = app.state.snapshot_service
    try:
        snapshot = await service.get(node_limit=50_000, edge_limit=100_000)
    except CycleError as exc:
        raise HTTPException(status_code=422, detail=str(exc)) from None
    meta = snapshot.meta
    # Prefer snapshot-provided metadata; fall back to live engine/settings.
    backend_name = meta.backend if meta else app.state.sparql.name
    ttl = meta.ttl_path if meta and meta.ttl_path else settings.ttl_path
    endpoint = meta.sparql_endpoint if meta else None
    edge_total = len(snapshot.edges)
    return StatsResponse(
        backend=backend_name,
        ttl_path=ttl,
        sparql_endpoint=endpoint,
        parsed_triples=edge_total,
        nodes=len(snapshot.nodes),
        edges=edge_total,
    )
@app.post("/api/sparql")
async def sparql_query(req: SparqlQueryRequest) -> dict:
    """Run a raw SPARQL query on the active engine and return its JSON result."""
    engine: SparqlEngine = app.state.sparql
    return await engine.query_json(req.query)
@app.post("/api/neighbors", response_model=NeighborsResponse)
async def neighbors(req: NeighborsRequest) -> NeighborsResponse:
    """Resolve neighbor ids for the selected nodes within the current snapshot.

    Raises:
        HTTPException: 422 when snapshot layout detects a cycle — the same
            error contract as /api/stats and /api/graph (previously this
            endpoint let CycleError escape as an unhandled 500).
    """
    svc: GraphSnapshotService = app.state.snapshot_service
    try:
        snap = await svc.get(node_limit=req.node_limit, edge_limit=req.edge_limit)
    except CycleError as e:
        # Consistency fix: surface layout cycles as 422 like the other endpoints.
        raise HTTPException(status_code=422, detail=str(e)) from None
    sparql: SparqlEngine = app.state.sparql
    neighbor_ids = await fetch_neighbor_ids_for_selection(
        sparql,
        snapshot=snap,
        selected_ids=req.selected_ids,
        include_bnodes=settings.include_bnodes,
    )
    return NeighborsResponse(selected_ids=req.selected_ids, neighbor_ids=neighbor_ids)
@app.get("/api/nodes", response_model=NodesResponse)
def nodes(
    limit: int = Query(default=10_000, ge=1, le=200_000),
    offset: int = Query(default=0, ge=0),
) -> NodesResponse:
    """Page through the precomputed node table (rdflib backend only).

    Raises:
        HTTPException: 501 when the configured backend is not rdflib.
    """
    if settings.graph_backend != "rdflib":
        raise HTTPException(status_code=501, detail="GET /api/nodes is only supported in GRAPH_BACKEND=rdflib mode")
    table: RDFStore = app.state.store
    page = table.node_slice(offset=offset, limit=limit)
    return NodesResponse(total=table.node_count, nodes=page)
@app.get("/api/edges", response_model=EdgesResponse)
def edges(
    limit: int = Query(default=50_000, ge=1, le=500_000),
    offset: int = Query(default=0, ge=0),
) -> EdgesResponse:
    """Page through the precomputed edge table (rdflib backend only).

    Raises:
        HTTPException: 501 when the configured backend is not rdflib.
    """
    if settings.graph_backend != "rdflib":
        raise HTTPException(status_code=501, detail="GET /api/edges is only supported in GRAPH_BACKEND=rdflib mode")
    table: RDFStore = app.state.store
    page = table.edge_slice(offset=offset, limit=limit)
    return EdgesResponse(total=table.edge_count, edges=page)
@app.get("/api/graph", response_model=GraphResponse)
async def graph(
    node_limit: int = Query(default=50_000, ge=1, le=200_000),
    edge_limit: int = Query(default=100_000, ge=1, le=500_000),
) -> GraphResponse:
    """Return the layout-ready graph snapshot, capped by the given limits.

    Raises:
        HTTPException: 422 when snapshot layout detects a cycle.
    """
    service: GraphSnapshotService = app.state.snapshot_service
    try:
        snapshot = await service.get(node_limit=node_limit, edge_limit=edge_limit)
    except CycleError as exc:
        raise HTTPException(status_code=422, detail=str(exc)) from None
    return snapshot