stitch pipeline câblé : DB + dispatcher + UI + fix subpath Caddy
- Table stitches (per_auv + cross_auv) avec cancel/retry API
- Dispatcher : PLY export auto (--save_ply), trigger stitch en cascade quand tous les jobs d'un AUV sont done
- UI : section stitch live depuis DB avec statuts/durées/boutons
- Fix : <base href="/cosma-qc/"> + chemins relatifs pour Caddy subpath
- open3d 0.19.0 installé sur gpu (.87)
- SSH key .82→.87 configurée, alias gpu ajouté sur .82

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
62
app/main.py
62
app/main.py
@@ -102,6 +102,24 @@ def init_schema() -> None:
|
||||
|
||||
CREATE INDEX IF NOT EXISTS jobs_status_idx ON jobs(status);
|
||||
CREATE INDEX IF NOT EXISTS jobs_acq_idx ON jobs(acquisition_id);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS stitches (
|
||||
id INTEGER PRIMARY KEY,
|
||||
acquisition_id INTEGER NOT NULL REFERENCES acquisitions(id) ON DELETE CASCADE,
|
||||
level TEXT NOT NULL DEFAULT 'per_auv',
|
||||
auv TEXT,
|
||||
input_job_ids TEXT NOT NULL DEFAULT '[]',
|
||||
input_stitch_ids TEXT NOT NULL DEFAULT '[]',
|
||||
output_ply TEXT,
|
||||
status TEXT NOT NULL DEFAULT 'queued',
|
||||
worker_host TEXT,
|
||||
started_at TEXT,
|
||||
finished_at TEXT,
|
||||
error TEXT,
|
||||
created_at TEXT NOT NULL DEFAULT (datetime('now'))
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS stitches_acq_idx ON stitches(acquisition_id);
|
||||
""")
|
||||
|
||||
|
||||
@@ -124,6 +142,10 @@ def _build_acquisitions():
|
||||
jobs = conn.execute(
|
||||
"SELECT * FROM jobs ORDER BY auv, gopro_serial, segment_label"
|
||||
).fetchall()
|
||||
stitches = conn.execute(
|
||||
"SELECT * FROM stitches ORDER BY level DESC, auv"
|
||||
).fetchall()
|
||||
|
||||
by_acq: dict[int, list[dict]] = {}
|
||||
by_acq_total: dict[int, int] = {}
|
||||
for j in jobs:
|
||||
@@ -133,12 +155,30 @@ def _build_acquisitions():
|
||||
by_acq.setdefault(j["acquisition_id"], []).append(d)
|
||||
by_acq_total[j["acquisition_id"]] = by_acq_total.get(j["acquisition_id"], 0) + dur_s
|
||||
|
||||
stitches_by_acq: dict[int, list[dict]] = {}
|
||||
for s in stitches:
|
||||
d = dict(s)
|
||||
start = _parse_ts(s["started_at"])
|
||||
end = _parse_ts(s["finished_at"]) or (
|
||||
datetime.now(timezone.utc) if s["status"] == "running" else None
|
||||
)
|
||||
if start and end:
|
||||
if start.tzinfo is None:
|
||||
start = start.replace(tzinfo=timezone.utc)
|
||||
if end.tzinfo is None:
|
||||
end = end.replace(tzinfo=timezone.utc)
|
||||
d["_duration"] = _fmt_dur(int((end - start).total_seconds()))
|
||||
else:
|
||||
d["_duration"] = ""
|
||||
stitches_by_acq.setdefault(s["acquisition_id"], []).append(d)
|
||||
|
||||
return [
|
||||
{
|
||||
"id": acq["id"],
|
||||
"name": acq["name"],
|
||||
"source_path": acq["source_path"],
|
||||
"jobs": by_acq.get(acq["id"], []),
|
||||
"stitches": stitches_by_acq.get(acq["id"], []),
|
||||
"total_duration": _fmt_dur(by_acq_total.get(acq["id"], 0)),
|
||||
}
|
||||
for acq in acqs
|
||||
@@ -220,3 +260,25 @@ async def retry_job(job_id: int):
|
||||
(job_id,),
|
||||
)
|
||||
return {"ok": True}
|
||||
|
||||
|
||||
@app.post("/stitches/{stitch_id}/cancel")
async def cancel_stitch(stitch_id: int):
    """Cancel a stitch that has not finished yet.

    Flips the row to status='error' with an explanatory error message and a
    finish timestamp, so the dispatcher will not pick it up again. The WHERE
    guard restricts the update to rows still in 'queued' or 'running', so
    already-completed or already-failed stitches are left untouched.
    The endpoint is idempotent: a second call matches zero rows.
    """
    cancel_sql = (
        "UPDATE stitches SET status='error', error='cancelled by user', finished_at=datetime('now') "
        "WHERE id=? AND status IN ('queued','running')"
    )
    # NOTE(review): no explicit conn.commit() here — this matches the other
    # endpoints in the file, so db() presumably returns an autocommitting
    # connection (isolation_level=None). Confirm against db()'s definition.
    conn = db()
    with closing(conn):
        conn.execute(cancel_sql, (stitch_id,))
    return {"ok": True}
|
||||
|
||||
|
||||
@app.post("/stitches/{stitch_id}/retry")
async def retry_stitch(stitch_id: int):
    """Re-queue a failed stitch as if it had never run.

    Resets the row to status='queued' and clears every per-run field (error,
    output PLY path, start/finish timestamps, worker host) so the dispatcher
    treats it as fresh work. Only rows currently in 'error' are eligible —
    the WHERE guard prevents resetting queued, running, or successful
    stitches. Idempotent: a repeated call matches zero rows.
    """
    reset_sql = (
        "UPDATE stitches SET status='queued', error=NULL, output_ply=NULL, "
        "started_at=NULL, finished_at=NULL, worker_host=NULL WHERE id=? AND status='error'"
    )
    # NOTE(review): no explicit conn.commit(), consistent with the sibling
    # endpoints — assumes db() hands back an autocommitting connection.
    conn = db()
    with closing(conn):
        conn.execute(reset_sql, (stitch_id,))
    return {"ok": True}
|
||||
|
||||
Reference in New Issue
Block a user