diff --git a/scripts/__init__.py b/scripts/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/scripts/archive_job.sh b/scripts/archive_job.sh new file mode 100644 index 0000000..5a3a05c --- /dev/null +++ b/scripts/archive_job.sh @@ -0,0 +1,29 @@ +#!/usr/bin/env bash +# archive_job.sh [frames_base] [nas_base] +# Rsyncs job frames + PLY to NAS .156 +set -euo pipefail + +JOB_ID="${1:?Usage: archive_job.sh [frames_base] [nas_base]}" +FRAMES_BASE="${2:-/root/cosma-qc-frames}" +NAS_BASE="${3:-/mnt/nas-cosma/cosma-archive}" + +SRC="${FRAMES_BASE}/job_${JOB_ID}" +DST="${NAS_BASE}/job_${JOB_ID}" + +if [ ! -d "${SRC}" ]; then + echo "Job dir not found: ${SRC}" + exit 1 +fi + +mkdir -p "${DST}" +echo "[$(date)] Archivage job_${JOB_ID} vers NAS..." + +rsync -av --progress "${SRC}/" "${DST}/" \ + --include="frame_*.jpg" \ + --include="*.ply" \ + --include="*.npz" \ + --include="*.log" \ + --exclude="*" \ + 2>&1 | tail -5 + +echo "[$(date)] Archive job_${JOB_ID} done: ${DST}" diff --git a/scripts/check_jobs.py b/scripts/check_jobs.py new file mode 100644 index 0000000..a5be989 --- /dev/null +++ b/scripts/check_jobs.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python3 +"""Check integrity of processed jobs (PLY + poses present).""" +import argparse +import glob +import json +import os +from pathlib import Path +from typing import Any + +REQUIRED_FILES = ["reconstruction.ply", "lingbot_poses.npz"] +OPTIONAL_FILES = ["model_decimated.ply"] + + +def check_job(job_id: int, frames_base: str = "/root/cosma-qc-frames") -> dict[str, Any]: + job_dir = Path(frames_base) / f"job_{job_id}" + if not job_dir.exists(): + return {"job_id": job_id, "status": "missing", "missing": [], "details": {}} + + missing = [f for f in REQUIRED_FILES if not (job_dir / f).exists()] + details: dict[str, Any] = {} + + ply = job_dir / "reconstruction.ply" + if ply.exists(): + details["ply_size_gb"] = round(ply.stat().st_size / 1e9, 2) + + poses = job_dir / "lingbot_poses.npz" + if poses.exists(): + try: 
+ import numpy as np + d = np.load(str(poses)) + n = d["poses"].shape[0] if "poses" in d else 0 + details["n_poses"] = n + except Exception as e: + details["poses_error"] = str(e) + + decimated = job_dir / "model_decimated.ply" + details["decimated"] = decimated.exists() + + return { + "job_id": job_id, + "status": "ok" if not missing else "incomplete", + "missing": missing, + "details": details, + } + + +def main() -> None: + p = argparse.ArgumentParser() + p.add_argument("job_ids", nargs="*", type=int) + p.add_argument("--frames-base", default="/root/cosma-qc-frames") + p.add_argument("--all", action="store_true", help="Check all job dirs") + args = p.parse_args() + + base = Path(args.frames_base) + if args.all: + ids = sorted( + int(d.name.replace("job_", "")) + for d in base.iterdir() + if d.is_dir() and d.name.startswith("job_") and d.name[4:].isdigit() + ) + else: + ids = args.job_ids + + results = [check_job(jid, args.frames_base) for jid in ids] + print(json.dumps(results, indent=2)) + + +if __name__ == "__main__": + main() diff --git a/scripts/pre_decimate.py b/scripts/pre_decimate.py new file mode 100644 index 0000000..250572b --- /dev/null +++ b/scripts/pre_decimate.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python3 +"""Decimate PLY and SCP to cosma-vm after a job completes.""" +import argparse +import subprocess +import sys +from pathlib import Path + +COSMA_VM = "cosma@192.168.0.83" +COSMA_DATA = "/data/cosma" +MAX_PTS = 300_000 + + +def find_ply(frames_dir: Path) -> Path | None: + for candidate in ["model.ply", "output.ply", "reconstruction.ply"]: + p = frames_dir / candidate + if p.exists(): + return p + plys = list(frames_dir.glob("*.ply")) + return plys[0] if plys else None + + +def decimate_ply(src: str, dst: str, max_pts: int = MAX_PTS) -> None: + import open3d as o3d + import numpy as np + src_path = Path(src) + if not src_path.exists(): + raise FileNotFoundError(src) + pcd = o3d.io.read_point_cloud(str(src_path)) + n = len(pcd.points) + if n > 
max_pts: + vol = float(np.prod(pcd.get_max_bound() - pcd.get_min_bound())) + vox = max((vol / max_pts) ** (1 / 3), 0.02) + pcd = pcd.voxel_down_sample(vox) + o3d.io.write_point_cloud(dst, pcd) + print(f"Decimated {n} -> {len(pcd.points)} pts -> {dst}", flush=True) + + +def main() -> None: + p = argparse.ArgumentParser() + p.add_argument("job_id", type=int) + p.add_argument("--frames-dir", required=True) + p.add_argument("--cosma-vm", default=COSMA_VM) + p.add_argument("--cosma-data", default=COSMA_DATA) + args = p.parse_args() + + frames_dir = Path(args.frames_dir) + ply_src = find_ply(frames_dir) + if ply_src is None: + print(f"No PLY found in {frames_dir}", flush=True) + sys.exit(0) + + ply_dec = frames_dir / "model_decimated.ply" + decimate_ply(str(ply_src), str(ply_dec)) + + remote_dir = f"{args.cosma_data}/{args.job_id}" + subprocess.run(["ssh", args.cosma_vm, f"mkdir -p {remote_dir}"], check=True) + subprocess.run(["scp", str(ply_dec), f"{args.cosma_vm}:{remote_dir}/model_decimated.ply"], check=True) + print(f"SCP done -> {args.cosma_vm}:{remote_dir}/model_decimated.ply", flush=True) + + +if __name__ == "__main__": + main() diff --git a/tests/test_check_jobs.py b/tests/test_check_jobs.py new file mode 100644 index 0000000..7b19ca6 --- /dev/null +++ b/tests/test_check_jobs.py @@ -0,0 +1,54 @@ +import tempfile +from pathlib import Path + + +def _make_job_dir(base: Path, job_id: int, has_ply: bool = True, has_poses: bool = True) -> Path: + job_dir = base / f"job_{job_id}" + job_dir.mkdir(parents=True) + if has_ply: + (job_dir / "reconstruction.ply").write_bytes(b"\x00" * 100) + if has_poses: + (job_dir / "lingbot_poses.npz").touch() + return job_dir + + +def test_complete_job_is_ok(): + from scripts.check_jobs import check_job + with tempfile.TemporaryDirectory() as tmp: + _make_job_dir(Path(tmp), 1) + result = check_job(1, tmp) + assert result["status"] == "ok" + assert result["job_id"] == 1 + assert result["missing"] == [] + + +def test_missing_ply_flagged(): 
+ from scripts.check_jobs import check_job + with tempfile.TemporaryDirectory() as tmp: + _make_job_dir(Path(tmp), 2, has_ply=False) + result = check_job(2, tmp) + assert result["status"] == "incomplete" + assert "reconstruction.ply" in result["missing"] + + +def test_missing_poses_flagged(): + from scripts.check_jobs import check_job + with tempfile.TemporaryDirectory() as tmp: + _make_job_dir(Path(tmp), 3, has_poses=False) + result = check_job(3, tmp) + assert result["status"] == "incomplete" + assert "lingbot_poses.npz" in result["missing"] + + +def test_missing_job_dir_returns_missing(): + from scripts.check_jobs import check_job + result = check_job(999, "/nonexistent/base") + assert result["status"] == "missing" + + +def test_decimated_flag_false_without_file(): + from scripts.check_jobs import check_job + with tempfile.TemporaryDirectory() as tmp: + _make_job_dir(Path(tmp), 4) + result = check_job(4, tmp) + assert result["details"]["decimated"] is False diff --git a/tests/test_pre_decimate.py b/tests/test_pre_decimate.py new file mode 100644 index 0000000..331c933 --- /dev/null +++ b/tests/test_pre_decimate.py @@ -0,0 +1,53 @@ +import numpy as np +import tempfile +from pathlib import Path +import pytest + + +def _make_tiny_ply(path: Path) -> None: + import open3d as o3d + pcd = o3d.geometry.PointCloud() + pts = np.random.rand(1000, 3).astype(np.float64) + pcd.points = o3d.utility.Vector3dVector(pts) + o3d.io.write_point_cloud(str(path), pcd) + + +def test_decimate_reduces_points(): + from scripts.pre_decimate import decimate_ply + with tempfile.TemporaryDirectory() as tmp: + src = Path(tmp) / "model.ply" + dst = Path(tmp) / "model_decimated.ply" + _make_tiny_ply(src) + decimate_ply(str(src), str(dst), max_pts=100) + import open3d as o3d + pcd = o3d.io.read_point_cloud(str(dst)) + # voxel downsampling is approximate — assert significantly fewer than original 1000 + assert len(pcd.points) < 500 + + +def test_decimate_small_cloud_unchanged(): + from 
scripts.pre_decimate import decimate_ply + with tempfile.TemporaryDirectory() as tmp: + src = Path(tmp) / "small.ply" + dst = Path(tmp) / "small_decimated.ply" + _make_tiny_ply(src) + decimate_ply(str(src), str(dst), max_pts=5000) + import open3d as o3d + pcd = o3d.io.read_point_cloud(str(dst)) + assert len(pcd.points) == 1000 + + +def test_decimate_missing_src_raises(): + from scripts.pre_decimate import decimate_ply + with tempfile.TemporaryDirectory() as tmp: + with pytest.raises(FileNotFoundError): + decimate_ply("/nonexistent.ply", str(Path(tmp) / "out.ply")) + + +def test_find_ply_candidates(): + from scripts.pre_decimate import find_ply + with tempfile.TemporaryDirectory() as tmp: + d = Path(tmp) + assert find_ply(d) is None + (d / "model.ply").touch() + assert find_ply(d) == d / "model.ply" diff --git a/viz/server.py b/viz/server.py index faa292a..aae69af 100644 --- a/viz/server.py +++ b/viz/server.py @@ -162,5 +162,76 @@ def _main(): _PLY_PATH = args.ply app.run(host="0.0.0.0", port=args.port, debug=False) +# ── COSMA QC Platform additions ────────────────────────── +import os +from pathlib import Path + +_DATA_DIR = Path(os.environ.get("COSMA_DATA_DIR", "/data/cosma")) + + +def _load_nav_data(job_id: int) -> dict: + out = {"job_id": job_id} + poses_path = _DATA_DIR / f"job_{job_id}_poses.npz" + ply_path = _DATA_DIR / f"job_{job_id}_decimated.ply" + if poses_path.exists(): + d = np.load(str(poses_path)) + poses = d["poses"] # (N, 3, 4) + xyz = poses[:, :3, 3] + out["track"] = { + "x": xyz[:, 0].tolist(), + "y": xyz[:, 1].tolist(), + "z": xyz[:, 2].tolist(), + } + out["n_poses"] = int(len(poses)) + out["ply_ready"] = ply_path.exists() + out["ply_path"] = str(ply_path) if ply_path.exists() else None + return out + + +@app.route("/map") +def map_view(): + from flask import render_template + return render_template("map.html") + + +@app.route("/nav") +def nav_view(): + from flask import render_template + return render_template("nav.html") + + 
+@app.route("/api/jobs") +def api_jobs(): + jobs = [] + for p in sorted(_DATA_DIR.glob("job_*_decimated.ply")): + parts = p.stem.split("_") + jid = int(parts[1]) + jobs.append({ + "id": jid, + "ply": str(p), + "poses": str(_DATA_DIR / f"job_{jid}_poses.npz"), + }) + return jsonify(jobs) + + +@app.route("/api/job/<int:job_id>/nav") +def api_job_nav(job_id: int): + return jsonify(_load_nav_data(job_id)) + + +@app.route("/api/job/<int:job_id>/ply") +def api_job_ply(job_id: int): + ply_path = _DATA_DIR / f"job_{job_id}_decimated.ply" + if not ply_path.exists(): + return jsonify({"error": "PLY non disponible"}), 404 + try: + import open3d as o3d + pcd = o3d.io.read_point_cloud(str(ply_path)) + pts = np.asarray(pcd.points) + return jsonify({"x": pts[:, 0].tolist(), "y": pts[:, 1].tolist(), + "z": pts[:, 2].tolist(), "n": int(len(pts))}) + except ImportError: + return jsonify({"error": "open3d not installed"}), 503 + if __name__ == "__main__": _main() diff --git a/viz/static/js/map.js b/viz/static/js/map.js new file mode 100644 index 0000000..d7b113c --- /dev/null +++ b/viz/static/js/map.js @@ -0,0 +1,50 @@ +const map = L.map('map').setView([43.17, 5.70], 13); +L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', { + attribution: '© OSM contributors', maxZoom: 19 +}).addTo(map); + +const status = document.getElementById('status'); +let trackLayer = null; + +async function loadJobs() { + const res = await fetch('/nav/api/jobs'); + const jobs = await res.json(); + const sel = document.getElementById('job-select'); + jobs.forEach(j => { + const opt = document.createElement('option'); + opt.value = j.id; + opt.textContent = 'job_' + j.id; + sel.appendChild(opt); + }); + const urlJob = new URLSearchParams(window.location.search).get('job'); + const target = urlJob ? parseInt(urlJob) : (jobs.length > 0 ?
jobs[0].id : null); + if (target) { + sel.value = target; + loadJob(target); + } else { + status.textContent = 'Aucun job disponible'; + } +} + +async function loadJob(jobId) { + status.textContent = 'Chargement job_' + jobId + '...'; + const res = await fetch('/nav/api/job/' + jobId + '/nav'); + const d = await res.json(); + if (trackLayer) map.removeLayer(trackLayer); + if (!d.track || !d.track.x.length) { + status.textContent = 'Pas de données track pour job_' + jobId; + return; + } + const cx = d.track.x.reduce((a,b)=>a+b,0)/d.track.x.length; + const cy = d.track.y.reduce((a,b)=>a+b,0)/d.track.y.length; + const pts = d.track.x.map((x,i) => [43.17 + (d.track.y[i]-cy)*0.000009, 5.70 + (x-cx)*0.000009]); + trackLayer = L.polyline(pts, { color: '#4ade80', weight: 2 }).addTo(map); + map.fitBounds(trackLayer.getBounds()); + status.textContent = 'job_' + jobId + ' — ' + d.n_poses + ' poses | PLY: ' + (d.ply_ready ? 'prêt' : 'non dispo'); +} + +document.getElementById('job-select').addEventListener('change', e => { + if (e.target.value) loadJob(parseInt(e.target.value)); +}); + +loadJobs(); diff --git a/viz/static/js/nav_charts.js b/viz/static/js/nav_charts.js new file mode 100644 index 0000000..9e9a837 --- /dev/null +++ b/viz/static/js/nav_charts.js @@ -0,0 +1,60 @@ +let chartXY = null, chartZ = null; + +function initCharts() { + const base = { + responsive: true, maintainAspectRatio: false, + plugins: { legend: { labels: { color: '#888' } }, title: { display: true, color: '#ccc' } }, + scales: { + x: { ticks: { color: '#666' }, grid: { color: '#1a2a1a' } }, + y: { ticks: { color: '#666' }, grid: { color: '#1a2a1a' } } + } + }; + chartXY = new Chart(document.getElementById('chart-xy'), { + type: 'scatter', + data: { datasets: [{ label: 'Track XY (m)', data: [], borderColor: '#4ade80', + backgroundColor: 'rgba(74,222,128,0.3)', pointRadius: 1 }] }, + options: { ...base, plugins: { ...base.plugins, + title: { display: true, text: 'Track XY lingbot (m, local)', color: 
'#ccc' } } } + }); + chartZ = new Chart(document.getElementById('chart-z'), { + type: 'line', + data: { datasets: [{ label: 'Z camera (m)', data: [], borderColor: '#60a5fa', + backgroundColor: 'transparent', pointRadius: 0, borderWidth: 1.5 }] }, + options: { ...base, + plugins: { ...base.plugins, + title: { display: true, text: 'Z camera / profondeur approx (m)', color: '#ccc' } }, + scales: { ...base.scales, y: { ...base.scales.y, reverse: true } } } + }); +} + +async function loadJobs() { + const res = await fetch('/nav/api/jobs'); + const jobs = await res.json(); + const sel = document.getElementById('job-select'); + jobs.forEach(j => { + const opt = document.createElement('option'); + opt.value = j.id; + opt.textContent = 'job_' + j.id; + sel.appendChild(opt); + }); + const urlJob = new URLSearchParams(window.location.search).get('job'); + const target = urlJob ? parseInt(urlJob) : (jobs.length > 0 ? jobs[0].id : null); + if (target) { sel.value = target; loadJob(target); } +} + +async function loadJob(jobId) { + const res = await fetch('/nav/api/job/' + jobId + '/nav'); + const d = await res.json(); + if (!d.track) return; + chartXY.data.datasets[0].data = d.track.x.map((x,i) => ({ x, y: d.track.y[i] })); + chartXY.update(); + chartZ.data.datasets[0].data = d.track.z.map((z,i) => ({ x: i, y: z })); + chartZ.update(); +} + +document.getElementById('job-select').addEventListener('change', e => { + if (e.target.value) loadJob(parseInt(e.target.value)); +}); + +initCharts(); +loadJobs(); diff --git a/viz/templates/map.html b/viz/templates/map.html new file mode 100644 index 0000000..98ab41a --- /dev/null +++ b/viz/templates/map.html @@ -0,0 +1,36 @@ + + + + + COSMA NAV — Carte + + + + + + +
+

COSMA NAV — Carte GPS

+ + 3D ↗ + Graphes ↗ +
+
+
Chargement...
+ + + diff --git a/viz/templates/nav.html b/viz/templates/nav.html new file mode 100644 index 0000000..4e6d325 --- /dev/null +++ b/viz/templates/nav.html @@ -0,0 +1,36 @@ + + + + + COSMA NAV — Données nav + + + + + +
+

COSMA NAV — Navigation

+ + Carte ↗ + 3D ↗ +
+
+
+
+
+ + +