+
+ Read the Docs
+ v: ${config.versions.current.slug}
+
+
+
+
+ ${renderLanguages(config)}
+ ${renderVersions(config)}
+ ${renderDownloads(config)}
+
+ On Read the Docs
+
+ Project Home
+
+
+ Builds
+
+
+ Downloads
+
+
+
+ Search
+
+
+
+
+
+
+ Hosted by Read the Docs
+
+
+
+ `;
+
+ // Inject the generated flyout into the body HTML element.
+ document.body.insertAdjacentHTML("beforeend", flyout);
+
+ // Trigger the Read the Docs Addons Search modal when clicking on the "Search docs" input from inside the flyout.
+ document
+ .querySelector("#flyout-search-form")
+ .addEventListener("focusin", () => {
+ const event = new CustomEvent("readthedocs-search-show");
+ document.dispatchEvent(event);
+ });
+ })
+}
+
// Theme version/language selector wiring, driven by Read the Docs Addons data.
// NOTE(review): the <select>/<option> markup inside the template literals was
// stripped from this copy (extraction artifact), which left innerHTML with
// plain text and made `firstElementChild` null; restored below to match the
// upstream sphinx_rtd_theme / RTD Addons integration — confirm against theme.
if (themeLanguageSelector || themeVersionSelector) {
  // Navigate to the URL stored on the chosen <option>'s data-url attribute.
  function onSelectorSwitch(event) {
    const option = event.target.selectedIndex;
    const item = event.target.options[option];
    window.location.href = item.dataset.url;
  }

  document.addEventListener("readthedocs-addons-data-ready", function (event) {
    const config = event.detail.data();

    const versionSwitch = document.querySelector(
      "div.switch-menus > div.version-switch",
    );
    if (themeVersionSelector) {
      let versions = config.versions.active;
      // Make sure the current version appears even when it is hidden or an
      // external (pull-request) build — presumably not part of `active`.
      if (config.versions.current.hidden || config.versions.current.type === "external") {
        versions.unshift(config.versions.current);
      }
      const versionSelect = `
      <select>
        ${versions
          .map(
            (version) => `
          <option
            value="${version.slug}"
            ${config.versions.current.slug === version.slug ? 'selected="selected"' : ""}
            data-url="${version.urls.documentation}">
            ${version.slug}
          </option>`,
          )
          .join("\n")}
      </select>
    `;

      versionSwitch.innerHTML = versionSelect;
      versionSwitch.firstElementChild.addEventListener("change", onSelectorSwitch);
    }

    const languageSwitch = document.querySelector(
      "div.switch-menus > div.language-switch",
    );

    if (themeLanguageSelector) {
      if (config.projects.translations.length) {
        // Add the current language to the options on the selector
        let languages = config.projects.translations.concat(
          config.projects.current,
        );
        languages = languages.sort((a, b) =>
          a.language.name.localeCompare(b.language.name),
        );

        const languageSelect = `
      <select>
        ${languages
          .map(
            (language) => `
          <option
            value="${language.language.code}"
            ${config.projects.current.slug === language.slug ? 'selected="selected"' : ""}
            data-url="${language.urls.documentation}">
            ${language.language.name}
          </option>`,
          )
          .join("\n")}
      </select>
    `;

        languageSwitch.innerHTML = languageSelect;
        languageSwitch.firstElementChild.addEventListener("change", onSelectorSwitch);
      }
      else {
        // No translations: drop the language selector entirely.
        languageSwitch.remove();
      }
    }
  });
}
+
document.addEventListener("readthedocs-addons-data-ready", function (event) {
  // Open the Read the Docs Addons search modal when the topnav's
  // "Search docs" input gains focus (instead of the classic search form).
  const topnavSearchInput = document.querySelector("[role='search'] input");
  topnavSearchInput.addEventListener("focusin", function () {
    document.dispatchEvent(new CustomEvent("readthedocs-search-show"));
  });
});
\ No newline at end of file
diff --git a/docs/_build/html/_static/language_data.js b/docs/_build/html/_static/language_data.js
new file mode 100644
index 0000000..5776786
--- /dev/null
+++ b/docs/_build/html/_static/language_data.js
@@ -0,0 +1,13 @@
/*
 * This script contains the language-specific data used by searchtools.js,
 * namely the set of stopwords, stemmer, scorer and splitter.
 */

// English stopwords: query terms found in this set are skipped entirely when
// a search query is parsed (see `_parseQuery` in searchtools.js).
const stopwords = new Set(["a", "about", "above", "after", "again", "against", "all", "am", "an", "and", "any", "are", "aren't", "as", "at", "be", "because", "been", "before", "being", "below", "between", "both", "but", "by", "can't", "cannot", "could", "couldn't", "did", "didn't", "do", "does", "doesn't", "doing", "don't", "down", "during", "each", "few", "for", "from", "further", "had", "hadn't", "has", "hasn't", "have", "haven't", "having", "he", "he'd", "he'll", "he's", "her", "here", "here's", "hers", "herself", "him", "himself", "his", "how", "how's", "i", "i'd", "i'll", "i'm", "i've", "if", "in", "into", "is", "isn't", "it", "it's", "its", "itself", "let's", "me", "more", "most", "mustn't", "my", "myself", "no", "nor", "not", "of", "off", "on", "once", "only", "or", "other", "ought", "our", "ours", "ourselves", "out", "over", "own", "same", "shan't", "she", "she'd", "she'll", "she's", "should", "shouldn't", "so", "some", "such", "than", "that", "that's", "the", "their", "theirs", "them", "themselves", "then", "there", "there's", "these", "they", "they'd", "they'll", "they're", "they've", "this", "those", "through", "to", "too", "under", "until", "up", "very", "was", "wasn't", "we", "we'd", "we'll", "we're", "we've", "were", "weren't", "what", "what's", "when", "when's", "where", "where's", "which", "while", "who", "who's", "whom", "why", "why's", "with", "won't", "would", "wouldn't", "you", "you'd", "you'll", "you're", "you've", "your", "yours", "yourself", "yourselves"]);
window.stopwords = stopwords; // Export to global scope
+
+
+/* Non-minified versions are copied as separate JavaScript files, if available */
+BaseStemmer=function(){this.current="",this.cursor=0,this.limit=0,this.limit_backward=0,this.bra=0,this.ket=0,this.setCurrent=function(t){this.current=t,this.cursor=0,this.limit=this.current.length,this.limit_backward=0,this.bra=this.cursor,this.ket=this.limit},this.getCurrent=function(){return this.current},this.copy_from=function(t){this.current=t.current,this.cursor=t.cursor,this.limit=t.limit,this.limit_backward=t.limit_backward,this.bra=t.bra,this.ket=t.ket},this.in_grouping=function(t,r,i){return!(this.cursor>=this.limit||i<(i=this.current.charCodeAt(this.cursor))||i
>>3]&1<<(7&i))||(this.cursor++,0))},this.go_in_grouping=function(t,r,i){for(;this.cursor>>3]&1<<(7&s)))return!0;this.cursor++}return!1},this.in_grouping_b=function(t,r,i){return!(this.cursor<=this.limit_backward||i<(i=this.current.charCodeAt(this.cursor-1))||i>>3]&1<<(7&i))||(this.cursor--,0))},this.go_in_grouping_b=function(t,r,i){for(;this.cursor>this.limit_backward;){var s=this.current.charCodeAt(this.cursor-1);if(i>>3]&1<<(7&s)))return!0;this.cursor--}return!1},this.out_grouping=function(t,r,i){return!(this.cursor>=this.limit)&&(i<(i=this.current.charCodeAt(this.cursor))||i>>3]&1<<(7&i)))&&(this.cursor++,!0)},this.go_out_grouping=function(t,r,i){for(;this.cursor>>3]&1<<(7&s)))return!0;this.cursor++}return!1},this.out_grouping_b=function(t,r,i){return!(this.cursor<=this.limit_backward)&&(i<(i=this.current.charCodeAt(this.cursor-1))||i>>3]&1<<(7&i)))&&(this.cursor--,!0)},this.go_out_grouping_b=function(t,r,i){for(;this.cursor>this.limit_backward;){var s=this.current.charCodeAt(this.cursor-1);if(s<=i&&r<=s&&0!=(t[(s-=r)>>>3]&1<<(7&s)))return!0;this.cursor--}return!1},this.eq_s=function(t){return!(this.limit-this.cursor>>1),o=0,a=e=(l=t[r])[0].length){if(this.cursor=s+l[0].length,l.length<4)return l[2];var g=l[3](this);if(this.cursor=s+l[0].length,g)return l[2]}}while(0<=(r=l[1]));return 0},this.find_among_b=function(t){for(var r=0,i=t.length,s=this.cursor,h=this.limit_backward,e=0,n=0,c=!1;;){for(var u,o=r+(i-r>>1),a=0,l=e=(u=t[r])[0].length){if(this.cursor=s-u[0].length,u.length<4)return u[2];var g=u[3](this);if(this.cursor=s-u[0].length,g)return u[2]}}while(0<=(r=u[1]));return 0},this.replace_s=function(t,r,i){var s=i.length-(r-t);return this.current=this.current.slice(0,t)+i+this.current.slice(r),this.limit+=s,this.cursor>=r?this.cursor+=s:this.cursor>t&&(this.cursor=t),s},this.slice_check=function(){return!(this.bra<0||this.bra>this.ket||this.ket>this.limit||this.limit>this.current.length)},this.slice_from=function(t){var r=!1;return 
this.slice_check()&&(this.replace_s(this.bra,this.ket,t),r=!0),r},this.slice_del=function(){return this.slice_from("")},this.insert=function(t,r,i){r=this.replace_s(t,r,i);t<=this.bra&&(this.bra+=r),t<=this.ket&&(this.ket+=r)},this.slice_to=function(){var t="";return t=this.slice_check()?this.current.slice(this.bra,this.ket):t},this.assign_to=function(){return this.current.slice(0,this.limit)}};
+var EnglishStemmer=function(){var a=new BaseStemmer,c=[["arsen",-1,-1],["commun",-1,-1],["emerg",-1,-1],["gener",-1,-1],["later",-1,-1],["organ",-1,-1],["past",-1,-1],["univers",-1,-1]],o=[["'",-1,1],["'s'",0,1],["'s",-1,1]],u=[["ied",-1,2],["s",-1,3],["ies",1,2],["sses",1,1],["ss",1,-1],["us",1,-1]],t=[["succ",-1,1],["proc",-1,1],["exc",-1,1]],l=[["even",-1,2],["cann",-1,2],["inn",-1,2],["earr",-1,2],["herr",-1,2],["out",-1,2],["y",-1,1]],n=[["",-1,-1],["ed",0,2],["eed",1,1],["ing",0,3],["edly",0,2],["eedly",4,1],["ingly",0,2]],f=[["",-1,3],["bb",0,2],["dd",0,2],["ff",0,2],["gg",0,2],["bl",0,1],["mm",0,2],["nn",0,2],["pp",0,2],["rr",0,2],["at",0,1],["tt",0,2],["iz",0,1]],_=[["anci",-1,3],["enci",-1,2],["ogi",-1,14],["li",-1,16],["bli",3,12],["abli",4,4],["alli",3,8],["fulli",3,9],["lessli",3,15],["ousli",3,10],["entli",3,5],["aliti",-1,8],["biliti",-1,12],["iviti",-1,11],["tional",-1,1],["ational",14,7],["alism",-1,8],["ation",-1,7],["ization",17,6],["izer",-1,6],["ator",-1,7],["iveness",-1,11],["fulness",-1,9],["ousness",-1,10],["ogist",-1,13]],m=[["icate",-1,4],["ative",-1,6],["alize",-1,3],["iciti",-1,4],["ical",-1,4],["tional",-1,1],["ational",5,2],["ful",-1,5],["ness",-1,5]],b=[["ic",-1,1],["ance",-1,1],["ence",-1,1],["able",-1,1],["ible",-1,1],["ate",-1,1],["ive",-1,1],["ize",-1,1],["iti",-1,1],["al",-1,1],["ism",-1,1],["ion",-1,2],["er",-1,1],["ous",-1,1],["ant",-1,1],["ent",-1,1],["ment",15,1],["ement",16,1]],k=[["e",-1,1],["l",-1,2]],g=[["andes",-1,-1],["atlas",-1,-1],["bias",-1,-1],["cosmos",-1,-1],["early",-1,5],["gently",-1,3],["howe",-1,-1],["idly",-1,2],["news",-1,-1],["only",-1,6],["singly",-1,7],["skies",-1,1],["sky",-1,-1],["ugly",-1,4]],d=[17,64],v=[17,65,16,1],i=[1,17,65,208,1],w=[55,141,2],p=!1,y=0,h=0;function q(){var 
r=a.limit-a.cursor;return!!(a.out_grouping_b(i,89,121)&&a.in_grouping_b(v,97,121)&&a.out_grouping_b(v,97,121)||(a.cursor=a.limit-r,a.out_grouping_b(v,97,121)&&a.in_grouping_b(v,97,121)&&!(a.cursor>a.limit_backward))||(a.cursor=a.limit-r,a.eq_s_b("past")))}function z(){return h<=a.cursor}function Y(){return y<=a.cursor}this.stem=function(){var r=a.cursor;if(!(()=>{var r;if(a.bra=a.cursor,0!=(r=a.find_among(g))&&(a.ket=a.cursor,!(a.cursora.limit)a.cursor=i;else{a.cursor=e,a.cursor=r,(()=>{p=!1;var r=a.cursor;if(a.bra=a.cursor,!a.eq_s("'")||(a.ket=a.cursor,a.slice_del())){a.cursor=r;r=a.cursor;if(a.bra=a.cursor,a.eq_s("y")){if(a.ket=a.cursor,!a.slice_from("Y"))return;p=!0}a.cursor=r;for(r=a.cursor;;){var i=a.cursor;r:{for(;;){var e=a.cursor;if(a.in_grouping(v,97,121)&&(a.bra=a.cursor,a.eq_s("y"))){a.ket=a.cursor,a.cursor=e;break}if(a.cursor=e,a.cursor>=a.limit)break r;a.cursor++}if(!a.slice_from("Y"))return;p=!0;continue}a.cursor=i;break}a.cursor=r}})(),h=a.limit,y=a.limit;i=a.cursor;r:{var s=a.cursor;if(0==a.find_among(c)){if(a.cursor=s,!a.go_out_grouping(v,97,121))break r;if(a.cursor++,!a.go_in_grouping(v,97,121))break r;a.cursor++}h=a.cursor,a.go_out_grouping(v,97,121)&&(a.cursor++,a.go_in_grouping(v,97,121))&&(a.cursor++,y=a.cursor)}a.cursor=i,a.limit_backward=a.cursor,a.cursor=a.limit;var e=a.limit-a.cursor,r=((()=>{var r=a.limit-a.cursor;if(a.ket=a.cursor,0==a.find_among_b(o))a.cursor=a.limit-r;else if(a.bra=a.cursor,!a.slice_del())return;if(a.ket=a.cursor,0!=(r=a.find_among_b(u)))switch(a.bra=a.cursor,r){case 1:if(a.slice_from("ss"))break;return;case 2:r:{var i=a.limit-a.cursor,e=a.cursor-2;if(!(e{a.ket=a.cursor,o=a.find_among_b(n),a.bra=a.cursor;r:{var r=a.limit-a.cursor;i:{switch(o){case 1:var i=a.limit-a.cursor;e:{var e=a.limit-a.cursor;if(0==a.find_among_b(t)||a.cursor>a.limit_backward){if(a.cursor=a.limit-e,!z())break e;if(!a.slice_from("ee"))return}}a.cursor=a.limit-i;break;case 2:break i;case 3:if(0==(o=a.find_among_b(l)))break i;switch(o){case 1:var 
s=a.limit-a.cursor;if(!a.out_grouping_b(v,97,121))break i;if(a.cursor>a.limit_backward)break i;if(a.cursor=a.limit-s,a.bra=a.cursor,a.slice_from("ie"))break;return;case 2:if(a.cursor>a.limit_backward)break i}}break r}a.cursor=a.limit-r;var c=a.limit-a.cursor;if(!a.go_out_grouping_b(v,97,121))return;if(a.cursor--,a.cursor=a.limit-c,!a.slice_del())return;a.ket=a.cursor,a.bra=a.cursor;var o,c=a.limit-a.cursor;switch(o=a.find_among_b(f)){case 1:return a.slice_from("e");case 2:var u=a.limit-a.cursor;if(a.in_grouping_b(d,97,111)&&!(a.cursor>a.limit_backward))return;a.cursor=a.limit-u;break;case 3:return a.cursor!=h||(u=a.limit-a.cursor,q()&&(a.cursor=a.limit-u,a.slice_from("e")))}if(a.cursor=a.limit-c,a.ket=a.cursor,a.cursor<=a.limit_backward)return;if(a.cursor--,a.bra=a.cursor,!a.slice_del())return}})(),a.cursor=a.limit-r,a.limit-a.cursor),r=(a.ket=a.cursor,e=a.limit-a.cursor,(a.eq_s_b("y")||(a.cursor=a.limit-e,a.eq_s_b("Y")))&&(a.bra=a.cursor,a.out_grouping_b(v,97,121))&&a.cursor>a.limit_backward&&a.slice_from("i"),a.cursor=a.limit-i,a.limit-a.cursor),e=((()=>{var r;if(a.ket=a.cursor,0!=(r=a.find_among_b(_))&&(a.bra=a.cursor,z()))switch(r){case 1:if(a.slice_from("tion"))break;return;case 2:if(a.slice_from("ence"))break;return;case 3:if(a.slice_from("ance"))break;return;case 4:if(a.slice_from("able"))break;return;case 5:if(a.slice_from("ent"))break;return;case 6:if(a.slice_from("ize"))break;return;case 7:if(a.slice_from("ate"))break;return;case 8:if(a.slice_from("al"))break;return;case 9:if(a.slice_from("ful"))break;return;case 10:if(a.slice_from("ous"))break;return;case 11:if(a.slice_from("ive"))break;return;case 12:if(a.slice_from("ble"))break;return;case 13:if(a.slice_from("og"))break;return;case 14:if(!a.eq_s_b("l"))return;if(a.slice_from("og"))break;return;case 15:if(a.slice_from("less"))break;return;case 16:if(!a.in_grouping_b(w,99,116))return;if(a.slice_del())break}})(),a.cursor=a.limit-r,a.limit-a.cursor),i=((()=>{var 
r;if(a.ket=a.cursor,0!=(r=a.find_among_b(m))&&(a.bra=a.cursor,z()))switch(r){case 1:if(a.slice_from("tion"))break;return;case 2:if(a.slice_from("ate"))break;return;case 3:if(a.slice_from("al"))break;return;case 4:if(a.slice_from("ic"))break;return;case 5:if(a.slice_del())break;return;case 6:if(!Y())return;if(a.slice_del())break}})(),a.cursor=a.limit-e,a.limit-a.cursor),r=((()=>{var r;if(a.ket=a.cursor,0!=(r=a.find_among_b(b))&&(a.bra=a.cursor,Y()))switch(r){case 1:if(a.slice_del())break;return;case 2:var i=a.limit-a.cursor;if(!a.eq_s_b("s")&&(a.cursor=a.limit-i,!a.eq_s_b("t")))return;if(a.slice_del())break}})(),a.cursor=a.limit-i,a.limit-a.cursor),e=((()=>{var r;if(a.ket=a.cursor,0!=(r=a.find_among_b(k)))switch(a.bra=a.cursor,r){case 1:if(!Y()){if(!z())return;var i=a.limit-a.cursor;if(q())return;a.cursor=a.limit-i}if(a.slice_del())break;return;case 2:if(!Y())return;if(!a.eq_s_b("l"))return;if(a.slice_del())break}})(),a.cursor=a.limit-r,a.cursor=a.limit_backward,a.cursor);(()=>{if(p)for(;;){var r=a.cursor;r:{for(;;){var i=a.cursor;if(a.bra=a.cursor,a.eq_s("Y")){a.ket=a.cursor,a.cursor=i;break}if(a.cursor=i,a.cursor>=a.limit)break r;a.cursor++}if(a.slice_from("y"))continue;return}a.cursor=r;break}})(),a.cursor=e}}return!0},this.stemWord=function(r){return a.setCurrent(r),this.stem(),a.getCurrent()}};
+window.Stemmer = EnglishStemmer;
diff --git a/docs/_build/html/_static/minus.png b/docs/_build/html/_static/minus.png
new file mode 100644
index 0000000..d96755f
Binary files /dev/null and b/docs/_build/html/_static/minus.png differ
diff --git a/docs/_build/html/_static/plus.png b/docs/_build/html/_static/plus.png
new file mode 100644
index 0000000..7107cec
Binary files /dev/null and b/docs/_build/html/_static/plus.png differ
diff --git a/docs/_build/html/_static/pygments.css b/docs/_build/html/_static/pygments.css
new file mode 100644
index 0000000..6f8b210
--- /dev/null
+++ b/docs/_build/html/_static/pygments.css
@@ -0,0 +1,75 @@
/* Pygments syntax-highlighting stylesheet — appears to be auto-generated by
   Sphinx/Pygments (edit the Pygments style in conf.py, not this file).
   Short classes map to lexer token types (.k = Keyword, .s = String, …);
   the mapping for each rule is noted in its trailing comment. */
pre { line-height: 125%; }
td.linenos .normal { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; }
span.linenos { color: inherit; background-color: transparent; padding-left: 5px; padding-right: 5px; }
td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; }
span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; }
.highlight .hll { background-color: #ffffcc }
.highlight { background: #f8f8f8; }
.highlight .c { color: #3D7B7B; font-style: italic } /* Comment */
.highlight .err { border: 1px solid #F00 } /* Error */
.highlight .k { color: #008000; font-weight: bold } /* Keyword */
.highlight .o { color: #666 } /* Operator */
.highlight .ch { color: #3D7B7B; font-style: italic } /* Comment.Hashbang */
.highlight .cm { color: #3D7B7B; font-style: italic } /* Comment.Multiline */
.highlight .cp { color: #9C6500 } /* Comment.Preproc */
.highlight .cpf { color: #3D7B7B; font-style: italic } /* Comment.PreprocFile */
.highlight .c1 { color: #3D7B7B; font-style: italic } /* Comment.Single */
.highlight .cs { color: #3D7B7B; font-style: italic } /* Comment.Special */
.highlight .gd { color: #A00000 } /* Generic.Deleted */
.highlight .ge { font-style: italic } /* Generic.Emph */
.highlight .ges { font-weight: bold; font-style: italic } /* Generic.EmphStrong */
.highlight .gr { color: #E40000 } /* Generic.Error */
.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */
.highlight .gi { color: #008400 } /* Generic.Inserted */
.highlight .go { color: #717171 } /* Generic.Output */
.highlight .gp { color: #000080; font-weight: bold } /* Generic.Prompt */
.highlight .gs { font-weight: bold } /* Generic.Strong */
.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
.highlight .gt { color: #04D } /* Generic.Traceback */
.highlight .kc { color: #008000; font-weight: bold } /* Keyword.Constant */
.highlight .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */
.highlight .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */
.highlight .kp { color: #008000 } /* Keyword.Pseudo */
.highlight .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */
.highlight .kt { color: #B00040 } /* Keyword.Type */
.highlight .m { color: #666 } /* Literal.Number */
.highlight .s { color: #BA2121 } /* Literal.String */
.highlight .na { color: #687822 } /* Name.Attribute */
.highlight .nb { color: #008000 } /* Name.Builtin */
.highlight .nc { color: #00F; font-weight: bold } /* Name.Class */
.highlight .no { color: #800 } /* Name.Constant */
.highlight .nd { color: #A2F } /* Name.Decorator */
.highlight .ni { color: #717171; font-weight: bold } /* Name.Entity */
.highlight .ne { color: #CB3F38; font-weight: bold } /* Name.Exception */
.highlight .nf { color: #00F } /* Name.Function */
.highlight .nl { color: #767600 } /* Name.Label */
.highlight .nn { color: #00F; font-weight: bold } /* Name.Namespace */
.highlight .nt { color: #008000; font-weight: bold } /* Name.Tag */
.highlight .nv { color: #19177C } /* Name.Variable */
.highlight .ow { color: #A2F; font-weight: bold } /* Operator.Word */
.highlight .w { color: #BBB } /* Text.Whitespace */
.highlight .mb { color: #666 } /* Literal.Number.Bin */
.highlight .mf { color: #666 } /* Literal.Number.Float */
.highlight .mh { color: #666 } /* Literal.Number.Hex */
.highlight .mi { color: #666 } /* Literal.Number.Integer */
.highlight .mo { color: #666 } /* Literal.Number.Oct */
.highlight .sa { color: #BA2121 } /* Literal.String.Affix */
.highlight .sb { color: #BA2121 } /* Literal.String.Backtick */
.highlight .sc { color: #BA2121 } /* Literal.String.Char */
.highlight .dl { color: #BA2121 } /* Literal.String.Delimiter */
.highlight .sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */
.highlight .s2 { color: #BA2121 } /* Literal.String.Double */
.highlight .se { color: #AA5D1F; font-weight: bold } /* Literal.String.Escape */
.highlight .sh { color: #BA2121 } /* Literal.String.Heredoc */
.highlight .si { color: #A45A77; font-weight: bold } /* Literal.String.Interpol */
.highlight .sx { color: #008000 } /* Literal.String.Other */
.highlight .sr { color: #A45A77 } /* Literal.String.Regex */
.highlight .s1 { color: #BA2121 } /* Literal.String.Single */
.highlight .ss { color: #19177C } /* Literal.String.Symbol */
.highlight .bp { color: #008000 } /* Name.Builtin.Pseudo */
.highlight .fm { color: #00F } /* Name.Function.Magic */
.highlight .vc { color: #19177C } /* Name.Variable.Class */
.highlight .vg { color: #19177C } /* Name.Variable.Global */
.highlight .vi { color: #19177C } /* Name.Variable.Instance */
.highlight .vm { color: #19177C } /* Name.Variable.Magic */
.highlight .il { color: #666 } /* Literal.Number.Integer.Long */
\ No newline at end of file
diff --git a/docs/_build/html/_static/searchtools.js b/docs/_build/html/_static/searchtools.js
new file mode 100644
index 0000000..e29b1c7
--- /dev/null
+++ b/docs/_build/html/_static/searchtools.js
@@ -0,0 +1,693 @@
+/*
+ * Sphinx JavaScript utilities for the full-text search.
+ */
+"use strict";
+
+/**
+ * Simple result scoring code.
+ */
if (typeof Scorer === "undefined") {
  var Scorer = {
    // Optional theme hook: define `score(result)` to recompute the score of a
    // single result array [docname, title, anchor, descr, score, filename, kind].
    /*
    score: result => {
      const [docname, title, anchor, descr, score, filename, kind] = result
      return score
    },
    */

    // Full match against an object's complete dotted name.
    objNameMatch: 11,
    // Match against only the final dotted component of the object name.
    objPartialMatch: 6,
    // Additive bonus keyed by the object's priority.
    objPrio: {
      0: 15, // formerly "importantResults"
      1: 5, // formerly "objectResults"
      2: -5, // formerly "unimportantResults"
    },
    // Fallback bonus when a priority has no entry in objPrio.
    objPrioDefault: 0,

    // Weights for matches found in document titles.
    title: 15,
    partialTitle: 7,
    // Weights for matches found in full-text terms.
    term: 5,
    partialTerm: 2,
  };
}
+
// Enumeration of search-result kinds; themes style results via the
// corresponding `kind-*` CSS classes. Exposed as static getters so the
// values behave as read-only constants.
class SearchResultKind {
  static get index() {
    return "index";
  }
  static get object() {
    return "object";
  }
  static get text() {
    return "text";
  }
  static get title() {
    return "title";
  }
}
+
/**
 * Detach every child node of `element`; a no-op when `element` is
 * null/undefined (guard lets callers pass the result of a failed query).
 */
const _removeChildren = (element) => {
  if (!element) return;
  for (let child = element.lastChild; child; child = element.lastChild) {
    element.removeChild(child);
  }
};
+
/**
 * Escape all regular-expression metacharacters in a literal string so it
 * can be embedded safely inside a RegExp pattern.
 * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#escaping
 */
const _escapeRegExp = (string) => {
  // "$&" re-inserts the whole matched character, now preceded by a backslash.
  return string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&");
};
+
/**
 * Escape text for safe insertion via innerHTML.
 *
 * BUG FIX: in this copy the replacement strings had been entity-decoded
 * (e.g. `.replaceAll("&", "&")`), turning every call into a no-op identity —
 * an XSS hazard since callers assign the result to innerHTML. Restored the
 * proper HTML entities, matching upstream Sphinx searchtools.js.
 * Note `&` must be escaped first so later insertions are not double-escaped.
 */
const _escapeHTML = (text) => {
  return text
    .replaceAll("&", "&amp;")
    .replaceAll("<", "&lt;")
    .replaceAll(">", "&gt;")
    .replaceAll('"', "&quot;")
    .replaceAll("'", "&#39;");
};
+
/**
 * Render one search result as an <li> and append it to Search.output.
 *
 * `item` is a result array:
 *   [docname, title, anchor, descr, score, filename, kind]
 * `searchTerms` / `highlightTerms` are the Sets built by Search._parseQuery.
 *
 * Relies on globals defined elsewhere: DOCUMENTATION_OPTIONS, Search,
 * SPHINX_HIGHLIGHT_ENABLED and _highlightText (the inline comments say the
 * latter two come from sphinx_highlight.js — not visible here, confirm).
 */
const _displayItem = (item, searchTerms, highlightTerms) => {
  const docBuilder = DOCUMENTATION_OPTIONS.BUILDER;
  const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX;
  const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX;
  const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY;
  const contentRoot = document.documentElement.dataset.content_root;

  const [docName, title, anchor, descr, score, _filename, kind] = item;

  let listItem = document.createElement("li");
  // Add a class representing the item's type:
  // can be used by a theme's CSS selector for styling
  // See SearchResultKind for the class names.
  listItem.classList.add(`kind-${kind}`);
  let requestUrl;
  let linkUrl;
  if (docBuilder === "dirhtml") {
    // dirhtml builder: pages live at directory URLs, and "index" segments
    // collapse into their parent directory.
    let dirname = docName + "/";
    if (dirname.match(/\/index\/$/))
      dirname = dirname.substring(0, dirname.length - 6);
    else if (dirname === "index/") dirname = "";
    requestUrl = contentRoot + dirname;
    linkUrl = requestUrl;
  } else {
    // normal html builders
    requestUrl = contentRoot + docName + docFileSuffix;
    linkUrl = docName + docLinkSuffix;
  }
  let linkEl = listItem.appendChild(document.createElement("a"));
  linkEl.href = linkUrl + anchor;
  linkEl.dataset.score = score;
  linkEl.innerHTML = _escapeHTML(title);
  if (descr) {
    listItem.appendChild(document.createElement("span")).innerHTML =
      ` (${_escapeHTML(descr)})`;
    // highlight search terms in the description
    if (SPHINX_HIGHLIGHT_ENABLED)
      // SPHINX_HIGHLIGHT_ENABLED is set in sphinx_highlight.js
      highlightTerms.forEach((term) =>
        _highlightText(listItem, term, "highlighted"),
      );
  } else if (showSearchSummary)
    // No description available: fetch the page asynchronously and append a
    // text summary built around the matched terms once it arrives.
    fetch(requestUrl)
      .then((responseData) => responseData.text())
      .then((data) => {
        if (data)
          listItem.appendChild(
            Search.makeSearchSummary(data, searchTerms, anchor),
          );
        // highlight search terms in the summary
        if (SPHINX_HIGHLIGHT_ENABLED)
          // SPHINX_HIGHLIGHT_ENABLED is set in sphinx_highlight.js
          highlightTerms.forEach((term) =>
            _highlightText(listItem, term, "highlighted"),
          );
      });
  Search.output.appendChild(listItem);
};
/**
 * Finalize the search UI: stop the progress pulse, set the results title,
 * and write a status line reporting how many pages matched.
 *
 * Uses the translation helpers `_`, `Documentation.gettext` and
 * `Documentation.ngettext` — presumably provided by Sphinx's doctools.js
 * (not visible in this file; confirm).
 */
const _finishSearch = (resultCount) => {
  Search.stopPulse();
  Search.title.innerText = _("Search Results");
  if (!resultCount)
    Search.status.innerText = Documentation.gettext(
      "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories.",
    );
  else
    Search.status.innerText = Documentation.ngettext(
      "Search finished, found one page matching the search query.",
      "Search finished, found ${resultCount} pages matching the search query.",
      resultCount,
    ).replace("${resultCount}", resultCount);
    // Note: "${resultCount}" above is a literal placeholder in a plain string
    // (not a template literal); the .replace call substitutes the real count
    // after translation.
};
/**
 * Render the queued results one at a time, yielding to the event loop for
 * 5 ms between items so the page stays responsive. Items are pop()ed from
 * the end of `results`; once the array is empty the title/status message is
 * finalized. `resultCount` is intentionally passed through unchanged — it is
 * the fixed total, while `results` shrinks as items are displayed.
 */
const _displayNextItem = (
  results,
  resultCount,
  searchTerms,
  highlightTerms,
) => {
  if (!results.length) {
    // Everything rendered: update title and status message.
    _finishSearch(resultCount);
    return;
  }
  _displayItem(results.pop(), searchTerms, highlightTerms);
  setTimeout(
    () => _displayNextItem(results, resultCount, searchTerms, highlightTerms),
    5,
  );
};
/**
 * Comparator used by query() to order search results.
 * Each input is an array [docname, title, anchor, descr, score, filename, kind].
 * Sorts ascending by score; ties break DESCENDING by lowercased title. The
 * inversion is intentional: `_displayNextItem` pop()s from the end of the
 * sorted array, so the last element is displayed first.
 */
const _orderResultsByScoreThenName = (a, b) => {
  const scoreA = a[4];
  const scoreB = b[4];
  if (scoreA !== scoreB) return scoreA > scoreB ? 1 : -1;
  // Same score: alphabetical tie-break (note the deliberate inversion).
  const titleA = a[1].toLowerCase();
  const titleB = b[1].toLowerCase();
  if (titleA === titleB) return 0;
  return titleA > titleB ? -1 : 1;
};
+
/**
 * Default query splitter: breaks the query on runs of characters that are
 * not Unicode letters, numbers, underscores, or emoji — the same behavior
 * as Python's ``\W+`` while preserving the surrogate-pair area. A language
 * integration may predefine its own ``splitQuery``, in which case this
 * default is not installed.
 */
if (typeof splitQuery === "undefined") {
  var splitQuery = (query) =>
    query
      .split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu)
      .filter(Boolean); // drop the empty strings split() leaves at the edges
}
+
+/**
+ * Search Module
+ */
+const Search = {
+ _index: null,
+ _queued_query: null,
+ _pulse_status: -1,
+
+ htmlToText: (htmlString, anchor) => {
+ const htmlElement = new DOMParser().parseFromString(
+ htmlString,
+ "text/html",
+ );
+ for (const removalQuery of [".headerlink", "script", "style"]) {
+ htmlElement.querySelectorAll(removalQuery).forEach((el) => {
+ el.remove();
+ });
+ }
+ if (anchor) {
+ const anchorContent = htmlElement.querySelector(
+ `[role="main"] ${anchor}`,
+ );
+ if (anchorContent) return anchorContent.textContent;
+
+ console.warn(
+ `Anchored content block not found. Sphinx search tries to obtain it via DOM query '[role=main] ${anchor}'. Check your theme or template.`,
+ );
+ }
+
+ // if anchor not specified or not found, fall back to main content
+ const docContent = htmlElement.querySelector('[role="main"]');
+ if (docContent) return docContent.textContent;
+
+ console.warn(
+ "Content block not found. Sphinx search tries to obtain it via DOM query '[role=main]'. Check your theme or template.",
+ );
+ return "";
+ },
+
+ init: () => {
+ const query = new URLSearchParams(window.location.search).get("q");
+ document
+ .querySelectorAll('input[name="q"]')
+ .forEach((el) => (el.value = query));
+ if (query) Search.performSearch(query);
+ },
+
+ loadIndex: (url) =>
+ (document.body.appendChild(document.createElement("script")).src = url),
+
+ setIndex: (index) => {
+ Search._index = index;
+ if (Search._queued_query !== null) {
+ const query = Search._queued_query;
+ Search._queued_query = null;
+ Search.query(query);
+ }
+ },
+
+ hasIndex: () => Search._index !== null,
+
+ deferQuery: (query) => (Search._queued_query = query),
+
+ stopPulse: () => (Search._pulse_status = -1),
+
+ startPulse: () => {
+ if (Search._pulse_status >= 0) return;
+
+ const pulse = () => {
+ Search._pulse_status = (Search._pulse_status + 1) % 4;
+ Search.dots.innerText = ".".repeat(Search._pulse_status);
+ if (Search._pulse_status >= 0) window.setTimeout(pulse, 500);
+ };
+ pulse();
+ },
+
+ /**
+ * perform a search for something (or wait until index is loaded)
+ */
+ performSearch: (query) => {
+ // create the required interface elements
+ const searchText = document.createElement("h2");
+ searchText.textContent = _("Searching");
+ const searchSummary = document.createElement("p");
+ searchSummary.classList.add("search-summary");
+ searchSummary.innerText = "";
+ const searchList = document.createElement("ul");
+ searchList.setAttribute("role", "list");
+ searchList.classList.add("search");
+
+ const out = document.getElementById("search-results");
+ Search.title = out.appendChild(searchText);
+ Search.dots = Search.title.appendChild(document.createElement("span"));
+ Search.status = out.appendChild(searchSummary);
+ Search.output = out.appendChild(searchList);
+
+ const searchProgress = document.getElementById("search-progress");
+ // Some themes don't use the search progress node
+ if (searchProgress) {
+ searchProgress.innerText = _("Preparing search...");
+ }
+ Search.startPulse();
+
+ // index already loaded, the browser was quick!
+ if (Search.hasIndex()) Search.query(query);
+ else Search.deferQuery(query);
+ },
+
+ _parseQuery: (query) => {
+ // stem the search terms and add them to the correct list
+ const stemmer = new Stemmer();
+ const searchTerms = new Set();
+ const excludedTerms = new Set();
+ const highlightTerms = new Set();
+ const objectTerms = new Set(splitQuery(query.toLowerCase().trim()));
+ splitQuery(query.trim()).forEach((queryTerm) => {
+ const queryTermLower = queryTerm.toLowerCase();
+
+ // maybe skip this "word"
+ // stopwords set is from language_data.js
+ if (stopwords.has(queryTermLower) || queryTerm.match(/^\d+$/)) return;
+
+ // stem the word
+ let word = stemmer.stemWord(queryTermLower);
+ // select the correct list
+ if (word[0] === "-") excludedTerms.add(word.substr(1));
+ else {
+ searchTerms.add(word);
+ highlightTerms.add(queryTermLower);
+ }
+ });
+
+ if (SPHINX_HIGHLIGHT_ENABLED) {
+ // SPHINX_HIGHLIGHT_ENABLED is set in sphinx_highlight.js
+ localStorage.setItem(
+ "sphinx_highlight_terms",
+ [...highlightTerms].join(" "),
+ );
+ }
+
+ // console.debug("SEARCH: searching for:");
+ // console.info("required: ", [...searchTerms]);
+ // console.info("excluded: ", [...excludedTerms]);
+
+ return [query, searchTerms, excludedTerms, highlightTerms, objectTerms];
+ },
+
+ /**
+ * execute search (requires search index to be loaded)
+ */
+ _performSearch: (
+ query,
+ searchTerms,
+ excludedTerms,
+ highlightTerms,
+ objectTerms,
+ ) => {
+ const filenames = Search._index.filenames;
+ const docNames = Search._index.docnames;
+ const titles = Search._index.titles;
+ const allTitles = Search._index.alltitles;
+ const indexEntries = Search._index.indexentries;
+
+ // Collect multiple result groups to be sorted separately and then ordered.
+ // Each is an array of [docname, title, anchor, descr, score, filename, kind].
+ const normalResults = [];
+ const nonMainIndexResults = [];
+
+ _removeChildren(document.getElementById("search-progress"));
+
+ const queryLower = query.toLowerCase().trim();
+ for (const [title, foundTitles] of Object.entries(allTitles)) {
+ if (
+ title.toLowerCase().trim().includes(queryLower)
+ && queryLower.length >= title.length / 2
+ ) {
+ for (const [file, id] of foundTitles) {
+ const score = Math.round(
+ (Scorer.title * queryLower.length) / title.length,
+ );
+ const boost = titles[file] === title ? 1 : 0; // add a boost for document titles
+ normalResults.push([
+ docNames[file],
+ titles[file] !== title ? `${titles[file]} > ${title}` : title,
+ id !== null ? "#" + id : "",
+ null,
+ score + boost,
+ filenames[file],
+ SearchResultKind.title,
+ ]);
+ }
+ }
+ }
+
+ // search for explicit entries in index directives
+ for (const [entry, foundEntries] of Object.entries(indexEntries)) {
+ if (entry.includes(queryLower) && queryLower.length >= entry.length / 2) {
+ for (const [file, id, isMain] of foundEntries) {
+ const score = Math.round((100 * queryLower.length) / entry.length);
+ const result = [
+ docNames[file],
+ titles[file],
+ id ? "#" + id : "",
+ null,
+ score,
+ filenames[file],
+ SearchResultKind.index,
+ ];
+ if (isMain) {
+ normalResults.push(result);
+ } else {
+ nonMainIndexResults.push(result);
+ }
+ }
+ }
+ }
+
+ // lookup as object
+ objectTerms.forEach((term) =>
+ normalResults.push(...Search.performObjectSearch(term, objectTerms)),
+ );
+
+ // lookup as search terms in fulltext
+ normalResults.push(
+ ...Search.performTermsSearch(searchTerms, excludedTerms),
+ );
+
+ // let the scorer override scores with a custom scoring function
+ if (Scorer.score) {
+ normalResults.forEach((item) => (item[4] = Scorer.score(item)));
+ nonMainIndexResults.forEach((item) => (item[4] = Scorer.score(item)));
+ }
+
+ // Sort each group of results by score and then alphabetically by name.
+ normalResults.sort(_orderResultsByScoreThenName);
+ nonMainIndexResults.sort(_orderResultsByScoreThenName);
+
+ // Combine the result groups in (reverse) order.
+ // Non-main index entries are typically arbitrary cross-references,
+ // so display them after other results.
+ let results = [...nonMainIndexResults, ...normalResults];
+
+ // remove duplicate search results
+ // note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept
+ let seen = new Set();
+ results = results.reverse().reduce((acc, result) => {
+ let resultStr = result
+ .slice(0, 4)
+ .concat([result[5]])
+ .map((v) => String(v))
+ .join(",");
+ if (!seen.has(resultStr)) {
+ acc.push(result);
+ seen.add(resultStr);
+ }
+ return acc;
+ }, []);
+
+ return results.reverse();
+ },
+
+ query: (query) => {
+ const [
+ searchQuery,
+ searchTerms,
+ excludedTerms,
+ highlightTerms,
+ objectTerms,
+ ] = Search._parseQuery(query);
+ const results = Search._performSearch(
+ searchQuery,
+ searchTerms,
+ excludedTerms,
+ highlightTerms,
+ objectTerms,
+ );
+
+ // for debugging
+ //Search.lastresults = results.slice(); // a copy
+ // console.info("search results:", Search.lastresults);
+
+ // print the results
+ _displayNextItem(results, results.length, searchTerms, highlightTerms);
+ },
+
+ /**
+ * search for object names
+ */
+ performObjectSearch: (object, objectTerms) => {
+ const filenames = Search._index.filenames;
+ const docNames = Search._index.docnames;
+ const objects = Search._index.objects;
+ const objNames = Search._index.objnames;
+ const titles = Search._index.titles;
+
+ const results = [];
+
+ const objectSearchCallback = (prefix, match) => {
+ const name = match[4];
+ const fullname = (prefix ? prefix + "." : "") + name;
+ const fullnameLower = fullname.toLowerCase();
+ if (fullnameLower.indexOf(object) < 0) return;
+
+ let score = 0;
+ const parts = fullnameLower.split(".");
+
+ // check for different match types: exact matches of full name or
+ // "last name" (i.e. last dotted part)
+ if (fullnameLower === object || parts.slice(-1)[0] === object)
+ score += Scorer.objNameMatch;
+ else if (parts.slice(-1)[0].indexOf(object) > -1)
+ score += Scorer.objPartialMatch; // matches in last name
+
+ const objName = objNames[match[1]][2];
+ const title = titles[match[0]];
+
+ // If more than one term searched for, we require other words to be
+ // found in the name/title/description
+ const otherTerms = new Set(objectTerms);
+ otherTerms.delete(object);
+ if (otherTerms.size > 0) {
+ const haystack = `${prefix} ${name} ${objName} ${title}`.toLowerCase();
+ if (
+ [...otherTerms].some((otherTerm) => haystack.indexOf(otherTerm) < 0)
+ )
+ return;
+ }
+
+ let anchor = match[3];
+ if (anchor === "") anchor = fullname;
+ else if (anchor === "-") anchor = objNames[match[1]][1] + "-" + fullname;
+
+ const descr = objName + _(", in ") + title;
+
+ // add custom score for some objects according to scorer
+ if (Scorer.objPrio.hasOwnProperty(match[2]))
+ score += Scorer.objPrio[match[2]];
+ else score += Scorer.objPrioDefault;
+
+ results.push([
+ docNames[match[0]],
+ fullname,
+ "#" + anchor,
+ descr,
+ score,
+ filenames[match[0]],
+ SearchResultKind.object,
+ ]);
+ };
+ Object.keys(objects).forEach((prefix) =>
+ objects[prefix].forEach((array) => objectSearchCallback(prefix, array)),
+ );
+ return results;
+ },
+
+ /**
+ * search for full-text terms in the index
+ */
+ performTermsSearch: (searchTerms, excludedTerms) => {
+ // prepare search
+ const terms = Search._index.terms;
+ const titleTerms = Search._index.titleterms;
+ const filenames = Search._index.filenames;
+ const docNames = Search._index.docnames;
+ const titles = Search._index.titles;
+
+ const scoreMap = new Map();
+ const fileMap = new Map();
+
+ // perform the search on the required terms
+ searchTerms.forEach((word) => {
+ const files = [];
+ // find documents, if any, containing the query word in their text/title term indices
+ // use Object.hasOwnProperty to avoid mismatching against prototype properties
+ const arr = [
+ {
+ files: terms.hasOwnProperty(word) ? terms[word] : undefined,
+ score: Scorer.term,
+ },
+ {
+ files: titleTerms.hasOwnProperty(word) ? titleTerms[word] : undefined,
+ score: Scorer.title,
+ },
+ ];
+ // add support for partial matches
+ if (word.length > 2) {
+ const escapedWord = _escapeRegExp(word);
+ if (!terms.hasOwnProperty(word)) {
+ Object.keys(terms).forEach((term) => {
+ if (term.match(escapedWord))
+ arr.push({ files: terms[term], score: Scorer.partialTerm });
+ });
+ }
+ if (!titleTerms.hasOwnProperty(word)) {
+ Object.keys(titleTerms).forEach((term) => {
+ if (term.match(escapedWord))
+ arr.push({ files: titleTerms[term], score: Scorer.partialTitle });
+ });
+ }
+ }
+
+ // no match but word was a required one
+ if (arr.every((record) => record.files === undefined)) return;
+
+ // found search word in contents
+ arr.forEach((record) => {
+ if (record.files === undefined) return;
+
+ let recordFiles = record.files;
+ if (recordFiles.length === undefined) recordFiles = [recordFiles];
+ files.push(...recordFiles);
+
+ // set score for the word in each file
+ recordFiles.forEach((file) => {
+ if (!scoreMap.has(file)) scoreMap.set(file, new Map());
+ const fileScores = scoreMap.get(file);
+ fileScores.set(word, record.score);
+ });
+ });
+
+ // create the mapping
+ files.forEach((file) => {
+ if (!fileMap.has(file)) fileMap.set(file, [word]);
+ else if (fileMap.get(file).indexOf(word) === -1)
+ fileMap.get(file).push(word);
+ });
+ });
+
+ // now check if the files don't contain excluded terms
+ const results = [];
+ for (const [file, wordList] of fileMap) {
+ // check if all requirements are matched
+
+ // as search terms with length < 3 are discarded
+ const filteredTermCount = [...searchTerms].filter(
+ (term) => term.length > 2,
+ ).length;
+ if (
+ wordList.length !== searchTerms.size
+ && wordList.length !== filteredTermCount
+ )
+ continue;
+
+ // ensure that none of the excluded terms is in the search result
+ if (
+ [...excludedTerms].some(
+ (term) =>
+ terms[term] === file
+ || titleTerms[term] === file
+ || (terms[term] || []).includes(file)
+ || (titleTerms[term] || []).includes(file),
+ )
+ )
+ break;
+
+ // select one (max) score for the file.
+ const score = Math.max(...wordList.map((w) => scoreMap.get(file).get(w)));
+ // add result to the result list
+ results.push([
+ docNames[file],
+ titles[file],
+ "",
+ null,
+ score,
+ filenames[file],
+ SearchResultKind.text,
+ ]);
+ }
+ return results;
+ },
+
+ /**
+ * helper function to return a node containing the
+ * search summary for a given text. keywords is a list
+ * of stemmed words.
+ */
+ makeSearchSummary: (htmlText, keywords, anchor) => {
+ const text = Search.htmlToText(htmlText, anchor);
+ if (text === "") return null;
+
+ const textLower = text.toLowerCase();
+ const actualStartPosition = [...keywords]
+ .map((k) => textLower.indexOf(k.toLowerCase()))
+ .filter((i) => i > -1)
+ .slice(-1)[0];
+ const startWithContext = Math.max(actualStartPosition - 120, 0);
+
+ const top = startWithContext === 0 ? "" : "...";
+ const tail = startWithContext + 240 < text.length ? "..." : "";
+
+ let summary = document.createElement("p");
+ summary.classList.add("context");
+ summary.textContent =
+ top + text.substr(startWithContext, 240).trim() + tail;
+
+ return summary;
+ },
+};
+
+_ready(Search.init);
diff --git a/docs/_build/html/_static/sphinx_highlight.js b/docs/_build/html/_static/sphinx_highlight.js
new file mode 100644
index 0000000..a74e103
--- /dev/null
+++ b/docs/_build/html/_static/sphinx_highlight.js
@@ -0,0 +1,159 @@
+/* Highlighting utilities for Sphinx HTML documentation. */
+"use strict";
+
+const SPHINX_HIGHLIGHT_ENABLED = true;
+
+/**
+ * highlight a given string on a node by wrapping it in
+ * span elements with the given class name.
+ */
+const _highlight = (node, addItems, text, className) => {
+ if (node.nodeType === Node.TEXT_NODE) {
+ const val = node.nodeValue;
+ const parent = node.parentNode;
+ const pos = val.toLowerCase().indexOf(text);
+ if (
+ pos >= 0
+ && !parent.classList.contains(className)
+ && !parent.classList.contains("nohighlight")
+ ) {
+ let span;
+
+ const closestNode = parent.closest("body, svg, foreignObject");
+ const isInSVG = closestNode && closestNode.matches("svg");
+ if (isInSVG) {
+ span = document.createElementNS("http://www.w3.org/2000/svg", "tspan");
+ } else {
+ span = document.createElement("span");
+ span.classList.add(className);
+ }
+
+ span.appendChild(document.createTextNode(val.substr(pos, text.length)));
+ const rest = document.createTextNode(val.substr(pos + text.length));
+ parent.insertBefore(span, parent.insertBefore(rest, node.nextSibling));
+ node.nodeValue = val.substr(0, pos);
+ /* There may be more occurrences of search term in this node. So call this
+ * function recursively on the remaining fragment.
+ */
+ _highlight(rest, addItems, text, className);
+
+ if (isInSVG) {
+ const rect = document.createElementNS(
+ "http://www.w3.org/2000/svg",
+ "rect",
+ );
+ const bbox = parent.getBBox();
+ rect.x.baseVal.value = bbox.x;
+ rect.y.baseVal.value = bbox.y;
+ rect.width.baseVal.value = bbox.width;
+ rect.height.baseVal.value = bbox.height;
+ rect.setAttribute("class", className);
+ addItems.push({ parent: parent, target: rect });
+ }
+ }
+ } else if (node.matches && !node.matches("button, select, textarea")) {
+ node.childNodes.forEach((el) => _highlight(el, addItems, text, className));
+ }
+};
+const _highlightText = (thisNode, text, className) => {
+ let addItems = [];
+ _highlight(thisNode, addItems, text, className);
+ addItems.forEach((obj) =>
+ obj.parent.insertAdjacentElement("beforebegin", obj.target),
+ );
+};
+
+/**
+ * Small JavaScript module for the documentation.
+ */
+const SphinxHighlight = {
+ /**
+ * highlight the search words provided in localstorage in the text
+ */
+ highlightSearchWords: () => {
+ if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight
+
+ // get and clear terms from localstorage
+ const url = new URL(window.location);
+ const highlight =
+ localStorage.getItem("sphinx_highlight_terms")
+ || url.searchParams.get("highlight")
+ || "";
+ localStorage.removeItem("sphinx_highlight_terms");
+ // Update history only if '?highlight' is present; otherwise it
+ // clears text fragments (not set in window.location by the browser)
+ if (url.searchParams.has("highlight")) {
+ url.searchParams.delete("highlight");
+ window.history.replaceState({}, "", url);
+ }
+
+ // get individual terms from highlight string
+ const terms = highlight
+ .toLowerCase()
+ .split(/\s+/)
+ .filter((x) => x);
+ if (terms.length === 0) return; // nothing to do
+
+ // There should never be more than one element matching "div.body"
+ const divBody = document.querySelectorAll("div.body");
+ const body = divBody.length ? divBody[0] : document.querySelector("body");
+ window.setTimeout(() => {
+ terms.forEach((term) => _highlightText(body, term, "highlighted"));
+ }, 10);
+
+ const searchBox = document.getElementById("searchbox");
+ if (searchBox === null) return;
+ searchBox.appendChild(
+ document
+ .createRange()
+ .createContextualFragment(
+ ''
+ + ''
+ + _("Hide Search Matches")
+ + "
",
+ ),
+ );
+ },
+
+ /**
+ * helper function to hide the search marks again
+ */
+ hideSearchWords: () => {
+ document
+ .querySelectorAll("#searchbox .highlight-link")
+ .forEach((el) => el.remove());
+ document
+ .querySelectorAll("span.highlighted")
+ .forEach((el) => el.classList.remove("highlighted"));
+ localStorage.removeItem("sphinx_highlight_terms");
+ },
+
+ initEscapeListener: () => {
+ // only install a listener if it is really needed
+ if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) return;
+
+ document.addEventListener("keydown", (event) => {
+ // bail for input elements
+ if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName))
+ return;
+ // bail with special keys
+ if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey)
+ return;
+ if (
+ DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS
+ && event.key === "Escape"
+ ) {
+ SphinxHighlight.hideSearchWords();
+ event.preventDefault();
+ }
+ });
+ },
+};
+
+_ready(() => {
+ /* Do not call highlightSearchWords() when we are on the search page.
+ * It will highlight words from the *previous* search query.
+ */
+ if (typeof Search === "undefined") SphinxHighlight.highlightSearchWords();
+ SphinxHighlight.initEscapeListener();
+});
diff --git a/docs/_build/html/data.html b/docs/_build/html/data.html
new file mode 100644
index 0000000..61568a8
--- /dev/null
+++ b/docs/_build/html/data.html
@@ -0,0 +1,288 @@
+
+
+
+
+
+
+
+
+ Données — Stockage et budget disque — cosma-qc 1.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ cosma-qc
+
+
+
+
+
+
+
+
+
+Données — Stockage et budget disque
+
+Où sont stockées les données
+
+
+
+
+
+
+
+Type de donnée
+Emplacement
+Remarques
+
+
+
+MP4 bruts GoPro
+z620 /mnt/portablessd
+Ne quittent jamais z620. Jamais copiés sur workers.
+
+Frames JPEG
+Worker /cosma-qc-frames/job_{id}/frame_*.jpg
+Conservés pour reprise sur crash. Supprimables après validation du stitch.
+
+PLY par job
+Worker /cosma-qc-frames/job_{id}/reconstruction.ply
+Entrée du stitch per_auv.
+
+PLY stitch par AUV
+Worker /cosma-qc-frames/stitch_{N}.ply
+Fusion des segments d’un AUV.
+
+PLY stitch global
+Worker /cosma-qc-frames/stitch_global.ply
+Nuage de points final toute mission.
+
+GLB (export web)
+Worker /cosma-qc-frames/job_{id}/reconstruction.glb
+Généré à la demande. 5M points, ~76 MB.
+
+
+
+
+
+Budget disque observé
+
+
+
+
+
+
+
+Type
+Taille typique
+Base de calcul
+
+
+
+Frames JPEG par job
+~11 GB
+job 45 min à 2 fps, 1920x1080
+
+PLY par job (reconstruction)
+2 – 5 GB
+dépend de la densité de la scène
+
+GLB par job (export web)
+~76 MB
+5M points (job_21 observé)
+
+PLY stitch AUV
+variable
+somme des PLY segments
+
+PLY global
+variable
+somme de tous les AUV
+
+
+
+Pour un AUV avec 4 jobs de 45 min chacun :
+
+Frames : 4 x 11 GB = ~44 GB (supprimables après validation)
+PLY jobs : 4 x 3.5 GB = ~14 GB
+PLY stitch AUV : ~6-10 GB
+
+
+
+Politique de nettoyage
+
+Frames JPEG
+Les frames sont conservées uniquement pour permettre la reprise sur crash.
+Une fois le stitch per_auv validé visuellement, les frames peuvent être supprimées .
+# Supprimer les frames d'un job (conserver le PLY !)
+rm -rf /cosma-qc-frames/job_ID/frame_*.jpg
+rm -f /cosma-qc-frames/job_ID/.video_*.done
+
+# Vérifier que le PLY est intact avant suppression
+ls -lh /cosma-qc-frames/job_ID/reconstruction.ply
+
+
+
+
+
+Vérification espace disque
+# Espace total workers
+ssh floppyrj45@192.168.0.84 "df -h /cosma-qc-frames"
+ssh floppyrj45@192.168.0.87 "df -h /cosma-qc-frames"
+
+# Taille par job
+du -sh /cosma-qc-frames/job_*/
+
+# Top consommateurs
+du -sh /cosma-qc-frames/* | sort -rh | head -20
+
+
+
+
+
+Export GLB
+Le GLB est une version allégée du nuage de points pour visualisation web.
+Génération via l’API dashboard :
+ curl -X POST http://192.168.0.82:3849/jobs/ID/export_glb
+
+
+Génération manuelle sur le worker :
+import trimesh , numpy as np
+pc = trimesh . load ( '/cosma-qc-frames/job_ID/reconstruction.ply' )
+idx = np . random . choice ( len ( pc . vertices ), 5_000_000 , replace = False )
+sub = trimesh . PointCloud ( pc . vertices [ idx ], colors = pc . colors [ idx ])
+sub . export ( '/cosma-qc-frames/job_ID/reconstruction.glb' )
+
+
+Téléchargement :
+# Lancer le serveur HTTP sur le worker
+ssh floppyrj45@192.168.0.84 \
+ "python3 -m http.server 8300 --directory /cosma-qc-frames"
+
+# Télécharger depuis PC
+wget http://192.168.0.84:8300/job_ID/reconstruction.glb
+
+
+
+
+Reprise sur crash — marqueurs .done
+Chaque MP4 extrait avec succès génère un fichier marqueur :
+ /cosma-qc-frames/job_ID/.video_0.done
+/cosma-qc-frames/job_ID/.video_1.done
+...
+
+
+En cas de crash, la reprise saute automatiquement les vidéos déjà traitées.
+Pour forcer une ré-extraction complète :
+ rm /cosma-qc-frames/job_ID/.video_*.done
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/_build/html/genindex.html b/docs/_build/html/genindex.html
new file mode 100644
index 0000000..cb46f17
--- /dev/null
+++ b/docs/_build/html/genindex.html
@@ -0,0 +1,108 @@
+
+
+
+
+
+
+
+ Index — cosma-qc 1.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/_build/html/index.html b/docs/_build/html/index.html
new file mode 100644
index 0000000..2841519
--- /dev/null
+++ b/docs/_build/html/index.html
@@ -0,0 +1,156 @@
+
+
+
+
+
+
+
+
+ cosma-qc — Documentation — cosma-qc 1.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ cosma-qc
+
+
+
+
+
+
+
+
+
+cosma-qc — Documentation
+Pipeline de contrôle qualité vidéo pour drones sous-marins AUV COSMA.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/_build/html/infrastructure.html b/docs/_build/html/infrastructure.html
new file mode 100644
index 0000000..5cd0572
--- /dev/null
+++ b/docs/_build/html/infrastructure.html
@@ -0,0 +1,285 @@
+
+
+
+
+
+
+
+
+ Infrastructure — cosma-qc 1.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ cosma-qc
+
+
+
+
+
+
+
+
+
+Infrastructure
+
+Réseau LAN — 192.168.0.0/24
+ ┌─────────────────────────────────────────────────────────┐
+│ LAN 192.168.0.0/24 │
+│ │
+│ .82 CORE Dispatcher (systemd) + FastAPI :3849 │
+│ Gitea + Grafana + InfluxDB + Caddy │
+│ │
+│ .84 ml-stack GPU worker RTX 3090 24GB │
+│ .87 gpu GPU worker RTX 3060 12GB │
+│ │
+│ .168 z620 Proxmox host HP Z620 │
+│ SSD → /mnt/portablessd (MP4 bruts) │
+└─────────────────────────────────────────────────────────┘
+
+
+
+
+Nœud core (.82)
+Rôle : orchestrateur central du pipeline.
+Services actifs :
+
+Dispatcher — service systemd cosma-qc-dispatcher.
+Boucle principale qui dispatch les jobs aux workers GPU.
+Dashboard FastAPI — conteneur Docker exposé sur le port 3849 .
+Interface web de monitoring des jobs.
+Gitea — dépôt source floppyrj45/cosma-qc.
+Grafana / InfluxDB — monitoring infrastructure.
+
+Commandes utiles :
+# Statut dispatcher
+sudo systemctl status cosma-qc-dispatcher
+
+# Logs dispatcher temps réel
+sudo journalctl -u cosma-qc-dispatcher -f
+
+# Dashboard
+http://192.168.0.82:3849
+
+
+
+
+Nœuds GPU workers (.84 et .87)
+
+
+
+
+
+
+
+
+IP
+Nom
+GPU
+VRAM
+
+
+
+.84
+ml-stack
+RTX 3090
+24 GB
+
+.87
+gpu
+RTX 3060
+12 GB
+
+
+
+Rôle : exécution de ffmpeg (extraction frames) et lingbot-map (reconstruction 3D).
+Répertoire de travail sur chaque worker :
+ /cosma-qc-frames/
+├── job_1/
+│ ├── frame_000001.jpg … frame_NNNNNN.jpg
+│ ├── .video_0.done
+│ ├── reconstruction.ply
+│ └── reconstruction.glb (généré à la demande)
+├── job_2/
+│ └── …
+└── stitch_1.ply
+
+
+
+
+Nœud z620 (.168)
+Rôle : stockage des MP4 bruts GoPro.
+
+Accès SSH depuis core :
+ ssh floppyrj45@192.168.0.168
+
+
+
+
+Service systemd dispatcher
+Fichier de service : /etc/systemd/system/cosma-qc-dispatcher.service
+[Unit]
+Description = COSMA QC Dispatcher
+After = network.target
+
+[Service]
+User = floppyrj45
+WorkingDirectory = /home/floppyrj45/docker/cosma-qc
+ExecStart = /usr/bin/python3 app/dispatcher.py
+Restart = on-failure
+RestartSec = 10
+
+[Install]
+WantedBy = multi-user.target
+
+
+Commandes de gestion :
+ sudo systemctl start cosma-qc-dispatcher
+sudo systemctl stop cosma-qc-dispatcher
+sudo systemctl restart cosma-qc-dispatcher
+sudo systemctl enable cosma-qc-dispatcher # démarrage auto
+
+
+
+
+Conteneur Docker dashboard
+Le dashboard FastAPI tourne dans un conteneur Docker.
+cd /home/floppyrj45/docker/cosma-qc
+docker compose up -d # démarrer
+docker compose down # arrêter
+docker compose logs -f # logs
+
+
+Accès : http://192.168.0.82:3849
+
+
+Ports réseau récapitulatifs
+
+
+
+
+
+
+
+Host
+Port
+Service
+
+
+
+.82
+3849
+Dashboard FastAPI cosma-qc
+
+.84 / .87
+8100+N
+Viser viewer (reconstruction job N)
+
+.84 / .87
+8300
+HTTP server GLB export
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/_build/html/objects.inv b/docs/_build/html/objects.inv
new file mode 100644
index 0000000..ad1c2e3
Binary files /dev/null and b/docs/_build/html/objects.inv differ
diff --git a/docs/_build/html/pipeline.html b/docs/_build/html/pipeline.html
new file mode 100644
index 0000000..6989774
--- /dev/null
+++ b/docs/_build/html/pipeline.html
@@ -0,0 +1,276 @@
+
+
+
+
+
+
+
+
+ Pipeline cosma-qc — cosma-qc 1.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ cosma-qc
+
+
+
+
+
+
+
+
+
+Pipeline cosma-qc
+
+Vue d’ensemble
+Le pipeline cosma-qc traite les vidéos GoPro brutes acquises par les drones sous-marins
+AUV COSMA pour produire des nuages de points 3D denses (PLY) et des exports web (GLB).
+
+
+Flux de données global
+ z620 (/mnt/portablessd)
+├── GX*.MP4 (brut)
+ │
+ ▼ [1. Ingest]
+SQLite DB (jobs)
+ │
+ ▼ [2. Extraction ffmpeg]
+GPU worker /cosma-qc-frames/job_{id}/frame_%06d.jpg
+ │
+ ▼ [3. Reconstruction lingbot-map]
+GPU worker /cosma-qc-frames/job_{id}/reconstruction.ply
+ │
+ ▼ [4. Stitch per_auv]
+GPU worker /cosma-qc-frames/stitch_{N}.ply (par AUV)
+ │
+ ▼ [5. Stitch cross_auv]
+GPU worker /cosma-qc-frames/stitch_global.ply
+ │
+ ▼ [6. Export GLB (à la demande)]
+GPU worker /cosma-qc-frames/job_{id}/reconstruction.glb
+
+
+
+
+Étape 1 — Ingest
+Script : scripts/ingest.py
+Rôle : Scanner le SSD de z620, regrouper les MP4 GoPro par AUV/GoPro/segment
+temporel et écrire les jobs dans la base SQLite.
+Logique de regroupement :
+
+Les fichiers sont groupés par numéro de caméra GoPro (GXXX).
+Un nouveau segment est créé si l’écart entre deux fichiers dépasse 5 minutes .
+Chaque segment → un job en base avec statut pending.
+
+Structure d’un job en base :
+CREATE TABLE jobs (
+ id INTEGER PRIMARY KEY ,
+ auv TEXT ,
+ gopro TEXT ,
+ segment INTEGER ,
+ files TEXT , -- JSON list of z620:/path/GX*.MP4
+ status TEXT , -- pending / running / done / failed
+ worker TEXT ,
+ created_at TIMESTAMP ,
+ updated_at TIMESTAMP
+);
+
+
+Les chemins vidéo sont stockés sous la forme z620:/chemin/absolu/GX*.MP4
+pour indiquer que la source est sur z620, pas sur le worker GPU.
+
+
+
+Étape 3 — Reconstruction 3D
+Fonction : do_reconstruct
+Outil : lingbot-map (demo.py) sur le worker GPU
+Entrée : répertoire de frames {worker_frames_dir}/job_{id}/
+Sortie : {worker_frames_dir}/job_{id}/reconstruction.ply
+(nuage de points dense, jusqu’à 150 millions de points )
+Un viewer Viser est automatiquement démarré sur le port 8100 + job_id
+pendant la reconstruction pour visualisation en temps réel.
+# Visualiser pendant reconstruction
+# naviguer vers http://{worker_ip}:{8100+job_id}
+
+
+
+
+Étape 4 — Stitch par AUV
+Fonction : do_stitch_per_auv
+Déclenchement : automatique quand tous les jobs d’un AUV sont en statut done.
+Outil : cosma-stitch.py
+Opération : alignement et fusion des PLY de tous les segments d’un même AUV.
+Sortie : {worker_frames_dir}/stitch_{N}.ply
+Ce stitch s’exécute sur le worker qui détient les PLY (pas de copie inter-workers).
+
+
+Étape 5 — Stitch cross-AUV
+Fonction : do_stitch_cross_auv
+Déclenchement : automatique quand tous les stitches per_auv sont validés.
+Opération : fusion de tous les PLY par AUV en un nuage de points global final.
+Sortie : {worker_frames_dir}/stitch_global.ply
+
+
+Étape 6 — Export GLB (à la demande)
+Outil : trimesh (conversion PLY → GLB)
+Sous-échantillonnage : 5 millions de points (adapté web/navigateur)
+Sortie : {worker_frames_dir}/job_{id}/reconstruction.glb
+Un serveur HTTP minimal est lancé sur le worker (port 8300 ) pour le téléchargement :
+# Sur le worker
+python3 -m http.server 8300 --directory { worker_frames_dir}
+
+# Télécharger depuis un PC
+wget http://{ worker_ip} :8300/job_{ id} /reconstruction.glb
+
+
+
+
+Statuts de jobs
+
+
+
+
+
+
+Statut
+Description
+
+
+
+pending
+Job créé, en attente de dispatch
+
+running
+En cours de traitement (extraction ou reconstruction)
+
+done
+Reconstruction terminée, PLY disponible
+
+failed
+Erreur — voir logs du dispatcher
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/_build/html/search.html b/docs/_build/html/search.html
new file mode 100644
index 0000000..e67fcaf
--- /dev/null
+++ b/docs/_build/html/search.html
@@ -0,0 +1,123 @@
+
+
+
+
+
+
+
+ Search — cosma-qc 1.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ cosma-qc
+
+
+
+
+
+
+
+
+
+
+
+ Please activate JavaScript to enable the search functionality.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/_build/html/searchindex.js b/docs/_build/html/searchindex.js
new file mode 100644
index 0000000..af5f5e7
--- /dev/null
+++ b/docs/_build/html/searchindex.js
@@ -0,0 +1 @@
+Search.setIndex({"alltitles":{"Base de donn\u00e9es":[[4,"base-de-donnees"]],"Budget disque observ\u00e9":[[0,"budget-disque-observe"]],"Conteneur Docker dashboard":[[2,"conteneur-docker-dashboard"]],"Contenu":[[1,null]],"Dashboard web":[[4,"dashboard-web"]],"Donn\u00e9es \u2014 Stockage et budget disque":[[0,null]],"Export GLB":[[0,"export-glb"]],"Flux de donn\u00e9es global":[[3,"flux-de-donnees-global"]],"Frames JPEG":[[0,"frames-jpeg"]],"Infrastructure":[[2,null]],"Ing\u00e9rer une nouvelle acquisition":[[4,"ingerer-une-nouvelle-acquisition"]],"Logs dispatcher":[[4,"logs-dispatcher"]],"N\u0153ud core (.82)":[[2,"noeud-core-82"]],"N\u0153ud z620 (.168)":[[2,"noeud-z620-168"]],"N\u0153uds GPU workers (.84 et .87)":[[2,"noeuds-gpu-workers-84-et-87"]],"O\u00f9 sont stock\u00e9es les donn\u00e9es":[[0,"ou-sont-stockees-les-donnees"]],"PLY interm\u00e9diaires":[[0,"ply-intermediaires"]],"Pipeline cosma-qc":[[3,null]],"Politique de nettoyage":[[0,"politique-de-nettoyage"]],"Ports r\u00e9seau r\u00e9capitulatifs":[[2,"ports-reseau-recapitulatifs"]],"Red\u00e9marrer le pipeline complet":[[4,"redemarrer-le-pipeline-complet"]],"Relancer un job \u00e9chou\u00e9":[[4,"relancer-un-job-echoue"]],"Reprise sur crash \u2014 marqueurs .done":[[0,"reprise-sur-crash-marqueurs-done"]],"R\u00e9seau LAN \u2014 192.168.0.0/24":[[2,"reseau-lan-192-168-0-0-24"]],"Service systemd dispatcher":[[2,"service-systemd-dispatcher"]],"Statuts de jobs":[[3,"statuts-de-jobs"]],"Surveiller les jobs":[[4,"surveiller-les-jobs"]],"T\u00e9l\u00e9charger un GLB":[[4,"telecharger-un-glb"]],"Utilisation":[[4,null]],"Visualiser un nuage de points PLY":[[4,"visualiser-un-nuage-de-points-ply"]],"Vue d\u2019ensemble":[[3,"vue-d-ensemble"]],"V\u00e9rification espace disque":[[0,"verification-espace-disque"]],"V\u00e9rifications rapides post-mission":[[4,"verifications-rapides-post-mission"]],"cosma-qc \u2014 Documentation":[[1,null]],"\u00c9tape 1 \u2014 Ingest":[[3,"etape-1-ingest"]],"\u00c9tape 2 \u2014 
Extraction des frames":[[3,"etape-2-extraction-des-frames"]],"\u00c9tape 3 \u2014 Reconstruction 3D":[[3,"etape-3-reconstruction-3d"]],"\u00c9tape 4 \u2014 Stitch par AUV":[[3,"etape-4-stitch-par-auv"]],"\u00c9tape 5 \u2014 Stitch cross-AUV":[[3,"etape-5-stitch-cross-auv"]],"\u00c9tape 6 \u2014 Export GLB (\u00e0 la demande)":[[3,"etape-6-export-glb-a-la-demande"]]},"docnames":["data","index","infrastructure","pipeline","usage"],"envversion":{"sphinx":66,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["data.rst","index.rst","infrastructure.rst","pipeline.rst","usage.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"06d":3,"12gb":2,"1920x1080":0,"24gb":2,"3d":[1,2],"5_000_000":0,"5m":0,"After":2,"BY":4,"FROM":4,"WHERE":4,"absolu":3,"acc\u00e8":2,"acc\u00e9der":4,"acquis":3,"acquisit":1,"actif":2,"adapt\u00e9":3,"affich":4,"align":3,"all\u00e9g\u00e9":0,"api":[0,4],"app":2,"apr\u00e8":[0,3,4],"arr\u00eater":[2,4],"assign\u00e9":4,"assur":4,"attendr":4,"attent":3,"au":[3,4],"auto":2,"automatiqu":[0,3,4],"auv":[0,1,4],"auv009":4,"aux":2,"avant":0,"avec":[0,3,4],"base":[0,3],"bin":2,"boucl":2,"bruit":3,"brut":[0,2,3],"brute":3,"budget":1,"c":4,"caddi":2,"calcul":0,"cam\u00e9ra":3,"cas":[0,3],"cd":[2,4],"ce":3,"central":2,"chacun":0,"chaqu":[0,2,3,4],"charg":4,"chemin":3,"choic":0,"ciel":3,"close":4,"cloudcompar":4,"color":0,"command":2,"complet":1,"compl\u00e8t":[0,4],"compos":[2,4],"concern\u00e9":4,"conn":4,"connect":4,"conserv":0,"conserv\u00e9":0,"consommateur":0,"conteneur":1,"contr\u00f4l":1,"convers":3,"copi":3,"copier":4,"copi\u00e9":0,"core":[1,3,4],"cosma":[0,2,4],"count":4,"cour":[3,4],"crash":[1,3],"creat":3,"created_at":3,"cross":1,"cross_auv":3,"cr\u00e9\u00e9":3,"cr\u00e9\u00e9s":4,"curl":[0,4],"cycl":4,"d":[0,1,2,4],
"dan":[2,3,4],"dashboard":[0,1],"db":[3,4],"de":[1,2],"demand":[0,1,2],"demo":3,"dens":3,"densit\u00e9":0,"depui":[0,2,3,4],"derni\u00e8r":4,"des":[0,1,2,4],"descript":[2,3],"dessus":3,"deux":3,"df":[0,4],"direct":3,"directori":[0,3,4],"dispatch":[1,3],"dispon":3,"disqu":[1,4],"do_extract":3,"do_reconstruct":3,"do_stitch_cross_auv":3,"do_stitch_per_auv":3,"docker":[1,4],"doit":4,"done":[1,2,3,4],"donn\u00e9":1,"dont":3,"drone":[1,3],"du":[0,2,3,4],"d\u00e9clenchement":3,"d\u00e9j\u00e0":[0,3],"d\u00e9marrag":2,"d\u00e9marrer":[2,4],"d\u00e9marr\u00e9":3,"d\u00e9pass":3,"d\u00e9pend":0,"d\u00e9p\u00f4t":2,"d\u00e9tient":3,"eau":3,"emplac":0,"en":[0,3,4],"enabl":2,"ensembl":1,"entr":3,"entr\u00e9":[0,3],"erreur":[3,4],"error":4,"espac":4,"est":[0,3],"et":[1,3,4],"etc":[2,4],"execstart":2,"execut":4,"export":[1,2],"export_glb":[0,4],"expos\u00e9":2,"extract":[0,1,2,4],"extrait":0,"ex\u00e9cut":[2,3],"f":[0,2,4],"fail":[3,4],"failur":2,"fals":0,"fastapi":2,"ffmpeg":[2,3],"fichier":[0,2,3],"file":3,"filtr":3,"filtrer":4,"fin":4,"final":[0,3],"floppyrj45":[0,2,4],"flux":1,"foi":0,"fonction":3,"forcer":[0,4],"form":3,"fps":[0,3],"frame":[1,2,4],"frame_":[0,3],"frame_000001":2,"frame_nnnnnn":2,"frames_dir":3,"fusion":[0,3],"gb":[0,2],"gestion":2,"gitea":2,"glb":[1,2],"global":[0,1,4],"gopro":[0,2,3,4],"gpu":[1,3],"grafana":2,"grep":4,"group\u00e9":3,"gx":3,"gxxx":3,"g\u00e9n\u00e8re":0,"g\u00e9n\u00e9ration":[0,4],"g\u00e9n\u00e9rer":4,"g\u00e9n\u00e9r\u00e9":[0,2],"h":[0,4],"head":0,"heuristiqu":3,"home":[2,4],"hor":[0,3],"host":2,"hp":2,"http":[0,2,3,4],"http8300":4,"id":[0,3,4],"idx":0,"ignor\u00e9":3,"il":4,"import":[0,4],"indiqu":3,"influxdb":2,"infrastructur":1,"ingest":[1,4],"ing\u00e9rer":1,"input":3,"instal":2,"intact":0,"integ":3,"inter":3,"interfac":2,"ip":2,"jamai":[0,2,3],"job":[0,1,2],"job_":[0,3,4],"job_1":2,"job_2":2,"job_21":0,"job_id":[0,3,4],"journalctl":[2,4],"jpeg":3,"jpg":[0,2,3],"json":3,"jusqu":3,"key":3,"l":[0,3,4],"la":[0,1,2,4],"lan":1,"lancer":[0
,4],"lanc\u00e9":[3,4],"le":[0,1,2,3],"len":0,"les":[1,2,3],"lh":[0,4],"lien":4,"lign":[0,4],"lingbot":[2,3],"list":3,"load":0,"log":[2,3],"logiqu":3,"lorsqu":3,"ls":[0,4],"luminosit\u00e9":3,"m":[0,3,4],"manuell":0,"map":[2,3],"marin":[1,3],"marqueur":[1,3,4],"mb":0,"million":3,"min":0,"minim":3,"minut":[3,4],"mission":[0,1],"ml":2,"mnt":[0,2,3,4],"monitor":2,"mont\u00e9":2,"moyenn":3,"mp4":[0,2,3,4],"multi":2,"m\u00eame":3,"n":[0,2,3,4],"navigateur":3,"navigu":[3,4],"ne":[0,2,3],"nettoyag":1,"network":2,"nohup":4,"nom":2,"nouveau":3,"nouvell":1,"np":0,"nuag":[0,1,3],"null":4,"numpi":0,"num\u00e9ro":[3,4],"n\u00e9cessair":4,"n\u0153ud":1,"observ\u00e9":1,"ok":4,"op\u00e9rat":3,"orchestrateur":2,"order":4,"ou":[3,4],"outil":3,"ouvrir":4,"o\u00f9":1,"par":[0,1,4],"param\u00e8tr":3,"particularit\u00e9":3,"pas":3,"path":[3,4],"pc":[0,3,4],"pend":[3,4],"pendant":[3,4],"per_auv":[0,3],"permettr":0,"peut":4,"peuvent":0,"pipelin":[1,2],"pkill":4,"pli":[1,2,3],"point":[0,1,3],"pointcloud":0,"politiqu":1,"port":[1,3],"portablessd":[0,2,3,4],"post":[0,1],"pour":[0,1,3,4],"prend":4,"prendr":4,"primari":3,"principal":2,"print":4,"prochain":4,"produir":3,"progress":4,"proxmox":2,"pr\u00e9sent":4,"py":[2,3,4],"python3":[0,2,3,4],"q":3,"qc":[0,2,4],"qualit\u00e9":1,"quand":3,"que":[0,3,4],"quelqu":4,"qui":[2,3],"quittent":[0,2,3],"random":0,"rapid":1,"reconstruct":[0,1,2,4],"red\u00e9marr":1,"red\u00e9marrag":3,"regroup":3,"relanc":1,"remarqu":0,"remettr":4,"remplac":4,"replac":0,"reprend":4,"repris":[1,3],"restart":[2,4],"restartsec":2,"retourn":4,"rf":0,"rh":0,"rm":[0,4],"row":4,"rtx":2,"run":[3,4],"r\u00e9":[0,4],"r\u00e9capitulatif":1,"r\u00e9duit":3,"r\u00e9el":[2,3,4],"r\u00e9pertoir":[2,3],"r\u00e9seau":[1,3],"r\u00f4le":[2,3],"s":[2,3,4],"saut":0,"sauvegard\u00e9":0,"scale":3,"scanner":3,"scp":4,"script":[3,4],"sc\u00e8ne":0,"segment":[0,3,4],"select":4,"selon":4,"server":[0,2,3,4],"serveur":[0,3,4],"servic":1,"set":4,"seuil":3,"seul":3,"sh":0,"si":[3,4],"somm":0,"sont":[1
,3,4],"sort":0,"sorti":3,"sourc":[2,3],"sous":[1,3],"sqlite":3,"sqlite3":4,"ssd":[2,3,4],"ssh":[0,2,3,4],"stack":2,"start":[2,4],"status":[2,3,4],"statut":[1,2,4],"stitch":[0,1],"stitch_":[0,3],"stitch_1":2,"stitch_glob":[0,3],"stockag":[1,2],"stock\u00e9":[1,3],"stop":[2,4],"structur":3,"sub":0,"succ\u00e8":0,"sudo":[2,4],"suppress":0,"supprim":[0,4],"supprim\u00e9":[0,3],"sur":[1,2,3,4],"surfac":3,"surveil":1,"system":2,"systemctl":[2,4],"systemd":1,"tabl":3,"taill":[0,4],"target":2,"temp":[2,3,4],"temporel":3,"termin\u00e9":3,"text":3,"timestamp":3,"tmp":4,"top":0,"total":0,"tourn":2,"tous":[0,3,4],"tout":0,"trait":3,"traitement":3,"trait\u00e9":[0,3],"travail":2,"trimesh":[0,3],"type":0,"typiqu":0,"t\u00e9l\u00e9chargement":[0,3,4],"t\u00e9l\u00e9charger":[0,1,3],"t\u00e9l\u00e9charg\u00e9":4,"u":[2,4],"un":[0,1,2,3],"une":[0,1],"uniqu":0,"unit":2,"updat":4,"updated_at":[3,4],"user":2,"usr":2,"util":2,"utilis":1,"v":3,"valid":0,"valid\u00e9":[0,3],"variabl":0,"ver":[3,4],"version":0,"vertic":0,"vf":3,"via":[0,2,3,4],"video_":[0,4],"video_0":[0,2],"video_1":0,"video_n":3,"vid\u00e9o":[0,1,3],"viewer":[2,3,4],"viser":[2,3,4],"visualis":[0,1,3],"visuel":0,"voir":3,"voyag":3,"vram":2,"vue":1,"v\u00e9rific":1,"v\u00e9rifier":[0,4],"wantedbi":2,"web":[0,2,3],"wget":[0,3,4],"worker":[0,1,3,4],"worker_frames_dir":3,"worker_ip":3,"workingdirectori":2,"x":[0,4],"y":2,"yml":4,"z620":[0,1,3,4],"\u00e0":[0,1,2,4],"\u00e9cart":3,"\u00e9chantillonnag":3,"\u00e9chou\u00e9":1,"\u00e9crire":3,"\u00e9tape":1,"\u00e9tat":4,"\u00eatre":0},"titles":["Donn\u00e9es \u2014 Stockage et budget disque","cosma-qc \u2014 Documentation","Infrastructure","Pipeline 
cosma-qc","Utilisation"],"titleterms":{"3d":3,"acquisit":4,"auv":3,"base":4,"budget":0,"complet":4,"conteneur":2,"contenu":1,"core":2,"cosma":[1,3],"crash":0,"cross":3,"d":3,"dashboard":[2,4],"de":[0,3,4],"demand":3,"des":3,"dispatch":[2,4],"disqu":0,"docker":2,"document":1,"done":0,"donn\u00e9":[0,3,4],"ensembl":3,"espac":0,"et":[0,2],"export":[0,3],"extract":3,"flux":3,"frame":[0,3],"glb":[0,3,4],"global":3,"gpu":2,"infrastructur":2,"ingest":3,"ing\u00e9rer":4,"interm\u00e9diair":0,"job":[3,4],"jpeg":0,"la":3,"lan":2,"le":4,"les":[0,4],"log":4,"marqueur":0,"mission":4,"nettoyag":0,"nouvell":4,"nuag":4,"n\u0153ud":2,"observ\u00e9":0,"o\u00f9":0,"par":3,"pipelin":[3,4],"pli":[0,4],"point":4,"politiqu":0,"port":2,"post":4,"qc":[1,3],"rapid":4,"reconstruct":3,"red\u00e9marr":4,"relanc":4,"repris":0,"r\u00e9capitulatif":2,"r\u00e9seau":2,"servic":2,"sont":0,"statut":3,"stitch":3,"stockag":0,"stock\u00e9":0,"sur":0,"surveil":4,"systemd":2,"t\u00e9l\u00e9charger":4,"un":4,"une":4,"utilis":4,"visualis":4,"vue":3,"v\u00e9rific":[0,4],"web":4,"worker":2,"z620":2,"\u00e0":3,"\u00e9chou\u00e9":4,"\u00e9tape":3}})
\ No newline at end of file
diff --git a/docs/_build/html/usage.html b/docs/_build/html/usage.html
new file mode 100644
index 0000000..9b72ec7
--- /dev/null
+++ b/docs/_build/html/usage.html
@@ -0,0 +1,285 @@
+
+
+
+
+
+
+
+
+ Utilisation — cosma-qc 1.0 documentation
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ cosma-qc
+
+
+
+
+
+
+
+
+
+Utilisation
+
+Ingérer une nouvelle acquisition
+
+Connecter le SSD GoPro à z620 (ou s’assurer que les MP4 sont dans /mnt/portablessd).
+Lancer l’ingest depuis core :
+
+ ssh floppyrj45@192.168.0.82
+cd /home/floppyrj45/docker/cosma-qc
+python3 scripts/ingest.py --path /mnt/portablessd/AUV009/
+
+
+
+Vérifier les jobs créés :
+
+ python3 -c "
+import sqlite3
+conn = sqlite3.connect('cosma-qc.db')
+for row in conn.execute('SELECT id, auv, gopro, segment, status FROM jobs ORDER BY id'):
+ print(row)
+conn.close()
+"
+
+
+
+Le dispatcher prend automatiquement en charge les jobs en statut pending .
+
+
+
+Surveiller les jobs
+
+
+Logs dispatcher
+# Logs temps réel
+sudo journalctl -u cosma-qc-dispatcher -f
+
+# Logs des 100 dernières lignes
+sudo journalctl -u cosma-qc-dispatcher -n 100
+
+# Filtrer erreurs
+sudo journalctl -u cosma-qc-dispatcher | grep -i error
+
+
+
+
+Base de données
+# État global des jobs
+sqlite3 /home/floppyrj45/docker/cosma-qc/cosma-qc.db \
+ "SELECT id, auv, status, worker, updated_at FROM jobs ORDER BY id;"
+
+# Jobs en cours
+sqlite3 cosma-qc.db \
+ "SELECT id, auv, worker FROM jobs WHERE status='running';"
+
+# Jobs échoués
+sqlite3 cosma-qc.db \
+ "SELECT id, auv, status FROM jobs WHERE status='failed';"
+
+
+
+
+
+Visualiser un nuage de points PLY
+Sur le worker avec viewer Viser (lancé automatiquement pendant reconstruction) :
+# Naviguer vers (remplacer ID par le numéro de job)
+http://192.168.0.84:8100 # pour job 0
+http://192.168.0.84:8101 # pour job 1
+# etc.
+
+
+Avec CloudCompare depuis un PC (si PLY téléchargé) :
+# Copier le PLY vers PC
+scp floppyrj45@192.168.0.84:/cosma-qc-frames/job_ID/reconstruction.ply ./
+# Ouvrir dans CloudCompare
+
+
+
+
+Télécharger un GLB
+
+Générer le GLB via l’API :
+
+ curl -X POST http://192.168.0.82:3849/jobs/ID/export_glb
+
+
+
+Attendre la fin de la génération (peut prendre quelques minutes selon la taille du PLY).
+Lancer le serveur HTTP sur le worker concerné :
+
+ ssh floppyrj45@192.168.0.84 \
+ "nohup python3 -m http.server 8300 --directory /cosma-qc-frames > /tmp/http8300.log 2>&1 &"
+
+
+
+Télécharger :
+
+ wget http://192.168.0.84:8300/job_ID/reconstruction.glb
+
+
+
+Arrêter le serveur HTTP après téléchargement :
+
+ ssh floppyrj45@192.168.0.84 "pkill -f 'http.server 8300'"
+
+
+
+
+Relancer un job échoué
+# Remettre un job en pending
+sqlite3 /home/floppyrj45/docker/cosma-qc/cosma-qc.db \
+ "UPDATE jobs SET status='pending', worker=NULL WHERE id=ID;"
+
+# Supprimer les marqueurs .done pour forcer ré-extraction complète
+ssh floppyrj45@192.168.0.84 \
+ "rm -f /cosma-qc-frames/job_ID/.video_*.done"
+
+# Redémarrer le dispatcher si nécessaire
+sudo systemctl restart cosma-qc-dispatcher
+
+
+Le dispatcher reprend le job au prochain cycle.
+
+
+Redémarrer le pipeline complet
+# Arrêter
+sudo systemctl stop cosma-qc-dispatcher
+docker compose -f /home/floppyrj45/docker/cosma-qc/docker-compose.yml down
+
+# Démarrer
+docker compose -f /home/floppyrj45/docker/cosma-qc/docker-compose.yml up -d
+sudo systemctl start cosma-qc-dispatcher
+
+
+
+
+Vérifications rapides post-mission
+# 1. Tous les jobs done ?
+sqlite3 cosma-qc.db "SELECT COUNT(*) FROM jobs WHERE status != 'done';"
+# Doit retourner 0
+
+# 2. Tous les PLY présents ?
+for id in $(sqlite3 cosma-qc.db "SELECT id FROM jobs WHERE status='done'"); do
+    worker=$(sqlite3 cosma-qc.db "SELECT worker FROM jobs WHERE id=$id")
+    ssh floppyrj45@$worker "ls -lh /cosma-qc-frames/job_${id}/reconstruction.ply"
+done
+
+# 3. Espace disque OK ?
+ssh floppyrj45@192.168.0.84 "df -h /cosma-qc-frames"
+ssh floppyrj45@192.168.0.87 "df -h /cosma-qc-frames"
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 0000000..bc9f4b9
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,6 @@
+project = "cosma-qc"
+author = "COSMA"
+release = "1.0"
+extensions = []
+html_theme = "sphinx_rtd_theme"
+html_static_path = ["_static"]
diff --git a/docs/data.rst b/docs/data.rst
new file mode 100644
index 0000000..eb0b194
--- /dev/null
+++ b/docs/data.rst
@@ -0,0 +1,162 @@
+Données — Stockage et budget disque
+=====================================
+
+Où sont stockées les données
+------------------------------
+
+.. list-table::
+ :header-rows: 1
+ :widths: 30 20 50
+
+ * - Type de donnée
+ - Emplacement
+ - Remarques
+ * - MP4 bruts GoPro
+ - z620 ``/mnt/portablessd``
+ - Ne quittent jamais z620. Jamais copiés sur workers.
+ * - Frames JPEG
+ - Worker ``/cosma-qc-frames/job_{id}/frame_*.jpg``
+ - Conservés pour reprise sur crash. Supprimables après validation du stitch.
+ * - PLY par job
+ - Worker ``/cosma-qc-frames/job_{id}/reconstruction.ply``
+ - Entrée du stitch per_auv.
+ * - PLY stitch par AUV
+ - Worker ``/cosma-qc-frames/stitch_{N}.ply``
+ - Fusion des segments d'un AUV.
+ * - PLY stitch global
+ - Worker ``/cosma-qc-frames/stitch_global.ply``
+ - Nuage de points final toute mission.
+ * - GLB (export web)
+ - Worker ``/cosma-qc-frames/job_{id}/reconstruction.glb``
+ - Généré à la demande. 5M points, ~76 MB.
+
+
+Budget disque observé
+----------------------
+
+.. list-table::
+ :header-rows: 1
+ :widths: 40 30 30
+
+ * - Type
+ - Taille typique
+ - Base de calcul
+ * - Frames JPEG par job
+ - ~11 GB
+ - job 45 min à 2 fps, 1920x1080
+ * - PLY par job (reconstruction)
+ - 2 – 5 GB
+ - dépend de la densité de la scène
+ * - GLB par job (export web)
+ - ~76 MB
+ - 5M points (job_21 observé)
+ * - PLY stitch AUV
+ - variable
+ - somme des PLY segments
+ * - PLY global
+ - variable
+ - somme de tous les AUV
+
+Pour un AUV avec 4 jobs de 45 min chacun :
+
+- Frames : 4 x 11 GB = **~44 GB** (supprimables après validation)
+- PLY jobs : 4 x 3.5 GB = **~14 GB**
+- PLY stitch AUV : **~6-10 GB**
+
+
+Politique de nettoyage
+------------------------
+
+Frames JPEG
+^^^^^^^^^^^
+
+Les frames sont conservées uniquement pour permettre la reprise sur crash.
+Une fois le stitch per_auv validé visuellement, **les frames peuvent être supprimées**.
+
+.. code-block:: bash
+
+ # Supprimer les frames d'un job (conserver le PLY !)
+ rm -rf /cosma-qc-frames/job_ID/frame_*.jpg
+ rm -f /cosma-qc-frames/job_ID/.video_*.done
+
+ # Vérifier que le PLY est intact avant suppression
+ ls -lh /cosma-qc-frames/job_ID/reconstruction.ply
+
+PLY intermédiaires
+^^^^^^^^^^^^^^^^^^
+
+Les PLY par job peuvent être supprimés après que le stitch per_auv est validé
+et sauvegardé hors-ligne.
+
+.. code-block:: bash
+
+ # Conserver uniquement le stitch, supprimer les PLY jobs
+ rm /cosma-qc-frames/job_ID/reconstruction.ply
+
+Vérification espace disque
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. code-block:: bash
+
+ # Espace total workers
+ ssh floppyrj45@192.168.0.84 "df -h /cosma-qc-frames"
+ ssh floppyrj45@192.168.0.87 "df -h /cosma-qc-frames"
+
+ # Taille par job
+ du -sh /cosma-qc-frames/job_*/
+
+ # Top consommateurs
+ du -sh /cosma-qc-frames/* | sort -rh | head -20
+
+
+Export GLB
+-----------
+
+Le GLB est une version allégée du nuage de points pour visualisation web.
+
+Génération via l'API dashboard :
+
+.. code-block:: bash
+
+ curl -X POST http://192.168.0.82:3849/jobs/ID/export_glb
+
+Génération manuelle sur le worker :
+
+.. code-block:: python
+
+ import trimesh, numpy as np
+ pc = trimesh.load('/cosma-qc-frames/job_ID/reconstruction.ply')
+ idx = np.random.choice(len(pc.vertices), 5_000_000, replace=False)
+ sub = trimesh.PointCloud(pc.vertices[idx], colors=pc.colors[idx])
+ sub.export('/cosma-qc-frames/job_ID/reconstruction.glb')
+
+Téléchargement :
+
+.. code-block:: bash
+
+ # Lancer le serveur HTTP sur le worker
+ ssh floppyrj45@192.168.0.84 \
+ "python3 -m http.server 8300 --directory /cosma-qc-frames"
+
+ # Télécharger depuis PC
+ wget http://192.168.0.84:8300/job_ID/reconstruction.glb
+
+
+Reprise sur crash — marqueurs .done
+-------------------------------------
+
+Chaque MP4 extrait avec succès génère un fichier marqueur :
+
+.. code-block:: text
+
+ /cosma-qc-frames/job_ID/.video_0.done
+ /cosma-qc-frames/job_ID/.video_1.done
+ ...
+
+En cas de crash, la reprise saute automatiquement les vidéos déjà traitées.
+
+Pour forcer une ré-extraction complète :
+
+.. code-block:: bash
+
+ rm /cosma-qc-frames/job_ID/.video_*.done
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 0000000..4896b9b
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,13 @@
+cosma-qc — Documentation
+=========================
+
+Pipeline de contrôle qualité vidéo pour drones sous-marins AUV COSMA.
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Contenu
+
+ pipeline
+ infrastructure
+ data
+ usage
diff --git a/docs/infrastructure.rst b/docs/infrastructure.rst
new file mode 100644
index 0000000..2e3cc58
--- /dev/null
+++ b/docs/infrastructure.rst
@@ -0,0 +1,171 @@
+Infrastructure
+==============
+
+Réseau LAN — 192.168.0.0/24
+-----------------------------
+
+.. code-block:: text
+
+ ┌─────────────────────────────────────────────────────────┐
+ │ LAN 192.168.0.0/24 │
+ │ │
+ │ .82 CORE Dispatcher (systemd) + FastAPI :3849 │
+ │ Gitea + Grafana + InfluxDB + Caddy │
+ │ │
+ │ .84 ml-stack GPU worker RTX 3090 24GB │
+ │ .87 gpu GPU worker RTX 3060 12GB │
+ │ │
+ │ .168 z620 Proxmox host HP Z620 │
+ │ SSD → /mnt/portablessd (MP4 bruts) │
+ └─────────────────────────────────────────────────────────┘
+
+
+Nœud core (.82)
+----------------
+
+**Rôle :** orchestrateur central du pipeline.
+
+Services actifs :
+
+- **Dispatcher** — service systemd cosma-qc-dispatcher.
+ Boucle principale qui dispatch les jobs aux workers GPU.
+
+- **Dashboard FastAPI** — conteneur Docker exposé sur le port **3849**.
+ Interface web de monitoring des jobs.
+
+- **Gitea** — dépôt source floppyrj45/cosma-qc.
+
+- **Grafana / InfluxDB** — monitoring infrastructure.
+
+Commandes utiles :
+
+.. code-block:: bash
+
+ # Statut dispatcher
+ sudo systemctl status cosma-qc-dispatcher
+
+ # Logs dispatcher temps réel
+ sudo journalctl -u cosma-qc-dispatcher -f
+
+ # Dashboard
+ http://192.168.0.82:3849
+
+
+Nœuds GPU workers (.84 et .87)
+--------------------------------
+
+.. list-table::
+ :header-rows: 1
+ :widths: 15 25 20 40
+
+ * - IP
+ - Nom
+ - GPU
+ - VRAM
+ * - .84
+ - ml-stack
+ - RTX 3090
+ - 24 GB
+ * - .87
+ - gpu
+ - RTX 3060
+ - 12 GB
+
+**Rôle :** exécution de ffmpeg (extraction frames) et lingbot-map (reconstruction 3D).
+
+Répertoire de travail sur chaque worker :
+
+.. code-block:: text
+
+ /cosma-qc-frames/
+ ├── job_1/
+ │ ├── frame_000001.jpg … frame_NNNNNN.jpg
+ │ ├── .video_0.done
+ │ ├── reconstruction.ply
+ │ └── reconstruction.glb (généré à la demande)
+ ├── job_2/
+ │ └── …
+ └── stitch_1.ply
+
+
+Nœud z620 (.168)
+-----------------
+
+**Rôle :** stockage des MP4 bruts GoPro.
+
+- Proxmox host HP Z620.
+- SSD monté sur /mnt/portablessd.
+- Les MP4 **ne quittent jamais** z620 — ffmpeg s'y exécute via SSH.
+
+Accès SSH depuis core :
+
+.. code-block:: bash
+
+ ssh floppyrj45@192.168.0.168
+
+
+Service systemd dispatcher
+---------------------------
+
+Fichier de service : /etc/systemd/system/cosma-qc-dispatcher.service
+
+.. code-block:: ini
+
+ [Unit]
+ Description=COSMA QC Dispatcher
+ After=network.target
+
+ [Service]
+ User=floppyrj45
+ WorkingDirectory=/home/floppyrj45/docker/cosma-qc
+ ExecStart=/usr/bin/python3 app/dispatcher.py
+ Restart=on-failure
+ RestartSec=10
+
+ [Install]
+ WantedBy=multi-user.target
+
+Commandes de gestion :
+
+.. code-block:: bash
+
+ sudo systemctl start cosma-qc-dispatcher
+ sudo systemctl stop cosma-qc-dispatcher
+ sudo systemctl restart cosma-qc-dispatcher
+ sudo systemctl enable cosma-qc-dispatcher # démarrage auto
+
+
+Conteneur Docker dashboard
+---------------------------
+
+Le dashboard FastAPI tourne dans un conteneur Docker.
+
+.. code-block:: bash
+
+ cd /home/floppyrj45/docker/cosma-qc
+ docker compose up -d # démarrer
+ docker compose down # arrêter
+ docker compose logs -f # logs
+
+Accès : http://192.168.0.82:3849
+
+
+Ports réseau récapitulatifs
+----------------------------
+
+.. list-table::
+ :header-rows: 1
+ :widths: 15 15 70
+
+ * - Host
+ - Port
+ - Service
+ * - .82
+ - 3849
+ - Dashboard FastAPI cosma-qc
+ * - .84 / .87
+ - 8100+N
+ - Viser viewer (reconstruction job N)
+ * - .84 / .87
+ - 8300
+ - HTTP server GLB export
diff --git a/docs/pipeline.rst b/docs/pipeline.rst
new file mode 100644
index 0000000..c9ae201
--- /dev/null
+++ b/docs/pipeline.rst
@@ -0,0 +1,185 @@
+Pipeline cosma-qc
+=================
+
+Vue d'ensemble
+--------------
+
+Le pipeline cosma-qc traite les vidéos GoPro brutes acquises par les drones sous-marins
+AUV COSMA pour produire des nuages de points 3D denses (PLY) et des exports web (GLB).
+
+Flux de données global
+----------------------
+
+.. code-block:: text
+
+ z620 (/mnt/portablessd)
+ ├── GX*.MP4 (brut)
+ │
+ ▼ [1. Ingest]
+ SQLite DB (jobs)
+ │
+ ▼ [2. Extraction ffmpeg]
+ GPU worker /cosma-qc-frames/job_{id}/frame_%06d.jpg
+ │
+ ▼ [3. Reconstruction lingbot-map]
+ GPU worker /cosma-qc-frames/job_{id}/reconstruction.ply
+ │
+ ▼ [4. Stitch per_auv]
+ GPU worker /cosma-qc-frames/stitch_{N}.ply (par AUV)
+ │
+ ▼ [5. Stitch cross_auv]
+ GPU worker /cosma-qc-frames/stitch_global.ply
+ │
+ ▼ [6. Export GLB (à la demande)]
+ GPU worker /cosma-qc-frames/job_{id}/reconstruction.glb
+
+
+Étape 1 — Ingest
+-----------------
+
+**Script :** scripts/ingest.py
+
+**Rôle :** Scanner le SSD de z620, regrouper les MP4 GoPro par AUV/GoPro/segment
+temporel et écrire les jobs dans la base SQLite.
+
+Logique de regroupement :
+
+- Les fichiers sont groupés par numéro de caméra GoPro (GXXX).
+- Un nouveau segment est créé si l'écart entre deux fichiers dépasse **5 minutes**.
+- Chaque segment → un job en base avec statut pending.
+
+Structure d'un job en base :
+
+.. code-block:: sql
+
+ CREATE TABLE jobs (
+ id INTEGER PRIMARY KEY,
+ auv TEXT,
+ gopro TEXT,
+ segment INTEGER,
+ files TEXT, -- JSON list of z620:/path/GX*.MP4
+ status TEXT, -- pending / running / done / failed
+ worker TEXT,
+ created_at TIMESTAMP,
+ updated_at TIMESTAMP
+ );
+
+Les chemins vidéo sont stockés sous la forme z620:/chemin/absolu/GX*.MP4
+pour indiquer que la source est sur z620, pas sur le worker GPU.
+
+
+Étape 2 — Extraction des frames
+---------------------------------
+
+**Fonction :** do_extract dans le dispatcher (core .82)
+
+**Outil :** ffmpeg
+
+Paramètres d'extraction :
+
+.. code-block:: bash
+
+ ffmpeg -i input.mp4 -vf fps=2,scale=1920:1080 -q:v 2 {frames_dir}/job_{id}/frame_%06d.jpg
+
+**Particularité z620 :** lorsque la source est z620:/..., ffmpeg s'exécute
+**directement sur z620** via SSH — seuls les JPEG voyagent sur le réseau.
+Les MP4 bruts ne quittent jamais z620.
+
+Filtre hors-eau (heuristique de luminosité) :
+
+- Les frames dont la luminosité moyenne est au-dessus d'un seuil sont supprimées
+ (ciel, surface, hors-eau).
+- Ce filtre réduit le bruit dans la reconstruction.
+
+Reprise sur crash :
+
+- Un fichier marqueur .video_N.done est créé après chaque MP4 traité.
+- En cas de redémarrage, les vidéos déjà traitées sont ignorées.
+
+
+Étape 3 — Reconstruction 3D
+-----------------------------
+
+**Fonction :** do_reconstruct
+
+**Outil :** lingbot-map (demo.py) sur le worker GPU
+
+**Entrée :** répertoire de frames {worker_frames_dir}/job_{id}/
+
+**Sortie :** {worker_frames_dir}/job_{id}/reconstruction.ply
+(nuage de points dense, jusqu'à **150 millions de points**)
+
+Un viewer Viser est automatiquement démarré sur le port 8100 + job_id
+pendant la reconstruction pour visualisation en temps réel.
+
+.. code-block:: bash
+
+ # Visualiser pendant reconstruction
+ # naviguer vers http://{worker_ip}:{8100+job_id}
+
+
+Étape 4 — Stitch par AUV
+--------------------------
+
+**Fonction :** do_stitch_per_auv
+
+**Déclenchement :** automatique quand **tous les jobs d'un AUV** sont en statut done.
+
+**Outil :** cosma-stitch.py
+
+**Opération :** alignement et fusion des PLY de tous les segments d'un même AUV.
+
+**Sortie :** {worker_frames_dir}/stitch_{N}.ply
+
+Ce stitch s'exécute sur le worker qui détient les PLY (pas de copie inter-workers).
+
+
+Étape 5 — Stitch cross-AUV
+----------------------------
+
+**Fonction :** do_stitch_cross_auv
+
+**Déclenchement :** automatique quand **tous les stitches per_auv** sont validés.
+
+**Opération :** fusion de tous les PLY par AUV en un nuage de points global final.
+
+**Sortie :** {worker_frames_dir}/stitch_global.ply
+
+
+Étape 6 — Export GLB (à la demande)
+-------------------------------------
+
+**Outil :** trimesh (conversion PLY → GLB)
+
+**Sous-échantillonnage :** 5 millions de points (adapté web/navigateur)
+
+**Sortie :** {worker_frames_dir}/job_{id}/reconstruction.glb
+
+Un serveur HTTP minimal est lancé sur le worker (port **8300**) pour le téléchargement :
+
+.. code-block:: bash
+
+ # Sur le worker
+ python3 -m http.server 8300 --directory {worker_frames_dir}
+
+ # Télécharger depuis un PC
+ wget http://{worker_ip}:8300/job_{id}/reconstruction.glb
+
+
+Statuts de jobs
+---------------
+
+.. list-table::
+ :header-rows: 1
+ :widths: 20 80
+
+ * - Statut
+ - Description
+ * - pending
+ - Job créé, en attente de dispatch
+ * - running
+ - En cours de traitement (extraction ou reconstruction)
+ * - done
+ - Reconstruction terminée, PLY disponible
+ * - failed
+ - Erreur — voir logs du dispatcher
diff --git a/docs/usage.rst b/docs/usage.rst
new file mode 100644
index 0000000..8fa6edb
--- /dev/null
+++ b/docs/usage.rst
@@ -0,0 +1,181 @@
+Utilisation
+===========
+
+Ingérer une nouvelle acquisition
+----------------------------------
+
+1. Connecter le SSD GoPro à z620 (ou s'assurer que les MP4 sont dans ``/mnt/portablessd``).
+
+2. Lancer l'ingest depuis core :
+
+.. code-block:: bash
+
+ ssh floppyrj45@192.168.0.82
+ cd /home/floppyrj45/docker/cosma-qc
+ python3 scripts/ingest.py --path /mnt/portablessd/AUV009/
+
+3. Vérifier les jobs créés :
+
+.. code-block:: bash
+
+    sqlite3 cosma-qc.db \
+      "SELECT id, auv, gopro, segment, status FROM jobs ORDER BY id;"
+
+4. Le dispatcher prend automatiquement en charge les jobs en statut ``pending``.
+
+
+Surveiller les jobs
+--------------------
+
+Dashboard web
+^^^^^^^^^^^^^
+
+Accéder au dashboard : http://192.168.0.82:3849
+
+Il affiche en temps réel :
+
+- Statut de chaque job (pending / running / done / failed)
+- Worker assigné
+- Progression des frames
+- Liens vers les PLY et GLB
+
+Logs dispatcher
+^^^^^^^^^^^^^^^
+
+.. code-block:: bash
+
+ # Logs temps réel
+ sudo journalctl -u cosma-qc-dispatcher -f
+
+ # Logs des 100 dernières lignes
+ sudo journalctl -u cosma-qc-dispatcher -n 100
+
+ # Filtrer erreurs
+ sudo journalctl -u cosma-qc-dispatcher | grep -i error
+
+Base de données
+^^^^^^^^^^^^^^^
+
+.. code-block:: bash
+
+ # État global des jobs
+ sqlite3 /home/floppyrj45/docker/cosma-qc/cosma-qc.db \
+ "SELECT id, auv, status, worker, updated_at FROM jobs ORDER BY id;"
+
+ # Jobs en cours
+ sqlite3 cosma-qc.db \
+ "SELECT id, auv, worker FROM jobs WHERE status='running';"
+
+ # Jobs échoués
+ sqlite3 cosma-qc.db \
+ "SELECT id, auv, status FROM jobs WHERE status='failed';"
+
+
+Visualiser un nuage de points PLY
+-----------------------------------
+
+Sur le worker avec viewer Viser (lancé automatiquement pendant reconstruction) :
+
+.. code-block:: text
+
+    # Naviguer vers (remplacer le dernier chiffre du port par le numéro de job)
+    http://192.168.0.84:8100   # pour job 0
+    http://192.168.0.84:8101   # pour job 1
+    # etc.
+
+Avec CloudCompare depuis un PC (si PLY téléchargé) :
+
+.. code-block:: bash
+
+ # Copier le PLY vers PC
+ scp floppyrj45@192.168.0.84:/cosma-qc-frames/job_ID/reconstruction.ply ./
+ # Ouvrir dans CloudCompare
+
+
+Télécharger un GLB
+-------------------
+
+1. Générer le GLB via l'API :
+
+.. code-block:: bash
+
+ curl -X POST http://192.168.0.82:3849/jobs/ID/export_glb
+
+2. Attendre la fin de la génération (peut prendre quelques minutes selon la taille du PLY).
+
+3. Lancer le serveur HTTP sur le worker concerné :
+
+.. code-block:: bash
+
+ ssh floppyrj45@192.168.0.84 \
+ "nohup python3 -m http.server 8300 --directory /cosma-qc-frames > /tmp/http8300.log 2>&1 &"
+
+4. Télécharger :
+
+.. code-block:: bash
+
+ wget http://192.168.0.84:8300/job_ID/reconstruction.glb
+
+5. Arrêter le serveur HTTP après téléchargement :
+
+.. code-block:: bash
+
+ ssh floppyrj45@192.168.0.84 "pkill -f 'http.server 8300'"
+
+
+Relancer un job échoué
+-----------------------
+
+.. code-block:: bash
+
+ # Remettre un job en pending
+ sqlite3 /home/floppyrj45/docker/cosma-qc/cosma-qc.db \
+ "UPDATE jobs SET status='pending', worker=NULL WHERE id=ID;"
+
+ # Supprimer les marqueurs .done pour forcer ré-extraction complète
+ ssh floppyrj45@192.168.0.84 \
+ "rm -f /cosma-qc-frames/job_ID/.video_*.done"
+
+ # Redémarrer le dispatcher si nécessaire
+ sudo systemctl restart cosma-qc-dispatcher
+
+Le dispatcher reprend le job au prochain cycle.
+
+
+Redémarrer le pipeline complet
+--------------------------------
+
+.. code-block:: bash
+
+ # Arrêter
+ sudo systemctl stop cosma-qc-dispatcher
+ docker compose -f /home/floppyrj45/docker/cosma-qc/docker-compose.yml down
+
+ # Démarrer
+ docker compose -f /home/floppyrj45/docker/cosma-qc/docker-compose.yml up -d
+ sudo systemctl start cosma-qc-dispatcher
+
+
+Vérifications rapides post-mission
+------------------------------------
+
+.. code-block:: bash
+
+ # 1. Tous les jobs done ?
+ sqlite3 cosma-qc.db "SELECT COUNT(*) FROM jobs WHERE status != 'done';"
+ # Doit retourner 0
+
+ # 2. Tous les PLY présents ?
+ for id in $(sqlite3 cosma-qc.db "SELECT id FROM jobs WHERE status='done'"); do
+ worker=$(sqlite3 cosma-qc.db "SELECT worker FROM jobs WHERE id=$id")
+ ssh floppyrj45@$worker "ls -lh /cosma-qc-frames/job_${id}/reconstruction.ply"
+ done
+
+ # 3. Espace disque OK ?
+ ssh floppyrj45@192.168.0.84 "df -h /cosma-qc-frames"
+ ssh floppyrj45@192.168.0.87 "df -h /cosma-qc-frames"