|
a |
|
b/main.js |
|
|
1 |
import { Niivue } from "@niivue/niivue"; |
|
|
2 |
import { runInference } from "./brainchop-mainthread.js"; |
|
|
3 |
import { inferenceModelsList, brainChopOpts } from "./brainchop-parameters.js"; |
|
|
4 |
import { isChrome, localSystemDetails } from "./brainchop-diagnostics.js"; |
|
|
5 |
import MyWorker from "./brainchop-webworker.js?worker"; |
|
|
6 |
|
|
|
7 |
async function main() { |
|
|
8 |
// Sync Niivue's drag behavior with the user's pull-down selection.
dragMode.onchange = async function () {
  const selectedMode = this.selectedIndex;
  nv1.opts.dragMode = selectedMode;
};
|
|
11 |
// Merge ("append") or erase ("delete") the current drawing into the
// segmentation overlay (volume 1), or undo the last stroke (mode 0).
drawDrop.onchange = async function () {
  if (nv1.volumes.length < 2) {
    window.alert("No segmentation open (use the Segmentation pull down)");
    drawDrop.selectedIndex = -1;
    return;
  }
  if (!nv1.drawBitmap) {
    window.alert("No drawing (hint: use the Draw pull down to select a pen)");
    drawDrop.selectedIndex = -1;
    return;
  }
  const mode = parseInt(this.value, 10); // radix fixed: always parse base-10
  if (mode === 0) {
    nv1.drawUndo();
    drawDrop.selectedIndex = -1;
    return;
  }
  const img = nv1.volumes[1].img;
  // saveImage returns a full NIfTI byte stream: 352-byte header + voxel data
  const draw = await nv1.saveImage({ filename: "", isSaveDrawing: true });
  const niiHdrBytes = 352;
  // BUGFIX: iterate only over the voxel payload; the original used
  // draw.length, over-running the drawing buffer by the header size.
  const nvox = draw.length - niiHdrBytes;
  if (mode === 1) {
    // append: every drawn voxel becomes part of the segmentation
    for (let i = 0; i < nvox; i++) if (draw[niiHdrBytes + i] > 0) img[i] = 1;
  }
  if (mode === 2) {
    // delete: every drawn voxel is removed from the segmentation
    for (let i = 0; i < nvox; i++) if (draw[niiHdrBytes + i] > 0) img[i] = 0;
  }
  nv1.closeDrawing();
  nv1.updateGLVolume();
  nv1.setDrawingEnabled(false);
  penDrop.selectedIndex = -1;
  drawDrop.selectedIndex = -1;
};
|
|
46 |
// Select a drawing pen. The option value encodes the pen: low 3 bits are
// the pen color, values above 7 request filled drawing; -1 disables drawing.
penDrop.onchange = async function () {
  const mode = parseInt(this.value, 10); // radix fixed: always parse base-10
  nv1.setDrawingEnabled(mode >= 0);
  if (mode >= 0) nv1.setPenValue(mode & 7, mode > 7);
};
|
|
51 |
// Show a brief usage hint.
aboutBtn.onclick = function () {
  const hint =
    "Drag and drop NIfTI images. Use pulldown menu to choose brainchop model";
  window.alert(hint);
};
|
|
56 |
// Copy the latest run's diagnostics to the clipboard, patching in any
// missing-label status accumulated by createLabeledCounts().
diagnosticsBtn.onclick = async function () {
  if (diagnosticsString.length < 1) {
    window.alert(
      "No diagnostic string generated: run a model to create diagnostics",
    );
    return;
  }
  // drop the trailing ", " left behind by the label accumulator
  missingLabelStatus = missingLabelStatus.slice(0, -2);
  if (missingLabelStatus !== "" && diagnosticsString.includes('Status: OK')) {
    diagnosticsString = diagnosticsString.replace('Status: OK', `Status: ${missingLabelStatus}`);
  }
  missingLabelStatus = "";
  try {
    // BUGFIX: writeText returns a promise; await it so failures surface
    // instead of being silently dropped as an unhandled rejection.
    await navigator.clipboard.writeText(diagnosticsString);
  } catch (err) {
    console.error("Failed to copy diagnostics to clipboard:", err);
  }
  window.alert("Diagnostics copied to clipboard\n" + diagnosticsString);
};
|
|
73 |
// Background (volume 0) opacity control: slider range 0..255 maps to 0..1.
opacitySlider0.oninput = function () {
  const alpha = opacitySlider0.value / 255;
  nv1.setOpacity(0, alpha);
  nv1.updateGLVolume();
};
|
|
77 |
// Overlay (volume 1) opacity control: slider range 0..255 maps to 0..1.
opacitySlider1.oninput = function () {
  const alpha = opacitySlider1.value / 255;
  // NOTE(review): unlike opacitySlider0 this does not call updateGLVolume();
  // confirm setOpacity() alone triggers a redraw for overlays.
  nv1.setOpacity(1, alpha);
};
|
|
80 |
// Replace volume 0 with a conformed copy (256^3 voxels, canonical axis
// permutation) unless it already matches that layout.
async function ensureConformed() {
  const nii = nv1.volumes[0];
  const has256Dims =
    nii.dims[1] === 256 && nii.dims[2] === 256 && nii.dims[3] === 256;
  const hasCanonicalPerm =
    nii.permRAS[0] === -1 && nii.permRAS[1] === 3 && nii.permRAS[2] === -2;
  if (has256Dims && hasCanonicalPerm) {
    return; // already conformed: nothing to do
  }
  const conformed = await nv1.conform(nii, false);
  await nv1.removeVolume(nv1.volumes[0]);
  await nv1.addVolume(conformed);
}
|
|
98 |
// Remove every overlay, leaving only the background volume (index 0).
async function closeAllOverlays() {
  for (;;) {
    if (nv1.volumes.length <= 1) {
      break;
    }
    await nv1.removeVolume(nv1.volumes[1]);
  }
}
|
|
103 |
// Run the selected segmentation model, either in a web worker or on the
// main thread (per the workerCheck checkbox).
modelSelect.onchange = async function () {
  if (this.selectedIndex < 0) {
    // NOTE(review): falls back to entry 11 of inferenceModelsList — confirm
    // this index is the intended default model.
    modelSelect.selectedIndex = 11;
  }
  await closeAllOverlays();
  await ensureConformed();
  const model = inferenceModelsList[this.selectedIndex];
  const opts = brainChopOpts;
  // opts.rootURL should be the url without the query string
  const urlParams = new URL(window.location.href);
  opts.rootURL = urlParams.origin + urlParams.pathname;
  const isLocalhost = Boolean(
    window.location.hostname === "localhost" ||
      // [::1] is the IPv6 localhost address.
      window.location.hostname === "[::1]" ||
      // 127.0.0.1/8 is considered localhost for IPv4.
      window.location.hostname.match(
        /^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/,
      ),
  );
  if (isLocalhost) {
    opts.rootURL = location.protocol + "//" + location.host;
  }
  if (workerCheck.checked) {
    // chopWorker doubles as a busy flag: defined while a run is in flight
    if (typeof chopWorker !== "undefined") {
      console.log(
        "Unable to start new segmentation: previous call has not completed",
      );
      return;
    }
    // BUGFIX: the Worker constructor is synchronous; the former `await` was a no-op.
    chopWorker = new MyWorker({ type: "module" });
    // presumably a slimmed-down header for postMessage — confirm the worker
    // only needs datatypeCode and dims
    const hdr = {
      datatypeCode: nv1.volumes[0].hdr.datatypeCode,
      dims: nv1.volumes[0].hdr.dims,
    };
    const msg = {
      opts,
      modelEntry: model,
      niftiHeader: hdr,
      niftiImage: nv1.volumes[0].img,
    };
    chopWorker.postMessage(msg);
    chopWorker.onmessage = function (event) {
      const cmd = event.data.cmd;
      if (cmd === "ui") {
        // a non-empty modalMessage signals a fatal condition: stop the worker
        if (event.data.modalMessage !== "") {
          chopWorker.terminate();
          chopWorker = undefined;
        }
        callbackUI(
          event.data.message,
          event.data.progressFrac,
          event.data.modalMessage,
          event.data.statData,
        );
      }
      if (cmd === "img") {
        // run complete: release the worker and display the segmentation
        chopWorker.terminate();
        chopWorker = undefined;
        callbackImg(event.data.img, event.data.opts, event.data.modelEntry);
      }
    };
  } else {
    // BUGFIX: await the main-thread run so errors propagate from this handler
    // instead of becoming a floating promise.
    await runInference(
      opts,
      model,
      nv1.volumes[0].hdr,
      nv1.volumes[0].img,
      callbackImg,
      callbackUI,
    );
  }
};
|
|
177 |
// Save the segmentation overlay (volume 1) to disk.
saveImgBtn.onclick = function () {
  // BUGFIX: corrected filename typo ("segmentaion" -> "segmentation")
  nv1.volumes[1].saveToDisk("segmentation.nii.gz");
};
|
|
180 |
// Save the whole scene (volumes + view settings) as a Niivue document.
saveSceneBtn.onclick = function () {
  nv1.saveDocument("brainchop.nvd");
};
// Re-run the current model whenever the worker checkbox is toggled.
workerCheck.onchange = function () {
  modelSelect.onchange();
};
// Toggle the render-view clip plane.
clipCheck.onchange = function () {
  const plane = clipCheck.checked ? [0, 0, 90] : [2, 0, 90];
  nv1.setClipPlane(plane);
};
|
|
193 |
// Called by Niivue whenever an image finishes loading: re-apply the current
// background-opacity slider so the new volume matches the UI state.
function doLoadImage() {
  opacitySlider0.oninput();
}
|
|
196 |
/**
 * Fetch a URL and parse the response body as JSON.
 * @param {string} fnm - URL (or path relative to the page) to fetch.
 * @returns {Promise<any>} the parsed JSON payload.
 * @throws {Error} when the HTTP response status is not OK.
 */
async function fetchJSON(fnm) {
  const response = await fetch(fnm);
  // ROBUSTNESS: fetch() only rejects on network failure; surface HTTP
  // errors explicitly instead of failing later inside .json().
  if (!response.ok) {
    throw new Error(`Failed to fetch ${fnm}: ${response.status} ${response.statusText}`);
  }
  return response.json();
}
|
|
201 |
/**
 * Tally how many times each distinct voxel value occurs.
 * @param {Uint8Array} uint8Array - voxel data to scan.
 * @returns {Promise<Array<{value: number, count: number}>>} one entry per
 *   distinct value, in first-seen order.
 */
async function getUniqueValuesAndCounts(uint8Array) {
  const tally = new Map();
  for (const value of uint8Array) {
    tally.set(value, (tally.get(value) ?? 0) + 1);
  }
  // Map iteration preserves insertion order, so entries come out first-seen.
  return Array.from(tally, ([value, count]) => ({ value, count }));
}
|
|
222 |
/**
 * Pair each label string with the voxel count of its index value.
 * Labels whose index never occurs are reported as "Missing" and appended to
 * the outer-scope missingLabelStatus accumulator.
 * @param {Array<{value: number, count: number}>} uniqueValuesAndCounts - output of getUniqueValuesAndCounts.
 * @param {string[]} labelStrings - one label per expected index value.
 * @returns {Promise<string[]>} e.g. ["Cortex 1234 mm3", "Thalamus Missing"].
 */
async function createLabeledCounts(uniqueValuesAndCounts, labelStrings) {
  if (uniqueValuesAndCounts.length !== labelStrings.length) {
    missingLabelStatus = "Failed to Predict Labels - ";
    console.error(
      "Mismatch in lengths: uniqueValuesAndCounts has",
      uniqueValuesAndCounts.length,
      "items, but labelStrings has",
      labelStrings.length,
      "items.",
    );
  }
  // PERF FIX: index counts once instead of an O(n^2) find() inside map().
  const countsByValue = new Map(
    uniqueValuesAndCounts.map(({ value, count }) => [value, count]),
  );
  return labelStrings.map((label, index) => {
    const count = countsByValue.get(index);
    // NOTE(review): count is reported as mm3 — assumes 1 mm isotropic
    // voxels (conformed input); confirm.
    const countText = count === undefined ? "Missing" : `${count} mm3`;
    // IDIOM FIX: plain `if` instead of a side-effecting ternary expression.
    if (countText === "Missing") {
      missingLabelStatus += `${label}, `;
    }
    return `${label} ${countText}`;
  });
}
|
|
247 |
// Receive a finished segmentation (raw label bytes) and display it as a
// colormapped overlay on top of the background volume.
async function callbackImg(img, opts, modelEntry) {
  // BUGFIX: await overlay removal so the new overlay is not added while the
  // old ones are still being torn down (closeAllOverlays is async).
  await closeAllOverlays();
  const overlayVolume = await nv1.volumes[0].clone();
  overlayVolume.zeroImage();
  // the image holds raw label indices: disable intensity scaling
  overlayVolume.hdr.scl_inter = 0;
  overlayVolume.hdr.scl_slope = 1;
  overlayVolume.img = new Uint8Array(img);
  const roiVolumes = await getUniqueValuesAndCounts(overlayVolume.img);
  console.log(roiVolumes);
  if (modelEntry.colormapPath) {
    const cmap = await fetchJSON(modelEntry.colormapPath);
    const newLabels = await createLabeledCounts(roiVolumes, cmap["labels"]);
    console.log(newLabels);
    overlayVolume.setColormapLabel({
      R: cmap["R"],
      G: cmap["G"],
      B: cmap["B"],
      labels: newLabels,
    });
    // n.b. most models create indexed labels, but those without colormap mask scalar input
    overlayVolume.hdr.intent_code = 1002; // NIFTI_INTENT_LABEL
  } else {
    let colormap = opts.atlasSelectedColorTable.toLowerCase();
    const cmaps = nv1.colormaps();
    if (!cmaps.includes(colormap)) {
      colormap = "actc"; // fall back to a colormap Niivue is known to ship
    }
    overlayVolume.colormap = colormap;
  }
  overlayVolume.opacity = opacitySlider1.value / 255;
  await nv1.addVolume(overlayVolume);
}
|
|
279 |
// Flatten model run statistics into the shared diagnosticsString, augmented
// with local system details (browser/GPU) from localSystemDetails().
async function reportTelemetry(statData) {
  // worker messages may deliver statData as a JSON string; normalize it into
  // the same keyed container the main-thread path produces
  if (typeof statData === "string" || statData instanceof String) {
    // IDIOM FIX: was a function declaration nested inside this if-block
    const parsed = JSON.parse(statData);
    const keyed = [];
    for (const key in parsed) {
      keyed[key] = parsed[key]; // string keys: the array is used as a dictionary
    }
    statData = keyed;
  }
  statData = await localSystemDetails(statData, nv1.gl);
  diagnosticsString =
    ":: Diagnostics can help resolve issues https://github.com/neuroneural/brainchop/issues ::\n";
  for (const key in statData) {
    diagnosticsString += key + ": " + statData[key] + "\n";
  }
}
|
|
298 |
// Central progress/status callback shared by the worker and main-thread
// inference paths.
//   message:      status text echoed to the console and the "location" element
//   progressFrac: 0..1 progress bar fraction; NaN signals a memory failure
//   modalMessage: non-empty string triggers a blocking alert
//   statData:     telemetry key/value container; non-empty triggers reportTelemetry
function callbackUI(
  message = "",
  progressFrac = -1,
  modalMessage = "",
  statData = [],
) {
  if (message !== "") {
    console.log(message);
    document.getElementById("location").innerHTML = message;
  }
  if (isNaN(progressFrac)) {
    // memory issue
    memstatus.style.color = "red";
    memstatus.innerHTML = "Memory Issue";
  } else if (progressFrac >= 0) {
    // negative progressFrac (the default) leaves the progress bar untouched
    modelProgress.value = progressFrac * modelProgress.max;
  }
  if (modalMessage !== "") {
    window.alert(modalMessage);
  }
  if (Object.keys(statData).length > 0) {
    // fire-and-forget: reportTelemetry is async but its result is not needed here
    reportTelemetry(statData);
  }
}
|
|
322 |
// Render the crosshair location string as a stack of small paragraphs in
// the "location" element.
function handleLocationChange(data) {
  const fragments = data.string
    .split(" ")
    .map((value) => `<p style="font-size: 14px;margin:0px;">${value}</p>`);
  document.getElementById("location").innerHTML = fragments.join("");
}
|
|
328 |
// ---- one-time UI and Niivue initialization ----
const defaults = {
  backColor: [0.4, 0.4, 0.4, 1],
  show3Dcrosshair: true,
  onLocationChange: handleLocationChange,
};
// shared mutable state used by the handlers and callbacks above
let diagnosticsString = ""; // filled by reportTelemetry, read by diagnosticsBtn
let missingLabelStatus = "" // accumulated by createLabeledCounts
let chopWorker; // defined only while a worker-based segmentation is running
const nv1 = new Niivue(defaults);
nv1.attachToCanvas(gl1);
nv1.opts.dragMode = nv1.dragModes.pan;
nv1.opts.multiplanarForceRender = true;
nv1.opts.yoke3Dto2DZoom = true;
nv1.opts.crosshairGap = 11;
nv1.setInterpolation(true);
await nv1.loadVolumes([{ url: "./t1_crop.nii.gz" }]);
// populate the model pull-down from the model registry
for (let i = 0; i < inferenceModelsList.length; i++) {
  const option = document.createElement("option");
  option.text = inferenceModelsList[i].modelName;
  option.value = inferenceModelsList[i].id.toString();
  modelSelect.appendChild(option);
}
nv1.onImageLoaded = doLoadImage;
modelSelect.selectedIndex = -1;
drawDrop.selectedIndex = -1;
workerCheck.checked = await isChrome(); // TODO: Safari does not yet support WebGL TFJS webworkers, test FireFox
// uncomment next two lines to automatically run segmentation when web page is loaded
// modelSelect.selectedIndex = 11
// modelSelect.onchange()

// get the query string parameter model.
// if set, select the model from the dropdown list and call the modelSelect.onchange() function
const urlParams = new URLSearchParams(window.location.search);
const modelParam = urlParams.get("model");
if (modelParam) {
  // make sure the model index is a number
  modelSelect.selectedIndex = Number(modelParam);
  modelSelect.onchange();
}
|
|
367 |
} |
|
|
368 |
|
|
|
369 |
// Display the GitHub star count for the brainchop repository (best-effort:
// any failure is logged and the UI is left unchanged).
async function updateStarCount() {
  try {
    const response = await fetch(
      `https://api.github.com/repos/neuroneural/brainchop`,
    );
    // ROBUSTNESS: fetch() only rejects on network errors; check HTTP status too
    if (!response.ok) {
      throw new Error(`GitHub API responded with ${response.status}`);
    }
    const data = await response.json();
    document.getElementById("star-count").textContent = data.stargazers_count;
  } catch (error) {
    console.error("Error fetching star count:", error);
  }
}
|
|
380 |
// Entry point: initialize the app, then fetch the star count.
(async function () {
  await main();
  updateStarCount(); // deliberately not awaited: cosmetic and non-blocking
})().catch((err) => {
  // BUGFIX: surface initialization failures instead of leaving an
  // unhandled promise rejection
  console.error("brainchop initialization failed:", err);
});
|
|
384 |
|