|
a |
|
b/dosma/cli.py |
|
|
1 |
"""Initialize and parse command line arguments for DOSMA. |
|
|
2 |
|
|
|
3 |
This module is the entry point for executing DOSMA from the command line. |
|
|
4 |
The DOSMA library is critical for processing. |
|
|
5 |
|
|
|
6 |
To use this file, it must be run as a module from the parent directory:: |
|
|
7 |
|
|
|
8 |
$ python -m dosma/cli ... |
|
|
9 |
|
|
|
10 |
Examples: |
|
|
11 |
Run T2 fitting on Subject 01, Series 007, a quantitative DESS (qDESS) scan, |
|
|
12 |
for femoral cartilage:: |
|
|
13 |
|
|
|
14 |
python -m dosma/cli --dicom subject01/dicoms/007/ --save subject01/data/ \ |
|
|
15 |
qdess --fc generate_t2_map |
|
|
16 |
|
|
|
17 |
Hint: |
|
|
18 |
Run ``python -m dosma/cli --help`` for a detailed description of different |
|
|
19 |
command line arguments. |
|
|
20 |
""" |
|
|
21 |
|
|
|
22 |
import argparse |
|
|
23 |
import ast |
|
|
24 |
import functools |
|
|
25 |
import inspect |
|
|
26 |
import logging |
|
|
27 |
import os |
|
|
28 |
import time |
|
|
29 |
import warnings |
|
|
30 |
from collections import defaultdict |
|
|
31 |
from typing import Sequence |
|
|
32 |
|
|
|
33 |
from dosma.core.io.format_io import ImageDataFormat |
|
|
34 |
from dosma.core.quant_vals import QuantitativeValueType as QV |
|
|
35 |
from dosma.defaults import preferences |
|
|
36 |
from dosma.models.seg_model import SegModel |
|
|
37 |
from dosma.models.util import SUPPORTED_MODELS, get_model, model_from_config |
|
|
38 |
from dosma.msk import knee |
|
|
39 |
from dosma.scan_sequences.mri.cones import Cones |
|
|
40 |
from dosma.scan_sequences.mri.cube_quant import CubeQuant |
|
|
41 |
from dosma.scan_sequences.mri.mapss import Mapss |
|
|
42 |
from dosma.scan_sequences.mri.qdess import QDess |
|
|
43 |
from dosma.scan_sequences.scans import ScanSequence |
|
|
44 |
from dosma.tissues.tissue import Tissue |
|
|
45 |
from dosma.utils import env |
|
|
46 |
from dosma.utils.logger import setup_logger |
|
|
47 |
|
|
|
48 |
# Quantitative values that DOSMA can fit from the supported scan sequences.
SUPPORTED_QUANTITATIVE_VALUES = [QV.T2, QV.T1_RHO, QV.T2_STAR]

# argparse destination key for the global debug flag.
DEBUG_KEY = "debug"

# argparse destination keys for data input/output paths and dicom loading options.
DICOM_KEY = "dicom"
SAVE_KEY = "save"
LOAD_KEY = "load"
IGNORE_EXT_KEY = "ignore_ext"
SPLIT_BY_KEY = "split_by"

# argparse destination keys for compute resources.
GPU_KEY = "gpu"
NUM_WORKERS_KEY = "num-workers"

# argparse destination keys identifying the selected scan and its subcommand.
SCAN_KEY = "scan"
SCAN_ACTION_KEY = "scan_action"

# argparse destination keys for segmentation-specific options.
SEGMENTATION_MODEL_KEY = "model"
SEGMENTATION_CONFIG_KEY = "config"
SEGMENTATION_WEIGHTS_DIR_KEY = "weights_dir"
SEGMENTATION_BATCH_SIZE_KEY = "batch_size"

# vargin key under which the parsed tissue instances are stored.
TISSUES_KEY = "tissues"

# Scan sequences exposed on the command line.
SUPPORTED_SCAN_TYPES = [Cones, CubeQuant, Mapss, QDess]
# Parameter annotations that can be parsed directly from command-line strings.
BASIC_TYPES = [bool, str, float, int, list, tuple]

_logger = logging.getLogger(__name__)
|
|
75 |
|
|
|
76 |
|
|
|
77 |
class CommandLineScanContainer:
    """Command-line wrapper around a :class:`ScanSequence` instance.

    See :meth:`__init__` for the full contract. Unknown attribute accesses are
    delegated to the wrapped scan via :meth:`__getattr__`.
    """

    def __init__(
        self,
        scan_type: type,
        dicom_path,
        load_path,
        ignore_ext: bool = False,
        group_by=None,
        num_workers=0,
        **kwargs,
    ):
        """The class for command-line handling around :class:`ScanSequence`.

        The command line interface for :class:`ScanSequence` data is highly structured,
        particularly in data saving and loading to support easy and accurate command-line
        compatibility.

        This class overloads some standard functionality in :class:`ScanSequence`
        (:func:`save`, :func:`load`). When methods are not implemented, this class provides
        access directly to attributes/methods of the underlying scan instantiation.
        For example, if ``scan_type=QDess``, the following will call
        :func:`QDess.generate_t2_map`:

        >>> cli_scan = CommandLineScanContainer(QDess, dicom_path="/path/to/qdess/scan")
        >>> cli_scan.generate_t2_map(...)  # this calls cli_scan.scan.generate_t2_map

        Data is loaded either from the ``dicom_path`` or the ``load_path``. If both are specified,
        the data is loaded from the ``dicom_path``.

        Args:
            scan_type (type): A scan type. Should be subclass of `ScanSequence`.
            dicom_path (str): The dicom path. This value can be ``None``, but must
                be explicitly set.
            load_path (str): The load path. This value can be ``None``, but must be
                explicitly set.
            ignore_ext (bool, optional): If ``True``, ignore extensions when loading
                dicom data. See :func:`DicomReader.load` for details.
            group_by (optional): The value(s) to group dicoms by. See :func:`DicomReader.load`
                for details.
            num_workers (int, optional): Number of workers for loading scan.

        Attributes:
            scan_type (type): The scan type to instantiate.
            scan (ScanSequence): The instantiated scan.
            generic_args (Dict[str, Any]): Generic duck typed parameter names and values.
                If parameters with this name are part of the method signature, they will
                automatically be set to the values in this dictionary. Keys include:
                * "num_workers": Number of cpu workers to use.
                * "max_workers": Alias for "num_workers" in some functions
                * "verbose": Verbosity
                * "show_pbar": Show progress bar.

        Raises:
            NotADirectoryError: If ``dicom_path`` is not a path to a directory.
        """
        # NOTE(review): extra keyword arguments (e.g. ``split_by``) are silently
        # discarded by ``**kwargs`` — callers must pass ``group_by`` to override
        # the dicom grouping tag.
        self.scan_type = scan_type

        if (dicom_path is not None) and (not os.path.isdir(dicom_path)):
            if load_path is not None:
                # Dicom dir missing but saved data available: warn and fall back.
                warnings.warn(
                    "Dicom_path {} not found. Will load data from {}".format(dicom_path, load_path)
                )
            else:
                raise NotADirectoryError("{} is not a directory".format(dicom_path))

        # Only use dicoms if the path exists and path contains files.
        is_dicom_available = (dicom_path is not None) and (os.path.isdir(dicom_path))

        # If dicom_path is specified and exists, assume user wants to start from scratch with the
        # dicoms. load_path is ignored.
        group_by = group_by if group_by is not None else scan_type.__DEFAULT_SPLIT_BY__
        if is_dicom_available:
            scan = scan_type.from_dicom(
                dicom_path, group_by=group_by, ignore_ext=ignore_ext, num_workers=num_workers
            )
        else:
            scan = self.load(load_path, num_workers=num_workers)

        self.scan = scan
        # Duck-typed kwargs pre-bound onto delegated scan methods (see __getattr__).
        self.generic_args = {
            "num_workers": num_workers,
            "max_workers": num_workers,
            "verbose": True,
            "show_pbar": True,
        }

    def __getattr__(self, name):
        """Delegate attribute access to the wrapped scan.

        For callables, any parameter appearing in both the callable's signature
        and ``self.generic_args`` is pre-bound via :func:`functools.partial`.
        """
        attr = getattr(self.scan, name)
        if callable(attr):
            params = inspect.signature(attr).parameters
            # Intersect signature parameter names with the generic duck-typed args.
            params = params.keys() & self.generic_args.keys()
            kwargs = {k: self.generic_args[k] for k in params}
            if len(kwargs):
                attr = functools.partial(attr, **kwargs)
        return attr

    def load(self, path: str, num_workers: int = 0):
        """Command line interface loading scan data.

        ``self.scan_type`` must be set before calling this function.

        Args:
            path (str): Path to pickle file or directory where data is stored.
            num_workers (int, optional): Number of workers to use to load data.

        Returns:
            ScanSequence: Scan of type ``self.scan_type``.

        Raises:
            ValueError: If path to load data from cannot be determined.

        Examples:
            >>> cli_scan.load("/path/to/pickle/file")  # load data from pickle file
            >>> cli_scan.load("/path/to/directory")  # load data from directory
        """
        scan_type = self.scan_type

        file_path = None
        if os.path.isfile(path):
            file_path = path
        elif os.path.isdir(path) and scan_type.NAME:
            # Search the directory (and its scan-specific subdirectory) for the
            # conventional '<scan name>.data' pickle file.
            fname = f"{scan_type.NAME}.data"
            _paths = (
                os.path.join(path, fname),
                os.path.join(self._save_dir(path, create_dir=False), fname),
            )
            for _path in _paths:
                if os.path.isfile(_path):
                    file_path = _path
                    break
        if file_path is None:
            raise ValueError(f"Cannot load {scan_type.__name__} data from path '{path}'")

        return scan_type.load(file_path, num_workers)

    def _save_dir(self, dir_path: str, create_dir: bool = True):
        """Returns directory path specific to this scan.

        Formatted as '`base_load_dirpath`/`scan.NAME`'.

        Args:
            dir_path (str): Directory path where all data is stored.
            create_dir (`bool`, optional): If `True`, creates directory if it doesn't exist.

        Returns:
            str: Data directory path for this scan.
        """
        scan_type = self.scan_type
        folder_id = scan_type.NAME

        # Avoid nesting '<NAME>/<NAME>' when dir_path already ends with the scan name.
        name_len = len(folder_id) + 2  # buffer
        if scan_type.NAME not in dir_path[-name_len:]:
            scan_dirpath = os.path.join(dir_path, folder_id)
        else:
            scan_dirpath = dir_path

        # scan_dirpath = os.path.join(scan_dirpath, folder_id)

        if create_dir:
            os.makedirs(scan_dirpath, exist_ok=True)

        return scan_dirpath

    def save(
        self,
        path: str,
        save_custom: bool = True,
        image_data_format: ImageDataFormat = None,
        num_workers: int = 0,
    ):
        """Save the wrapped scan into the scan-specific subdirectory of ``path``.

        See :func:`ScanSequence.save` for parameter details.
        """
        path = self._save_dir(path, create_dir=True)
        return self.scan.save(path, save_custom, image_data_format, num_workers)
|
|
249 |
|
|
|
250 |
|
|
|
251 |
def get_nargs_for_basic_type(base_type: type):
    """Return the argparse ``nargs`` value for a basic parameter type.

    Scalar types (str/float/int) consume exactly one token; sequence types
    (list/tuple) consume one or more. Any other type yields ``None``.
    """
    scalar_types = (str, float, int)
    sequence_types = (list, tuple)

    if base_type in scalar_types:
        return 1
    if base_type in sequence_types:
        return "+"
|
|
256 |
|
|
|
257 |
|
|
|
258 |
def add_tissues(parser: argparse.ArgumentParser):
    """Register one boolean ``--<STR_ID>`` flag per supported knee tissue."""
    for tissue_cls in knee.SUPPORTED_TISSUES:
        flag_name = "--%s" % tissue_cls.STR_ID
        help_text = "analyze %s" % tissue_cls.FULL_NAME
        parser.add_argument(
            flag_name, action="store_const", default=False, const=True, help=help_text
        )
|
|
267 |
|
|
|
268 |
|
|
|
269 |
def parse_tissues(vargin: dict):
    """Determine which tissues to analyze from parsed command-line arguments.

    Each supported tissue has a boolean flag keyed by its ``STR_ID`` in
    ``vargin``. If no tissue flag is set, all supported tissues are used.
    When ``vargin[LOAD_KEY]`` is set, tissue data is loaded from that path.

    Args:
        vargin (dict): Parsed command-line arguments.

    Returns:
        List[Tissue]: Instantiated (and optionally loaded) tissue objects.
    """

    def _collect(selector):
        # Instantiate, de-duplicate (by STR_ID), and optionally load tissues
        # passing ``selector``. Shared by the "explicit flags" and "all
        # tissues" paths, which previously duplicated this logic.
        collected = []
        for tissue in knee.SUPPORTED_TISSUES:
            t = tissue()
            if selector(t) and t.STR_ID not in [x.STR_ID for x in collected]:
                load_path = vargin[LOAD_KEY]
                if load_path:
                    t.load_data(load_path)
                collected.append(t)
        return collected

    tissues = _collect(lambda t: t.STR_ID in vargin.keys() and vargin[t.STR_ID])

    # if no tissues are specified, do computation for all supported tissues
    if len(tissues) == 0:
        _logger.info("No tissues specified, computing for all supported tissues...")
        tissues = _collect(lambda t: True)

    analysis_str = "Tissue(s): "
    for tissue in tissues:
        analysis_str += "%s, " % tissue.FULL_NAME

    _logger.info(analysis_str)

    return tissues
|
|
304 |
|
|
|
305 |
|
|
|
306 |
def add_segmentation_subparser(parser):
    """Attach segmentation-specific arguments (weights dir, model/config,
    batch size) to ``parser`` and return it for chaining.
    """
    weights_flag = "--%s" % SEGMENTATION_WEIGHTS_DIR_KEY
    parser.add_argument(
        weights_flag,
        type=str,
        nargs=1,
        required=True,
        help="path to directory with weights",
    )

    model_flag = "--%s" % SEGMENTATION_MODEL_KEY
    parser.add_argument(
        model_flag,
        choices=SUPPORTED_MODELS,
        nargs="?",
        default=None,
        help="built-in model to use for segmentation. Choices: %s" % SUPPORTED_MODELS,
    )

    config_flag = "--%s" % SEGMENTATION_CONFIG_KEY
    parser.add_argument(
        config_flag,
        type=str,
        default=None,
        help="config file for non-built-in model",
    )

    default_batch_size = preferences.segmentation_batch_size
    batch_size_flag = "--%s" % SEGMENTATION_BATCH_SIZE_KEY
    parser.add_argument(
        batch_size_flag,
        metavar="B",
        type=int,
        default=default_batch_size,
        nargs="?",
        help="batch size for inference. Default: %d" % default_batch_size,
    )

    return parser
|
|
337 |
|
|
|
338 |
|
|
|
339 |
def handle_segmentation(vargin, scan: ScanSequence, tissue: Tissue):
    """Build the segmentation model requested on the command line.

    Either a built-in model (``--model``) or a config-defined model
    (``--config``) must be specified; the built-in model wins if both are set.

    Args:
        vargin (dict): Parsed command-line arguments.
        scan (ScanSequence): Scan whose in-plane dimensions define the model
            input shape.
        tissue (Tissue or Sequence[Tissue]): Tissue(s) used to locate the
            weights file. Multiple tissues must resolve to the same file.

    Returns:
        SegModel: Instantiated segmentation model with ``batch_size`` set.

    Raises:
        ValueError: If neither ``--model`` nor ``--config`` is specified.
    """
    if not vargin[SEGMENTATION_MODEL_KEY] and not vargin[SEGMENTATION_CONFIG_KEY]:
        raise ValueError(
            "Either `--{}` or `--{}` must be specified".format(
                SEGMENTATION_MODEL_KEY, SEGMENTATION_CONFIG_KEY
            )
        )

    segment_weights_path = vargin[SEGMENTATION_WEIGHTS_DIR_KEY][0]
    if isinstance(tissue, Sequence):
        # Multiple tissues are only valid when they share one (multi-class)
        # weights file.
        weights = [t.find_weights(segment_weights_path) for t in tissue]
        assert all(weights_file == weights[0] for weights_file in weights)
        weights_path = weights[0]
    else:
        weights_path = tissue.find_weights(segment_weights_path)

    # Load model
    dims = scan.get_dimensions()
    # TODO: Input shape should be determined by combination of model + scan.
    # Currently fixed in 2D plane
    input_shape = (dims[0], dims[1], 1)
    if vargin[SEGMENTATION_MODEL_KEY]:
        # Use built-in model
        model = get_model(
            vargin[SEGMENTATION_MODEL_KEY], input_shape=input_shape, weights_path=weights_path
        )
    else:
        # Use config
        model = model_from_config(
            vargin[SEGMENTATION_CONFIG_KEY],
            weights_dir=segment_weights_path,
            input_shape=input_shape,
        )
    model.batch_size = vargin[SEGMENTATION_BATCH_SIZE_KEY]

    return model
|
|
375 |
|
|
|
376 |
|
|
|
377 |
# Maps custom (non-basic) parameter annotations to the handler that builds
# their value from command-line arguments.
CUSTOM_TYPE_TO_HANDLE_DICT = {SegModel: handle_segmentation}
|
|
378 |
|
|
|
379 |
|
|
|
380 |
def add_custom_argument(parser, param_type):
    """Register CLI arguments for custom (non-basic) parameter types.

    Tissue parameters are handled elsewhere; currently only ``SegModel`` is
    treated as a custom type.

    Returns:
        bool: ``True`` if a custom argument was registered for ``param_type``.
    """
    # handle all custom arguments except tissues
    if param_type is SegModel:
        add_segmentation_subparser(parser)
        return True
    return False
|
|
388 |
|
|
|
389 |
|
|
|
390 |
def add_base_argument(
    parser: argparse.ArgumentParser,
    param_name,
    param_type,
    param_default,
    param_help,
    additional_param_names: list = None,
):
    """Add a single argument of a basic (or typing-aliased basic) type to ``parser``.

    Booleans become toggle flags storing the opposite of their default; all
    other types become (optionally repeated) value arguments.

    Raises:
        TypeError: If ``param_type`` cannot be reduced to a basic type.
        ValueError: If a boolean parameter has no default value.
    """
    if additional_param_names is None:
        additional_param_names = []

    # TODO: Clean up this code block to properly do syntax parsing.
    try:
        if param_type not in BASIC_TYPES:
            param_type = extract_basic_type(param_type)
    except (AttributeError, TypeError):
        raise TypeError(
            "Parameter '{}' - type '{}' not in BASIC_TYPES".format(param_name, param_type)
        )

    # Surface the default value in the help text.
    has_default = param_default is not inspect._empty
    if has_default:
        param_help = "%s. Default: %s" % (param_help, param_default)

    param_names = ["--%s" % n for n in additional_param_names] if additional_param_names else []
    param_names.append("--%s" % param_name)

    if param_type is bool:
        if not has_default:
            raise ValueError("All boolean parameters must have a default value.")

        # e.g. default False -> 'store_true' flag.
        parser.add_argument(
            *param_names,
            action="store_%s" % (str(not param_default).lower()),
            dest=param_name,
            help=param_help,
        )
        return

    # all other values with default have this parameter
    nargs = "?" if has_default else get_nargs_for_basic_type(param_type)

    parser.add_argument(
        *param_names,
        nargs=nargs,
        default=param_default if has_default else None,
        dest=param_name,
        help=param_help,
        required=not has_default,
    )
|
|
446 |
|
|
|
447 |
|
|
|
448 |
def parse_basic_type(val, param_type):
    """Coerce an argparse-parsed value into ``param_type``.

    Single-element lists are unwrapped for scalar types; sequence types wrap
    whatever argparse collected. Falsy values are returned unchanged.
    """
    if param_type not in BASIC_TYPES:
        param_type = extract_basic_type(param_type)

    # Already the right type -- nothing to do.
    if type(val) is param_type:
        return val

    if param_type in (list, tuple):
        return param_type(val)

    # argparse may hand back a one-element list for scalar arguments.
    if type(val) is list and get_nargs_for_basic_type(param_type) == 1:
        return val[0]
    return param_type(val) if val else val
|
|
462 |
|
|
|
463 |
|
|
|
464 |
def extract_basic_type(param_type):
    """Extracts basic types from ``typing`` aliases.

    Args:
        param_type (typing._GenericAlias): A generic alias
            (e.g. ``typing.Tuple``, ``typing.List``).

    Returns:
        type: The basic type.
    """
    # Python 3.5/3.6 store the builtin under ``__extra__``; 3.7+ use ``__origin__``.
    if hasattr(param_type, "__extra__"):
        return param_type.__extra__
    return param_type.__origin__
|
|
480 |
|
|
|
481 |
|
|
|
482 |
def add_scans(dosma_subparser):
    """Register one subparser per supported scan type.

    Each scan contributes a subparser named after ``scan.NAME`` carrying the
    tissue flags plus one subcommand per command-line action. Action arguments
    are derived from the action's signature via its type annotations.

    Args:
        dosma_subparser: The ``argparse`` subparsers object to register on.

    Raises:
        ValueError: If an action parameter is missing a type annotation.
    """
    for scan in SUPPORTED_SCAN_TYPES:
        supported_actions = scan.cmd_line_actions()

        # skip scans that are not supported on the command line
        if len(supported_actions) == 0:
            continue
        scan_name = scan.NAME
        scan_parser = dosma_subparser.add_parser(scan.NAME, help="analyze %s sequence" % scan_name)
        add_tissues(scan_parser)

        scan_subparser = scan_parser.add_subparsers(
            description="%s subcommands" % scan.NAME, dest=SCAN_ACTION_KEY
        )

        for action, action_wrapper in supported_actions:
            func_signature = inspect.signature(action)
            func_name = action_wrapper.name
            aliases = action_wrapper.aliases
            action_parser = scan_subparser.add_parser(
                func_name, aliases=aliases, help=action_wrapper.help
            )

            parameters = func_signature.parameters
            for param_name in parameters.keys():
                param = parameters[param_name]
                param_type = param.annotation
                param_default = param.default

                # ``self`` is bound at call time; Tissue params are filled from
                # the tissue flags parsed separately.
                if param_name == "self" or param_type is Tissue:
                    continue

                param_help = action_wrapper.get_param_help(param_name)
                alternative_param_names = action_wrapper.get_alternative_param_names(param_name)

                if param_type is inspect._empty:
                    # Fixed typo in user-facing message ("pytying" -> "typing").
                    raise ValueError(
                        "scan %s, action %s, param %s does not have an annotation. "
                        "Use typing in the method declaration" % (scan.NAME, func_name, param_name)
                    )

                # see if the type is a custom type, if not handle it as a basic type
                is_custom_arg = add_custom_argument(action_parser, param_type)
                if is_custom_arg:
                    continue

                add_base_argument(
                    action_parser,
                    param_name,
                    param_type,
                    param_default,
                    param_help=param_help,
                    additional_param_names=alternative_param_names,
                )

        scan_parser.set_defaults(func=handle_scan)
|
|
541 |
|
|
|
542 |
|
|
|
543 |
def _find_tissue_groups(vargin, tissues: Sequence[Tissue]):
    """Group tissues by the segmentation weight file they resolve to.

    Some segmentation models ship one weight file per tissue; others use a
    single multi-class weight file shared by several tissues. Tissues that map
    to the same weight file are grouped so they can be segmented together.

    This is a temporary fix for segmenting multiple classes.
    It should not be extended or used as precedence for future development.
    """
    if not isinstance(tissues, Sequence):
        assert isinstance(tissues, Tissue)
        tissues = [tissues]

    weights_dir = vargin[SEGMENTATION_WEIGHTS_DIR_KEY][0]
    grouped = defaultdict(list)
    for tissue in tissues:
        weights_file = tissue.find_weights(weights_dir)
        grouped[weights_file].append(tissue)

    return grouped
|
|
566 |
|
|
|
567 |
|
|
|
568 |
def _build_params(vargin, scan, parameters, tissue=None):
    """Build the keyword arguments for a scan action from parsed CLI args.

    Args:
        vargin (dict): Parsed command-line arguments.
        scan: The scan (container) the action will run on.
        parameters (Mapping[str, inspect.Parameter]): Signature parameters of
            the action being invoked.
        tissue (optional): Tissue (or tissue group) bound to any
            ``Tissue``-annotated parameter. Required if such a parameter exists.

    Returns:
        dict: Keyword arguments keyed by parameter name.
    """
    param_dict = {}
    for param_name in parameters.keys():
        param = parameters[param_name]
        param_type = param.annotation

        # ``self`` is supplied by the bound method.
        if param_name == "self":
            continue

        if param_type is Tissue:
            assert tissue is not None
            param_dict["tissue"] = tissue
            continue

        if param_type in CUSTOM_TYPE_TO_HANDLE_DICT:
            # Custom types (e.g. SegModel) are built by their registered handler.
            param_dict[param_name] = CUSTOM_TYPE_TO_HANDLE_DICT[param_type](vargin, scan, tissue)
        else:
            param_dict[param_name] = parse_basic_type(vargin[param_name], param_type)
    return param_dict
|
|
587 |
|
|
|
588 |
|
|
|
589 |
def handle_scan(vargin):
    """Instantiate the requested scan from CLI args and run the requested action.

    Args:
        vargin (dict): Parsed command-line arguments.

    Returns:
        CommandLineScanContainer: The container wrapping the processed scan, or
        ``None`` if no action was requested (data is just re-saved).
    """

    scan_name = vargin[SCAN_KEY]
    _logger.info("Analyzing {}...".format(scan_name))
    scan = None

    for p_scan in SUPPORTED_SCAN_TYPES:
        if p_scan.NAME == scan_name:
            scan = p_scan
            break

    # BUG FIX: ``CommandLineScanContainer.__init__`` takes ``group_by``, not
    # ``split_by``. Passing ``split_by=...`` was silently swallowed by
    # ``**kwargs``, so the ``--split_by`` override never took effect.
    scan = CommandLineScanContainer(
        scan,
        dicom_path=vargin[DICOM_KEY],
        load_path=vargin[LOAD_KEY],
        ignore_ext=vargin[IGNORE_EXT_KEY],
        group_by=vargin[SPLIT_BY_KEY] if vargin[SPLIT_BY_KEY] else scan.__DEFAULT_SPLIT_BY__,
        num_workers=vargin[NUM_WORKERS_KEY],
    )

    tissues = vargin["tissues"]
    scan_action = scan_action_str = vargin[SCAN_ACTION_KEY]

    # search for name in the cmd_line actions
    p_action = None
    for action, action_wrapper in scan.cmd_line_actions():
        if scan_action == action_wrapper.name or scan_action in action_wrapper.aliases:
            p_action = action
            break

    action = p_action

    if action is None:
        # No action requested -- just (re)save the loaded data.
        scan.save(vargin[SAVE_KEY], image_data_format=preferences.image_data_format)
        return

    func_signature = inspect.signature(action)
    parameters = func_signature.parameters
    if scan_action_str == "segment":
        # Tissues sharing a weights file are segmented together (multi-class models).
        weights_to_tissues = _find_tissue_groups(vargin, tissues)
        for _weights_file, seg_tissues in weights_to_tissues.items():
            if len(seg_tissues) == 1:
                seg_tissues = seg_tissues[0]
            param_dict = _build_params(vargin, scan, parameters, seg_tissues)
            getattr(scan, action.__name__)(**param_dict)
    else:
        if "tissue" in func_signature.parameters.keys():
            # Run the action once per selected tissue.
            for tissue in tissues:
                param_dict = _build_params(vargin, scan, parameters, tissue)
                getattr(scan, action.__name__)(**param_dict)
        else:
            param_dict = _build_params(vargin, scan, parameters)
            getattr(scan, action.__name__)(**param_dict)

    scan.save(vargin[SAVE_KEY], image_data_format=preferences.image_data_format)
    for tissue in tissues:
        tissue.save_data(vargin[SAVE_KEY], data_format=preferences.image_data_format)

    return scan
|
|
648 |
|
|
|
649 |
|
|
|
650 |
def parse_dicom_tag_splitby(vargin_str):
    """Best-effort conversion of the ``--split_by`` value into a Python literal.

    Values like ``"(0x18, 0x86)"`` become tuples of ints; anything that is not
    a valid Python literal (e.g. ``"EchoNumbers"``) is returned unchanged.
    """
    if not vargin_str:
        return vargin_str

    try:
        return ast.literal_eval(vargin_str)
    except Exception:
        # Not a Python literal -- treat it as a plain dicom tag name.
        return vargin_str
|
|
659 |
|
|
|
660 |
|
|
|
661 |
def parse_args(f_input=None):
    """Parse arguments given through command line (argv) and run the command.

    Args:
        f_input (Sequence[str], optional): Arguments to parse instead of
            ``sys.argv``. Primarily for programmatic use and testing.

    Returns:
        float: Time elapsed (in seconds) executing the parsed command.

    Raises:
        ValueError: If neither a dicom path nor a load path is provided.
        NotADirectoryError: If the dicom path does not exist or is not a directory.
    """
    parser = argparse.ArgumentParser(
        prog="DOSMA",
        description="A deep-learning powered open source MRI analysis pipeline",
        epilog="Either `--dicom` or `--load` must be specified. "
        "If both are given, `--dicom` will be used",
    )
    parser.add_argument("--%s" % DEBUG_KEY, action="store_true", help="use debug mode")

    # Dicom and results paths
    parser.add_argument(
        "--d",
        "--%s" % DICOM_KEY,
        metavar="D",
        type=str,
        default=None,
        nargs="?",
        dest=DICOM_KEY,
        help="path to directory storing dicom files",
    )
    parser.add_argument(
        "--l",
        "--%s" % LOAD_KEY,
        metavar="L",
        type=str,
        default=None,
        nargs="?",
        dest=LOAD_KEY,
        help="path to data directory to load from",
    )
    parser.add_argument(
        "--s",
        "--%s" % SAVE_KEY,
        metavar="S",
        type=str,
        default=None,
        nargs="?",
        dest=SAVE_KEY,
        help="path to data directory to save to. Default: L/D",
    )
    parser.add_argument(
        "--%s" % IGNORE_EXT_KEY,
        action="store_true",
        default=False,
        dest=IGNORE_EXT_KEY,
        help="ignore .dcm extension when loading dicoms. Default: False",
    )
    parser.add_argument(
        "--%s" % SPLIT_BY_KEY,
        metavar="G",
        type=str,
        default=None,
        nargs="?",
        dest=SPLIT_BY_KEY,
        help="override dicom tag to split volumes by (eg. `EchoNumbers`)",
    )

    parser.add_argument(
        "--%s" % GPU_KEY,
        metavar="G",
        type=str,
        default=None,
        nargs="?",
        dest=GPU_KEY,
        help="gpu id. Default: None",
    )

    parser.add_argument(
        "--%s" % NUM_WORKERS_KEY,
        metavar="G",
        type=int,
        default=0,
        dest=NUM_WORKERS_KEY,
        help="num cpu workers. Default: 0",
    )

    # Add preferences
    preferences_flags = preferences.cmd_line_flags()
    for flag in preferences_flags.keys():
        argparse_kwargs = preferences_flags[flag]
        argparse_kwargs["dest"] = flag
        aliases = argparse_kwargs.pop("aliases", None)
        name = argparse_kwargs.pop("name", None)  # noqa: F841
        parser.add_argument(*aliases, **argparse_kwargs)

    subparsers = parser.add_subparsers(help="sub-command help", dest=SCAN_KEY)
    add_scans(subparsers)

    # MSK knee parser
    knee.knee_parser(subparsers)

    start_time = time.time()
    if f_input:
        args = parser.parse_args(f_input)
    else:
        args = parser.parse_args()

    # Only initialize logger if called from command line.
    # If UI is using it, the logger should be initialized by the UI.
    setup_logger(env.log_file_path())

    vargin = vars(args)

    if vargin[DEBUG_KEY]:
        env.debug(True)

    gpu = vargin[GPU_KEY]

    _logger.debug(vargin)

    # Pin GPU visibility; default to CPU-only when no gpu is requested and
    # the environment does not already restrict devices.
    if gpu is not None:
        os.environ["CUDA_VISIBLE_DEVICES"] = gpu
    elif "CUDA_VISIBLE_DEVICES" not in os.environ:
        os.environ["CUDA_VISIBLE_DEVICES"] = "-1"

    # parse and update preferences
    for flag in preferences_flags.keys():
        preferences.set(flag, vargin[flag])

    dicom_path = vargin[DICOM_KEY]
    load_path = vargin[LOAD_KEY]

    if not dicom_path and not load_path:
        raise ValueError("Must provide path to dicoms or path to load data from")

    # Default save location: the load directory, else '<dicom dir>/data'.
    save_path = vargin[SAVE_KEY]
    if not save_path:
        save_path = load_path if load_path else "%s/data" % dicom_path
        vargin[SAVE_KEY] = save_path

    if not os.path.isdir(save_path):
        os.makedirs(save_path)

    tissues = parse_tissues(vargin)
    vargin["tissues"] = tissues

    vargin[SPLIT_BY_KEY] = parse_dicom_tag_splitby(vargin[SPLIT_BY_KEY])

    args.func(vargin)

    time_elapsed = time.time() - start_time
    # Log the same value that is returned (previously the duration was
    # recomputed here, so logged and returned values could differ).
    _logger.info("Time Elapsed: {:.2f} seconds".format(time_elapsed))

    return time_elapsed
|
|
810 |
|
|
|
811 |
|
|
|
812 |
if __name__ == "__main__":
    # Entry point when executed as a module (``python -m dosma.cli``).
    parse_args()