Diff of /test_docker.py [000000] .. [42b7b1]

"""Script to predict the segmentation results and calculate surrogate biomarkers for a given testing dataset using the docker image.

This script allows users to execute the whole pipeline using the docker image.
"""
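
# Example invocation (illustrative only; the exact command depends on how the docker image was
# built -- the image name below is a placeholder and the host paths are examples). The container
# expects the raw dataset mounted at /input and writes all results to /output:
#   docker run -v /path/to/nifti_dataset:/input -v /path/to/results:/output <lfbnet_image>
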
import os
import sys
import warnings
warnings.filterwarnings('ignore')

from src.LFBNet.utilities import train_valid_paths
from src.LFBNet.preprocessing import preprocessing
from src.run import trainer, parse_argument
from src.LFBNet.utilities.compute_surrogate_features import ComputesTMTVsDmaxFromNii


def main():
    """Predicts tumor segmentation results and calculates associated quantitative metrics on a given testing dataset.

    This function receives the path to the testing dataset directory that contains the PET images. It predicts the
    segmentation results and saves them as .nii files. It then calculates the surrogate metabolic tumor volume (sTMTV)
    and the surrogate dissemination feature (sDmax) and saves them as a CSV or XLS file.

    Acronyms:
        PET: NIfTI format of [18]F-FDG PET images in SUV units.
        GT: Ground truth mask from the expert, if available.

    The input directory (raw 3D NIfTI data with SUV values) should have the following structure:

    main_dir:
        -- patient_id_1:
            -- PET
                -- give_name.nii or give_name.nii.gz
            -- GT (if available)
                -- give_name.nii or give_name.nii.gz

        -- patient_id_2:
            -- PET
                -- give_name.nii or give_name.nii.gz
            -- GT (if available)
                -- give_name.nii or give_name.nii.gz

    It reads the .nii files, resizes, crops, and saves the 3D data; from these data it then generates the sagittal and
    coronal PET MIPs, and the corresponding ground truth MIPs (mask from the expert) if available in the folder.

    It gets the latest trained model weights from the './weight' directory and uses them to predict the segmentation.

    Returns:
        Saves the segmented images and the computed surrogate biomarker features, using the last weights saved in the
        './weight' folder.
    """

    # Path to the parent/main directory. Please read readme.md for how to organize your files.
    input_dir = "/input"
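
    # Optional sanity check (illustrative sketch, not part of the original pipeline): warn about any
    # case folder under the input directory that lacks the 'PET' sub-folder described in the
    # docstring above.
    # for case_id in sorted(os.listdir(input_dir)):
    #     if not os.path.isdir(os.path.join(input_dir, case_id, 'PET')):
    #         print('Warning: no PET folder found for case %s' % case_id)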

    # parameters to set
    dataset_name = 'data'
    desired_spacing = [4.0, 4.0, 4.0]

    # path to the preprocessed data
    preprocessing_data_dir = "/output"
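
    # Preprocessing: resample the PET (and GT, when present) to the desired voxel spacing, resize/crop
    # to the target output resolution, save the processed 3D volumes, and generate the sagittal and
    # coronal MIPs used for the prediction step below.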
    preprocessing_params = dict(
        data_path=input_dir, data_name=dataset_name, saving_dir=preprocessing_data_dir, save_3D=True,
        output_resolution=[128, 128, 256], desired_spacing=desired_spacing, generate_mip=True
        )
    mip_data_dir = preprocessing.read_pet_gt_resize_crop_save_as_3d_andor_mip(**preprocessing_params)

    # get list of all patient names from the generated MIP directory
    patient_id_list = os.listdir(mip_data_dir)
    print('There are %d cases to evaluate \n' % len(patient_id_list))

    # prediction on the given testing dataset
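    # The prediction step uses the latest trained model weights from the './weight' directory (see
    # the docstring above) and saves the predicted segmentation masks as .nii files under predicted_dir.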
    test_params = dict(
        preprocessed_dir=mip_data_dir, data_list=patient_id_list, predicted_dir=preprocessing_data_dir
        )
    network_run = trainer.ModelTesting(**test_params)
    network_run.test()

    print("\n\n Computing the surrogate biomarkers ... \n\n")
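    # Compute the surrogate biomarkers (sTMTV and sDmax) from the predicted masks and, when ground
    # truth is available, from the reference masks as well; missing directories (e.g., no GT
    # provided) are simply skipped.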
    for identifier, data_path in zip(
            ["predicted", "ground_truth"],
            [os.path.join(preprocessing_data_dir, "predicted_data"),
             os.path.join(preprocessing_data_dir, "data_default_MIP_dir")]
            ):
        try:
            csv_file = ComputesTMTVsDmaxFromNii(data_path=data_path, get_identifier=identifier)
            csv_file.compute_and_save_surrogate_features()
        except Exception:
            # skip this identifier if its directory is missing or feature computation fails
            continue
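
    # To inspect the exported feature tables afterwards, one could load them with pandas
    # (illustrative sketch; the output location and file names depend on ComputesTMTVsDmaxFromNii,
    # so adjust the search path as needed):
    #   import glob
    #   import pandas as pd
    #   for csv_path in glob.glob(os.path.join(preprocessing_data_dir, '**', '*.csv'), recursive=True):
    #       print(csv_path, '\n', pd.read_csv(csv_path).head())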


# check
if __name__ == '__main__':
    print("\n Running the integrated framework for testing use case... \n\n")
    main()