b/utils.py
import platform
import pwd
import subprocess
import time
import numpy as np
import glob
import os
import cPickle as pickle

maxfloat = np.finfo(np.float32).max
def auto_make_dir(path):
    if not os.path.exists(path):
        os.makedirs(path)
        print 'Created dir', path


def find_model_metadata(metadata_dir, config_name):
    metadata_paths = glob.glob(metadata_dir + '/%s-*' % config_name)
    if not metadata_paths:
        raise ValueError('No metadata files for config %s' % config_name)
    elif len(metadata_paths) > 1:
        raise ValueError('Multiple metadata files for config %s' % config_name)
    print 'Loaded model from', metadata_paths[0]
    return metadata_paths[0]


def get_train_valid_split(train_data_path):
    filename = 'valid_split.pkl'
    # if not os.path.isfile(filename):
    #     print 'Making validation split'
    #     create_validation_split.save_train_validation_ids(filename, train_data_path)
    return load_pkl(filename)


def check_data_paths(data_path):
    if not os.path.isdir(data_path):
        raise ValueError('wrong path to DICOM data')
def get_dir_path(dir_name, root_dir, no_name=True):
    # By default the directory is shared; with no_name=False a per-user
    # subdirectory (named after the current unix user) is used instead.
    if no_name:
        username = ''
    else:
        username = pwd.getpwuid(os.getuid())[0]
    dir_path = root_dir + '/' + dir_name + '/%s' % username
    if not os.path.isdir(dir_path):
        os.makedirs(dir_path)
    return dir_path


def hms(seconds):
    seconds = np.floor(seconds)
    minutes, seconds = divmod(seconds, 60)
    hours, minutes = divmod(minutes, 60)

    return "%02d:%02d:%02d" % (hours, minutes, seconds)
def timestamp():
    return time.strftime("%Y%m%d-%H%M%S", time.localtime())


def hostname():
    return platform.node()


def generate_expid(arch_name):
    return "%s-%s" % (arch_name, timestamp())


def get_git_revision_hash():
    try:
        return subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()
    except:
        # Fall back to 0 when the revision cannot be determined
        # (e.g. not a git checkout or git not installed).
        return 0
def save_pkl(obj, path):
    with open(path, 'wb') as f:
        pickle.dump(obj, f)


def load_pkl(path):
    with open(path, 'rb') as f:
        obj = pickle.load(f)
    return obj


def save_np(obj, path):
    np.save(file=path, arr=obj, fix_imports=True)


def load_np(path):
    return np.load(path)
99 |
def copy(from_folder, to_folder): |
|
|
100 |
command = "cp -r %s %s/." % (from_folder, to_folder) |
|
|
101 |
print command |
|
|
102 |
os.system(command) |
|
|
103 |
|
|
|
104 |
|
|
|
105 |
def current_learning_rate(schedule, idx): |
|
|
106 |
s = schedule.keys() |
|
|
107 |
s.sort() |
|
|
108 |
current_lr = schedule[0] |
|
|
109 |
for i in s: |
|
|
110 |
if idx >= i: |
|
|
111 |
current_lr = schedule[i] |
|
|
112 |
|
|
|
113 |
return current_lr |
|
|
114 |
|
|
|
115 |
|
|
|
116 |
def get_script_name(file_path): |
|
|
117 |
return os.path.basename(file_path).replace('.py', '') |