utils.py

"""
Utility functions for data handling: loading of snapshot matrices and
parameters from .mat files (SciPy and HDF5 formats), min-max scaling,
zero padding and directory creation.

Stefania Fresca, MOX Laboratory, Politecnico di Milano
February 2019
"""

import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'  # suppress TensorFlow INFO and WARNING messages

import numpy as np
import scipy.io as sio
import h5py

def read_data(mat):
    """Read the snapshot matrix stored under key 'S' in a .mat file and return its transpose."""
    data = sio.loadmat(mat)
    S = data['S'].squeeze()
    S = np.transpose(S)

    return S

def read_large_data(mat):
    """Read the snapshot matrix 'S' from a large .mat file stored in HDF5 format (e.g. MATLAB -v7.3)."""
    with h5py.File(mat, 'r') as file:
        S = file['S'][:]

    return S

def read_params(mat):
    """Read the parameter values stored under key 'I' in a .mat file."""
    params = sio.loadmat(mat)
    params = params['I'].squeeze()

    return params

def max_min(S_train, n_train):
    """Compute the global maximum and minimum over the first n_train training snapshots."""
    S_max = np.max(np.max(S_train[:n_train], axis=1), axis=0)
    S_min = np.min(np.min(S_train[:n_train], axis=1), axis=0)

    return S_max, S_min

def scaling(S, S_max, S_min):
    """Apply min-max scaling to S in place, using the training statistics S_max and S_min."""
    S[:] = (S - S_min) / (S_max - S_min)


def inverse_scaling(S, S_max, S_min):
    """Invert the min-max scaling in place, restoring the original range of S."""
    S[:] = (S_max - S_min) * S + S_min

def zero_pad(S, n):
    """Append n columns of zeros to S and return the padded array."""
    paddings = np.zeros((S.shape[0], n))
    S = np.hstack((S, paddings))

    return S

def safe_mkdir(path):
    """Create the directory at path, ignoring the error if it already exists."""
    try:
        os.mkdir(path)
    except OSError:
        pass
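

# Illustrative usage sketch, not part of the original module: the file names
# 'snapshots.mat' and 'parameters.mat' and the 80/20 train/test split below
# are assumptions and should be adapted to the actual dataset.
if __name__ == '__main__':
    S = read_data('snapshots.mat')           # hypothetical snapshot file with key 'S'
    params = read_params('parameters.mat')   # hypothetical parameter file with key 'I'

    n_train = int(0.8 * S.shape[0])          # assumed 80/20 train/test split
    S_max, S_min = max_min(S, n_train)       # statistics from the training portion only

    scaling(S, S_max, S_min)                 # in-place min-max scaling
    # ... train/evaluate a model on the scaled snapshots ...
    inverse_scaling(S, S_max, S_min)         # restore the original range in place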