b/ML Training.py

import math
import numpy as np
import h5py
import matplotlib.pyplot as plt
import tensorflow as tf
from tensorflow.python.framework import ops
from sklearn import preprocessing
from sklearn.preprocessing import OneHotEncoder
from tf_utils import load_dataset, convert_to_one_hot
from backwardPropagation import model
from keras.utils import to_categorical

# Load the train/test splits provided by tf_utils
X_train, X_test, y_train, y_test = load_dataset()

# Take the transpose of the input data and normalize it: mean-centre each
# column, then scale by its range. fillna(0) clears any NaNs produced when
# a column has zero range (this assumes the splits are pandas DataFrames).
X_train = X_train.T
X_train = (X_train - X_train.mean()) / (X_train.max() - X_train.min())
X_train = X_train.fillna(0)

X_test = X_test.T
X_test = (X_test - X_test.mean()) / (X_test.max() - X_test.min())
X_test = X_test.fillna(0)

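# Side note: fillna is a pandas method, so the normalization above assumes
# load_dataset returns DataFrames. If it handed back plain NumPy arrays, a
# rough equivalent of the same transpose-and-scale step (a sketch only, not
# used elsewhere in this script) could look like this:
def normalize_features(X):
    """Transpose, mean-centre and range-scale each column, and replace the
    NaNs that zero-range columns produce with 0 (the NumPy counterpart of
    the fillna(0) calls above)."""
    X = X.T
    X = (X - X.mean(axis=0)) / (X.max(axis=0) - X.min(axis=0))
    return np.nan_to_num(X, nan=0.0)
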
# Convert training and test labels to one-hot matrices (9 classes), then
# transpose them to match the layout used for the inputs above.
y_train = to_categorical(y_train, 9)
y_train = y_train.T
# print(y_train)
# print(y_train.shape)

y_test = to_categorical(y_test, 9)
y_test = y_test.T

# print(X_train)
# print(y_train)
# print(X_test)
# print(y_test)

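# For reference, a tiny sanity check (toy labels, not taken from the dataset)
# of what the conversion above produces: to_categorical returns one row per
# label, and the transpose yields a (classes, examples) matrix. Uncomment to run:
# demo = to_categorical(np.array([0, 2, 1]), 9)
# print(demo.shape)    # (3, 9)
# print(demo.T.shape)  # (9, 3)
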
# Train the network defined in backwardPropagation.model and inspect the
# learned first-layer weights (model is expected to return a dictionary of
# trained parameters that includes "W1").
parameters = model(X_train, y_train, X_test, y_test)

print(parameters["W1"])