{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Importing Libraries"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "import pandas as pd\n",
    "import numpy as np\n",
    "import sys"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Activities are the class labels\n",
    "# It is a 6-class classification problem\n",
    "ACTIVITIES = {\n",
    "    0: 'WALKING',\n",
    "    1: 'WALKING_UPSTAIRS',\n",
    "    2: 'WALKING_DOWNSTAIRS',\n",
    "    3: 'SITTING',\n",
    "    4: 'STANDING',\n",
    "    5: 'LAYING',\n",
    "}\n",
    "\n",
    "# Utility function to print the confusion matrix\n",
    "def confusion_matrix(Y_true, Y_pred):\n",
    "    Y_true = pd.Series([ACTIVITIES[y] for y in np.argmax(Y_true, axis=1)])\n",
    "    Y_pred = pd.Series([ACTIVITIES[y] for y in np.argmax(Y_pred, axis=1)])\n",
    "\n",
    "    return pd.crosstab(Y_true, Y_pred, rownames=['True'], colnames=['Pred'])"
   ]
  },
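  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A quick sanity check of the helper on made-up one-hot arrays (illustrative only, not part of the original pipeline): `np.argmax` recovers the class index from each one-hot row, and `pd.crosstab` tabulates true vs. predicted labels."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Illustrative dummy data: two WALKING samples and one SITTING sample;\n",
    "# the second WALKING sample is misclassified as STANDING.\n",
    "_true = np.array([[1, 0, 0, 0, 0, 0], [1, 0, 0, 0, 0, 0], [0, 0, 0, 1, 0, 0]])\n",
    "_pred = np.array([[1, 0, 0, 0, 0, 0], [0, 0, 0, 0, 1, 0], [0, 0, 0, 1, 0, 0]])\n",
    "confusion_matrix(_true, _pred)"
   ]
  },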
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Data directory\n",
    "DATADIR = 'UCI_HAR_Dataset'"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Raw data signals\n",
    "# Signals come from the accelerometer and gyroscope, in the x, y and z directions\n",
    "# 'body_acc' signals are filtered to contain only body acceleration,\n",
    "# i.e. excluding the acceleration due to gravity\n",
    "# 'total_acc' is the unfiltered triaxial acceleration from the accelerometer\n",
    "SIGNALS = [\n",
    "    \"body_acc_x\",\n",
    "    \"body_acc_y\",\n",
    "    \"body_acc_z\",\n",
    "    \"body_gyro_x\",\n",
    "    \"body_gyro_y\",\n",
    "    \"body_gyro_z\",\n",
    "    \"total_acc_x\",\n",
    "    \"total_acc_y\",\n",
    "    \"total_acc_z\"\n",
    "]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Utility function to read the data from a whitespace-delimited text file\n",
    "def _read_csv(filename):\n",
    "    return pd.read_csv(filename, delim_whitespace=True, header=None)\n",
    "\n",
    "# Utility function to load the raw signal files for a subset ('train' or 'test')\n",
    "def load_signals(subset):\n",
    "    signals_data = []\n",
    "\n",
    "    for signal in SIGNALS:\n",
    "        filename = f'{DATADIR}/{subset}/Inertial Signals/{signal}_{subset}.txt'\n",
    "        signals_data.append(\n",
    "            _read_csv(filename).values\n",
    "        )\n",
    "\n",
    "    # Transpose is used to change the dimensionality of the output,\n",
    "    # aggregating the signals by combination of sample/timestep.\n",
    "    # Resultant shape is (7352 train/2947 test samples, 128 timesteps, 9 signals)\n",
    "    return np.transpose(signals_data, (1, 2, 0))"
   ]
  },
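  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "To see what the transpose does, here is a minimal sketch with dummy arrays (illustrative only): stacking 9 signal matrices of shape (samples, timesteps) gives (9, samples, timesteps), and `np.transpose(..., (1, 2, 0))` reorders the axes to (samples, timesteps, 9), the layout the LSTM below expects."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Illustrative example with random data: 4 samples, 128 timesteps, 9 signals\n",
    "_dummy = [np.random.rand(4, 128) for _ in range(9)]\n",
    "print(np.array(_dummy).shape)                 # (9, 4, 128)\n",
    "print(np.transpose(_dummy, (1, 2, 0)).shape)  # (4, 128, 9)"
   ]
  },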
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "def load_y(subset):\n",
    "    \"\"\"\n",
    "    The objective that we are trying to predict is an integer, from 1 to 6,\n",
    "    that represents a human activity. We return a binary representation of\n",
    "    each sample's objective as a six-element vector using one-hot encoding\n",
    "    (https://pandas.pydata.org/pandas-docs/stable/generated/pandas.get_dummies.html)\n",
    "    \"\"\"\n",
    "    filename = f'{DATADIR}/{subset}/y_{subset}.txt'\n",
    "    y = _read_csv(filename)[0]\n",
    "\n",
    "    return pd.get_dummies(y).values"
   ]
  },
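  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A minimal sketch of the one-hot encoding step on a made-up label series: `pd.get_dummies` turns each integer label into a row with a single 1 in the column for that label (columns are created only for labels actually present in the series)."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Illustrative example: labels 1, 3 and 6 become three one-hot rows\n",
    "pd.get_dummies(pd.Series([1, 3, 6])).values"
   ]
  },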
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "def load_data():\n",
    "    \"\"\"\n",
    "    Obtain the dataset from multiple files.\n",
    "    Returns: X_train, X_test, y_train, y_test\n",
    "    \"\"\"\n",
    "    X_train, X_test = load_signals('train'), load_signals('test')\n",
    "    y_train, y_test = load_y('train'), load_y('test')\n",
    "\n",
    "    return X_train, X_test, y_train, y_test"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Seeding the random number generators and importing tensorflow\n",
    "np.random.seed(42)\n",
    "import tensorflow as tf\n",
    "tf.set_random_seed(42)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Configuring a single-threaded session (for reproducibility)\n",
    "session_conf = tf.ConfigProto(\n",
    "    intra_op_parallelism_threads=1,\n",
    "    inter_op_parallelism_threads=1\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/prajin/Downloads/ENTER/envs/py36/lib/python3.6/site-packages/h5py/__init__.py:34: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n",
      "  from ._conv import register_converters as _register_converters\n",
      "Using TensorFlow backend.\n"
     ]
    }
   ],
   "source": [
    "# Import Keras and attach the configured TensorFlow session to it\n",
    "from keras import backend as K\n",
    "sess = tf.Session(graph=tf.get_default_graph(), config=session_conf)\n",
    "K.set_session(sess)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Importing model and layer classes\n",
    "from keras.models import Sequential\n",
    "from keras.layers import LSTM\n",
    "from keras.layers.core import Dense, Dropout"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Initializing parameters\n",
    "epochs = 30\n",
    "batch_size = 16\n",
    "n_hidden = 32"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Utility function to count the number of distinct classes in a one-hot matrix\n",
    "def _count_classes(y):\n",
    "    return len(set([tuple(category) for category in y]))"
   ]
  },
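  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "As a quick illustration on made-up input: each one-hot row is converted to a tuple so it becomes hashable, and the size of the resulting set is the number of distinct classes."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Illustrative example: an identity matrix has one distinct one-hot row per class\n",
    "_count_classes(np.eye(6))"
   ]
  },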
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Loading the train and test data\n",
    "X_train, X_test, Y_train, Y_test = load_data()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "128\n",
      "9\n",
      "7352\n"
     ]
    }
   ],
   "source": [
    "timesteps = len(X_train[0])\n",
    "input_dim = len(X_train[0][0])\n",
    "n_classes = _count_classes(Y_train)\n",
    "\n",
    "print(timesteps)\n",
    "print(input_dim)\n",
    "print(len(X_train))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(7352, 128, 9)"
      ]
     },
     "execution_count": 19,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "X_train.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(7352, 6)"
      ]
     },
     "execution_count": 24,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "Y_train.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(2947, 128, 9)"
      ]
     },
     "execution_count": 34,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "X_test.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[[ 1.165315e-02, -2.939904e-02,  1.068262e-01, ...,\n",
       "          1.041216e+00, -2.697959e-01,  2.377977e-02],\n",
       "        [ 1.310909e-02, -3.972867e-02,  1.524549e-01, ...,\n",
       "          1.041803e+00, -2.800250e-01,  7.629271e-02],\n",
       "        [ 1.126885e-02, -5.240586e-02,  2.168462e-01, ...,\n",
       "          1.039086e+00, -2.926631e-01,  1.474754e-01],\n",
       "        ...,\n",
       "        [ 1.291511e-03,  1.173502e-02,  3.665587e-03, ...,\n",
       "          9.930164e-01, -2.599865e-01,  1.443951e-01],\n",
       "        [ 1.469997e-03,  9.517414e-03,  4.041945e-03, ...,\n",
       "          9.932414e-01, -2.620643e-01,  1.447033e-01],\n",
       "        [ 2.573841e-03,  7.305069e-03,  4.888436e-03, ...,\n",
       "          9.943906e-01, -2.641348e-01,  1.454939e-01]],\n",
       "\n",
       "       [[ 9.279629e-03,  6.650520e-03, -2.631933e-02, ...,\n",
       "          9.991921e-01, -2.649349e-01,  1.256164e-01],\n",
       "        [ 4.929711e-03,  1.864973e-02, -2.688753e-02, ...,\n",
       "          9.946787e-01, -2.532142e-01,  1.256249e-01],\n",
       "        [ 3.953596e-03,  1.553950e-02, -3.663861e-02, ...,\n",
       "          9.935518e-01, -2.565887e-01,  1.163814e-01],\n",
       "        ...,\n",
       "        [ 7.787600e-03,  4.730625e-03,  1.412899e-02, ...,\n",
       "          1.001861e+00, -2.619359e-01,  1.527878e-01],\n",
       "        [ 3.433489e-03, -4.619849e-03,  1.338054e-03, ...,\n",
       "          9.975208e-01, -2.713225e-01,  1.398428e-01],\n",
       "        [-1.238678e-03, -1.322889e-02, -1.703861e-02, ...,\n",
       "          9.928615e-01, -2.799715e-01,  1.213135e-01]],\n",
       "\n",
       "       [[ 5.731945e-03,  7.304842e-03,  1.021286e-02, ...,\n",
       "          9.975931e-01, -2.639912e-01,  1.507741e-01],\n",
       "        [ 7.065650e-03,  7.330912e-03,  1.341419e-02, ...,\n",
       "          9.989703e-01, -2.638194e-01,  1.539427e-01],\n",
       "        [ 5.109758e-03,  7.153458e-03,  3.646559e-03, ...,\n",
       "          9.970574e-01, -2.638495e-01,  1.441536e-01],\n",
       "        ...,\n",
       "        [-7.428461e-04, -9.629137e-03, -2.500924e-03, ...,\n",
       "          9.918802e-01, -2.836712e-01,  1.326780e-01],\n",
       "        [-1.923356e-03, -6.425974e-03, -2.524952e-03, ...,\n",
       "          9.906626e-01, -2.805970e-01,  1.326941e-01],\n",
       "        [-4.304617e-03, -7.932046e-03, -3.140111e-03, ...,\n",
       "          9.882446e-01, -2.822329e-01,  1.321175e-01]],\n",
       "\n",
       "       ...,\n",
       "\n",
       "       [[-1.476465e-01,  5.519791e-03,  1.025031e-02, ...,\n",
       "          8.213505e-01, -2.484623e-01, -2.216934e-01],\n",
       "        [-1.699026e-01,  3.235187e-02,  2.632373e-02, ...,\n",
       "          7.991996e-01, -2.232599e-01, -2.045561e-01],\n",
       "        [-1.686980e-01,  7.826144e-02, -2.703439e-02, ...,\n",
       "          8.004623e-01, -1.790170e-01, -2.568719e-01],\n",
       "        ...,\n",
       "        [ 4.978930e-01, -3.158365e-01, -2.321939e-02, ...,\n",
       "          1.463170e+00, -5.515283e-01, -2.723974e-01],\n",
       "        [ 2.141275e-01, -3.121422e-01,  1.814949e-01, ...,\n",
       "          1.179223e+00, -5.472997e-01, -6.773376e-02],\n",
       "        [-1.145089e-01, -2.553472e-01,  3.870347e-01, ...,\n",
       "          8.504963e-01, -4.900368e-01,  1.378256e-01]],\n",
       "\n",
       "       [[ 7.122683e-02, -1.498122e-01, -1.659306e-01, ...,\n",
       "          1.037668e+00, -3.971532e-01, -3.940817e-01],\n",
       "        [-8.866530e-02, -3.755543e-02, -8.708159e-02, ...,\n",
       "          8.780725e-01, -2.848634e-01, -3.151097e-01],\n",
       "        [-7.067473e-02, -1.615178e-02,  1.401189e-02, ...,\n",
       "          8.963897e-01, -2.635297e-01, -2.139040e-01],\n",
       "        ...,\n",
       "        [ 1.859878e-01,  7.344366e-03,  2.383924e-01, ...,\n",
       "          1.156389e+00, -2.283478e-01, -3.512052e-03],\n",
       "        [ 2.737114e-01, -2.279012e-02,  1.302276e-01, ...,\n",
       "          1.243857e+00, -2.583220e-01, -1.117857e-01],\n",
       "        [ 3.536738e-01, -1.118625e-01, -3.402252e-02, ...,\n",
       "          1.323546e+00, -3.472416e-01, -2.760682e-01]],\n",
       "\n",
       "       [[-1.936425e-01, -1.907511e-01,  1.958357e-01, ...,\n",
       "          7.713622e-01, -4.250499e-01, -5.327655e-02],\n",
       "        [-6.498738e-02, -2.035990e-01, -1.531400e-01, ...,\n",
       "          9.000949e-01, -4.375916e-01, -4.020727e-01],\n",
       "        [-9.712210e-02, -2.083832e-01, -2.710627e-01, ...,\n",
       "          8.681034e-01, -4.421595e-01, -5.197379e-01],\n",
       "        ...,\n",
       "        [-5.075521e-02, -1.047171e-01,  1.732707e-01, ...,\n",
       "          9.188616e-01, -3.516799e-01, -7.253919e-02],\n",
       "        [-1.980675e-02, -2.076396e-02,  1.956384e-01, ...,\n",
       "          9.494752e-01, -2.675260e-01, -5.097549e-02],\n",
       "        [-1.104015e-02,  5.243883e-02,  2.184321e-01, ...,\n",
       "          9.578348e-01, -1.941603e-01, -2.892477e-02]]])"
      ]
     },
     "execution_count": 35,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "X_test"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(2947, 6)"
      ]
     },
     "execution_count": 26,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "Y_test.shape"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "- Defining the architecture of the LSTM model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "_________________________________________________________________\n",
      "Layer (type)                 Output Shape              Param #   \n",
      "=================================================================\n",
      "lstm_2 (LSTM)                (None, 32)                5376      \n",
      "_________________________________________________________________\n",
      "dropout_2 (Dropout)          (None, 32)                0         \n",
      "_________________________________________________________________\n",
      "dense_2 (Dense)              (None, 6)                 198       \n",
      "=================================================================\n",
      "Total params: 5,574\n",
      "Trainable params: 5,574\n",
      "Non-trainable params: 0\n",
      "_________________________________________________________________\n"
     ]
    }
   ],
   "source": [
    "# Initializing the sequential model\n",
    "model = Sequential()\n",
    "# Configuring the parameters\n",
    "model.add(LSTM(n_hidden, input_shape=(timesteps, input_dim)))\n",
    "# Adding a dropout layer\n",
    "model.add(Dropout(0.5))\n",
    "# Adding a dense output layer with sigmoid activation\n",
    "model.add(Dense(n_classes, activation='sigmoid'))\n",
    "model.summary()"
   ]
  },
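  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Note: for a 6-class, single-label problem trained with `categorical_crossentropy`, a `softmax` output layer is the conventional choice; the `sigmoid` used above still trains, but its outputs do not form a normalized probability distribution. A minimal alternative sketch (our suggestion, not the model trained in this notebook):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Hedged alternative (not executed here): identical architecture with a softmax output\n",
    "model_softmax = Sequential()\n",
    "model_softmax.add(LSTM(n_hidden, input_shape=(timesteps, input_dim)))\n",
    "model_softmax.add(Dropout(0.5))\n",
    "model_softmax.add(Dense(n_classes, activation='softmax'))\n",
    "model_softmax.compile(loss='categorical_crossentropy', optimizer='rmsprop', metrics=['accuracy'])"
   ]
  },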
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Compiling the model\n",
    "model.compile(loss='categorical_crossentropy',\n",
    "              optimizer='rmsprop',\n",
    "              metrics=['accuracy'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train on 7352 samples, validate on 2947 samples\n",
      "Epoch 1/30\n",
      "7352/7352 [==============================] - 30s 4ms/step - loss: 1.3992 - acc: 0.3528 - val_loss: 1.3149 - val_acc: 0.3485\n",
      "Epoch 2/30\n",
      "7352/7352 [==============================] - 29s 4ms/step - loss: 1.1923 - acc: 0.4475 - val_loss: 1.1875 - val_acc: 0.4523\n",
      "Epoch 3/30\n",
      "7352/7352 [==============================] - 27s 4ms/step - loss: 1.0586 - acc: 0.4977 - val_loss: 1.1083 - val_acc: 0.5124\n",
      "Epoch 4/30\n",
      "7352/7352 [==============================] - 27s 4ms/step - loss: 0.9001 - acc: 0.6019 - val_loss: 0.9712 - val_acc: 0.5898\n",
      "Epoch 5/30\n",
      "7352/7352 [==============================] - 27s 4ms/step - loss: 0.8077 - acc: 0.6205 - val_loss: 0.8670 - val_acc: 0.5769\n",
      "Epoch 6/30\n",
      "7352/7352 [==============================] - 29s 4ms/step - loss: 0.7221 - acc: 0.6443 - val_loss: 0.7999 - val_acc: 0.6108\n",
      "Epoch 7/30\n",
      "7352/7352 [==============================] - 34s 5ms/step - loss: 0.7032 - acc: 0.6481 - val_loss: 0.8130 - val_acc: 0.6067\n",
      "Epoch 8/30\n",
      "7352/7352 [==============================] - 37s 5ms/step - loss: 0.6789 - acc: 0.6590 - val_loss: 0.7781 - val_acc: 0.6118\n",
      "Epoch 9/30\n",
      "7352/7352 [==============================] - 38s 5ms/step - loss: 0.6733 - acc: 0.6549 - val_loss: 0.8595 - val_acc: 0.6033\n",
      "Epoch 10/30\n",
      "7352/7352 [==============================] - 38s 5ms/step - loss: 0.6385 - acc: 0.6714 - val_loss: 0.8202 - val_acc: 0.6043\n",
      "Epoch 11/30\n",
      "7352/7352 [==============================] - 37s 5ms/step - loss: 0.5983 - acc: 0.6918 - val_loss: 0.7822 - val_acc: 0.6586\n",
      "Epoch 12/30\n",
      "7352/7352 [==============================] - 28s 4ms/step - loss: 0.5781 - acc: 0.7304 - val_loss: 0.7093 - val_acc: 0.7503\n",
      "Epoch 13/30\n",
      "7352/7352 [==============================] - 23s 3ms/step - loss: 0.5395 - acc: 0.7752 - val_loss: 0.6877 - val_acc: 0.7503\n",
      "Epoch 14/30\n",
      "7352/7352 [==============================] - 22s 3ms/step - loss: 0.5074 - acc: 0.7888 - val_loss: 0.5969 - val_acc: 0.7621\n",
      "Epoch 15/30\n",
      "7352/7352 [==============================] - 24s 3ms/step - loss: 0.4639 - acc: 0.7983 - val_loss: 0.6399 - val_acc: 0.7574\n",
      "Epoch 16/30\n",
      "7352/7352 [==============================] - 27s 4ms/step - loss: 0.4533 - acc: 0.8041 - val_loss: 0.5525 - val_acc: 0.7625\n",
      "Epoch 17/30\n",
      "7352/7352 [==============================] - 25s 3ms/step - loss: 0.4612 - acc: 0.8166 - val_loss: 0.5325 - val_acc: 0.7679\n",
      "Epoch 18/30\n",
      "7352/7352 [==============================] - 27s 4ms/step - loss: 0.3810 - acc: 0.8595 - val_loss: 0.5302 - val_acc: 0.8385\n",
      "Epoch 19/30\n",
      "7352/7352 [==============================] - 24s 3ms/step - loss: 0.3549 - acc: 0.8924 - val_loss: 0.7042 - val_acc: 0.8246\n",
      "Epoch 20/30\n",
      "7352/7352 [==============================] - 25s 3ms/step - loss: 0.3123 - acc: 0.9124 - val_loss: 0.5711 - val_acc: 0.8456\n",
      "Epoch 21/30\n",
      "7352/7352 [==============================] - 35s 5ms/step - loss: 0.2819 - acc: 0.9136 - val_loss: 0.5149 - val_acc: 0.8636\n",
      "Epoch 22/30\n",
      "7352/7352 [==============================] - 55s 7ms/step - loss: 0.2355 - acc: 0.9249 - val_loss: 0.5110 - val_acc: 0.8646\n",
      "Epoch 23/30\n",
      "7352/7352 [==============================] - 43s 6ms/step - loss: 0.2248 - acc: 0.9290 - val_loss: 0.6960 - val_acc: 0.8524\n",
      "Epoch 24/30\n",
      "7352/7352 [==============================] - 32s 4ms/step - loss: 0.2245 - acc: 0.9314 - val_loss: 0.6003 - val_acc: 0.8687\n",
      "Epoch 25/30\n",
      "7352/7352 [==============================] - 31s 4ms/step - loss: 0.2142 - acc: 0.9312 - val_loss: 0.4520 - val_acc: 0.8809\n",
      "Epoch 26/30\n",
      "7352/7352 [==============================] - 63s 9ms/step - loss: 0.2139 - acc: 0.9340 - val_loss: 0.4768 - val_acc: 0.8643\n",
      "Epoch 27/30\n",
      "7352/7352 [==============================] - 38s 5ms/step - loss: 0.2048 - acc: 0.9316 - val_loss: 0.4726 - val_acc: 0.8795\n",
      "Epoch 28/30\n",
      "7352/7352 [==============================] - 34s 5ms/step - loss: 0.1946 - acc: 0.9369 - val_loss: 0.4605 - val_acc: 0.8765\n",
      "Epoch 29/30\n",
      "7352/7352 [==============================] - 63s 9ms/step - loss: 0.2185 - acc: 0.9327 - val_loss: 0.4615 - val_acc: 0.8768\n",
      "Epoch 30/30\n",
      "7352/7352 [==============================] - 76s 10ms/step - loss: 0.1809 - acc: 0.9374 - val_loss: 0.4475 - val_acc: 0.8843\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "<keras.callbacks.History at 0x7fd101658ef0>"
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Training the model\n",
    "model.fit(X_train,\n",
    "          Y_train,\n",
    "          batch_size=batch_size,\n",
    "          validation_data=(X_test, Y_test),\n",
    "          epochs=epochs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2947/2947 [==============================] - 2s 821us/step\n"
     ]
    }
   ],
   "source": [
    "score = model.evaluate(X_test, Y_test)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[6.73119284e-05, 1.16691635e-05, 6.29200213e-06, 3.27186123e-03,\n",
       "        3.09266567e-01, 6.14459668e-06],\n",
       "       [6.55044132e-05, 2.05861852e-05, 1.43757034e-05, 6.77454285e-03,\n",
       "        4.01470065e-01, 6.11893711e-06],\n",
       "       [6.84985353e-05, 1.96996807e-05, 1.35612627e-05, 6.61419239e-03,\n",
       "        4.27904457e-01, 6.24169252e-06],\n",
       "       ...,\n",
       "       [1.55011995e-03, 7.93270528e-01, 3.01599008e-04, 2.30963960e-05,\n",
       "        5.54955914e-05, 1.02767759e-08],\n",
       "       [4.90763341e-04, 3.85723859e-01, 1.03853172e-05, 6.35696642e-06,\n",
       "        2.14066167e-05, 1.14835030e-08],\n",
       "       [7.23787583e-04, 6.95120990e-01, 2.18840923e-05, 8.08145796e-06,\n",
       "        6.29514252e-05, 5.91321019e-08]], dtype=float32)"
      ]
     },
     "execution_count": 33,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model.predict(X_test)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Pred                LAYING  SITTING  STANDING  WALKING  WALKING_DOWNSTAIRS  \\\n",
      "True                                                                         \n",
      "LAYING                 510        0        27        0                   0   \n",
      "SITTING                  0      375       110        3                   0   \n",
      "STANDING                 0       80       446        2                   0   \n",
      "WALKING                  0        0         0      410                  27   \n",
      "WALKING_DOWNSTAIRS       0        0         0        2                 407   \n",
      "WALKING_UPSTAIRS         0        0         0        3                  10   \n",
      "\n",
      "Pred                WALKING_UPSTAIRS  \n",
      "True                                  \n",
      "LAYING                             0  \n",
      "SITTING                            3  \n",
      "STANDING                           4  \n",
      "WALKING                           59  \n",
      "WALKING_DOWNSTAIRS                11  \n",
      "WALKING_UPSTAIRS                 458  \n"
     ]
    }
   ],
   "source": [
    "# Confusion Matrix\n",
    "print(confusion_matrix(Y_test, model.predict(X_test)))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.44746464555687265, 0.8842891075670173]"
      ]
     },
     "execution_count": 32,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "score"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "- With this simple 2-layer architecture we got 88.43% test accuracy and a loss of 0.45 (the `score` above)\n",
    "- We can further improve the performance with hyperparameter tuning; a minimal sketch follows below"
   ]
  },
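  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "An illustrative sketch of such tuning (the grid values are assumptions, not results from this notebook; as in the training cell above, the test split doubles as the validation set):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Hypothetical grid search over the hidden size and dropout rate (not executed here).\n",
    "# Each candidate reuses the architecture defined above; 'val_acc' is the metric\n",
    "# name used by this Keras version, as seen in the training logs.\n",
    "best_acc, best_params = 0.0, None\n",
    "for hidden in [32, 64, 128]:\n",
    "    for drop in [0.25, 0.5]:\n",
    "        m = Sequential()\n",
    "        m.add(LSTM(hidden, input_shape=(timesteps, input_dim)))\n",
    "        m.add(Dropout(drop))\n",
    "        m.add(Dense(n_classes, activation='sigmoid'))\n",
    "        m.compile(loss='categorical_crossentropy', optimizer='rmsprop', metrics=['accuracy'])\n",
    "        h = m.fit(X_train, Y_train, batch_size=batch_size, epochs=epochs,\n",
    "                  validation_data=(X_test, Y_test), verbose=0)\n",
    "        acc = max(h.history['val_acc'])\n",
    "        if acc > best_acc:\n",
    "            best_acc, best_params = acc, (hidden, drop)\n",
    "print(best_acc, best_params)"
   ]
  }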
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.7"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}