{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {
    "papermill": {
     "duration": 0.012261,
     "end_time": "2020-08-20T13:10:33.841122",
     "exception": false,
     "start_time": "2020-08-20T13:10:33.828861",
     "status": "completed"
    },
    "tags": []
   },
   "source": [
    "# CNN with transfer learning with weights (efficientnet) + MLP\n",
    "\n",
    "This notebook contains the configurations required to train an efficientnet model for K-folds.\n",
    "\n",
    "It is possible to hit -0.6910 LB by tweaking parameters in this notebook!\n",
    "\n",
    "https://www.kaggle.com/khoongweihao/k-fold-tf-efficientnet-models-training"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import pandas as pd\n",
    "import os\n",
    "import logging\n",
    "logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## A - Preprocessing : Reading Data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "os.chdir('../')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>Patient</th>\n",
       "      <th>Weeks</th>\n",
       "      <th>FVC</th>\n",
       "      <th>Percent</th>\n",
       "      <th>Age</th>\n",
       "      <th>Sex</th>\n",
       "      <th>SmokingStatus</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>-4</td>\n",
       "      <td>2315</td>\n",
       "      <td>58.253649</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>5</td>\n",
       "      <td>2214</td>\n",
       "      <td>55.712129</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>7</td>\n",
       "      <td>2061</td>\n",
       "      <td>51.862104</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>9</td>\n",
       "      <td>2144</td>\n",
       "      <td>53.950679</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>11</td>\n",
       "      <td>2069</td>\n",
       "      <td>52.063412</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                     Patient  Weeks   FVC    Percent  Age   Sex SmokingStatus\n",
       "0  ID00007637202177411956430     -4  2315  58.253649   79  Male     Ex-smoker\n",
       "1  ID00007637202177411956430      5  2214  55.712129   79  Male     Ex-smoker\n",
       "2  ID00007637202177411956430      7  2061  51.862104   79  Male     Ex-smoker\n",
       "3  ID00007637202177411956430      9  2144  53.950679   79  Male     Ex-smoker\n",
       "4  ID00007637202177411956430     11  2069  52.063412   79  Male     Ex-smoker"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from preprocessing.read_load_data import read_data\n",
    "\n",
    "input_directory='../osic-pulmonary-fibrosis-progression'\n",
    "train_df, test_df, sample_df = read_data(input_directory)   \n",
    "train_df.head()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## B - Preprocessing : Loading Data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "patients_train_ids= train_df.Patient.unique()\n",
    "patient_test_list= test_df.Patient.unique()\n",
    "patients_train_ids = [pat for pat in patients_train_ids]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:loading  attributes...\n",
      "INFO:loading images...\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Array shape:  (176, 240, 240, 4)\n",
      "min value:  -0.1251496147096971\n",
      "max value:  0.16921848376184256\n"
     ]
    }
   ],
   "source": [
    "from preprocessing.read_load_data import load_images\n",
    "\n",
    "logging.info(\"loading  attributes...\")\n",
    "df = pd.read_csv(f'{input_directory}/train.csv')\n",
    "patients_train_ids= df.Patient.unique().tolist()\n",
    "\n",
    "logging.info(\"loading images...\")\n",
    "images = load_images(input_directory,\n",
    "                    'train',\n",
    "                     patients_train_ids,\n",
    "                     option='superposition',\n",
    "                     outputH = 240,\n",
    "                     outputW = 240)\n",
    "\n",
    "print(\"Array shape: \", images.shape)\n",
    "#check value between -1,1\n",
    "print('min value: ', np.amin(images))\n",
    "print('max value: ', np.amax(images))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>Patient</th>\n",
       "      <th>Weeks</th>\n",
       "      <th>FVC</th>\n",
       "      <th>Percent</th>\n",
       "      <th>Age</th>\n",
       "      <th>Sex</th>\n",
       "      <th>SmokingStatus</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>-4</td>\n",
       "      <td>2315</td>\n",
       "      <td>58.253649</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>5</td>\n",
       "      <td>2214</td>\n",
       "      <td>55.712129</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>7</td>\n",
       "      <td>2061</td>\n",
       "      <td>51.862104</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>9</td>\n",
       "      <td>2144</td>\n",
       "      <td>53.950679</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>11</td>\n",
       "      <td>2069</td>\n",
       "      <td>52.063412</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                     Patient  Weeks   FVC    Percent  Age   Sex SmokingStatus\n",
       "0  ID00007637202177411956430     -4  2315  58.253649   79  Male     Ex-smoker\n",
       "1  ID00007637202177411956430      5  2214  55.712129   79  Male     Ex-smoker\n",
       "2  ID00007637202177411956430      7  2061  51.862104   79  Male     Ex-smoker\n",
       "3  ID00007637202177411956430      9  2144  53.950679   79  Male     Ex-smoker\n",
       "4  ID00007637202177411956430     11  2069  52.063412   79  Male     Ex-smoker"
      ]
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>Patient</th>\n",
       "      <th>Weeks</th>\n",
       "      <th>FVC</th>\n",
       "      <th>Percent</th>\n",
       "      <th>Age</th>\n",
       "      <th>Sex</th>\n",
       "      <th>SmokingStatus</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>9</th>\n",
       "      <td>ID00009637202177434476278</td>\n",
       "      <td>8</td>\n",
       "      <td>3660</td>\n",
       "      <td>85.282878</td>\n",
       "      <td>69</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>10</th>\n",
       "      <td>ID00009637202177434476278</td>\n",
       "      <td>9</td>\n",
       "      <td>3610</td>\n",
       "      <td>84.117812</td>\n",
       "      <td>69</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11</th>\n",
       "      <td>ID00009637202177434476278</td>\n",
       "      <td>11</td>\n",
       "      <td>3895</td>\n",
       "      <td>90.758691</td>\n",
       "      <td>69</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>12</th>\n",
       "      <td>ID00009637202177434476278</td>\n",
       "      <td>13</td>\n",
       "      <td>3759</td>\n",
       "      <td>87.589710</td>\n",
       "      <td>69</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>13</th>\n",
       "      <td>ID00009637202177434476278</td>\n",
       "      <td>15</td>\n",
       "      <td>3639</td>\n",
       "      <td>84.793550</td>\n",
       "      <td>69</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>14</th>\n",
       "      <td>ID00009637202177434476278</td>\n",
       "      <td>22</td>\n",
       "      <td>3578</td>\n",
       "      <td>83.372169</td>\n",
       "      <td>69</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>15</th>\n",
       "      <td>ID00009637202177434476278</td>\n",
       "      <td>33</td>\n",
       "      <td>3625</td>\n",
       "      <td>84.467332</td>\n",
       "      <td>69</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>16</th>\n",
       "      <td>ID00009637202177434476278</td>\n",
       "      <td>45</td>\n",
       "      <td>3390</td>\n",
       "      <td>78.991518</td>\n",
       "      <td>69</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>17</th>\n",
       "      <td>ID00009637202177434476278</td>\n",
       "      <td>60</td>\n",
       "      <td>3214</td>\n",
       "      <td>74.890484</td>\n",
       "      <td>69</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                      Patient  Weeks   FVC    Percent  Age   Sex SmokingStatus\n",
       "9   ID00009637202177434476278      8  3660  85.282878   69  Male     Ex-smoker\n",
       "10  ID00009637202177434476278      9  3610  84.117812   69  Male     Ex-smoker\n",
       "11  ID00009637202177434476278     11  3895  90.758691   69  Male     Ex-smoker\n",
       "12  ID00009637202177434476278     13  3759  87.589710   69  Male     Ex-smoker\n",
       "13  ID00009637202177434476278     15  3639  84.793550   69  Male     Ex-smoker\n",
       "14  ID00009637202177434476278     22  3578  83.372169   69  Male     Ex-smoker\n",
       "15  ID00009637202177434476278     33  3625  84.467332   69  Male     Ex-smoker\n",
       "16  ID00009637202177434476278     45  3390  78.991518   69  Male     Ex-smoker\n",
       "17  ID00009637202177434476278     60  3214  74.890484   69  Male     Ex-smoker"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df.loc[np.where(df.Patient == 'ID00009637202177434476278')]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Patient          ID00009637202177434476278\n",
       "Weeks                                    9\n",
       "FVC                                   3610\n",
       "Percent                          84.117812\n",
       "Age                                     69\n",
       "Sex                                   Male\n",
       "SmokingStatus                    Ex-smoker\n",
       "Name: 10, dtype: object"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df.iloc[10]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## C - Preprocessing : shuffle"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.model_selection import train_test_split\n",
    "\n",
    "split = train_test_split(patients_train_ids, images, test_size=0.2, random_state=42)\n",
    "(trainPatient, testPatient, trainImagesX, testImagesX) = split"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    " #split the dataframe like the images\n",
    "df_train = df[df.Patient.isin(trainPatient)].copy()\n",
    "df_test = df[df.Patient.isin(testPatient)].copy()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:NumExpr defaulting to 8 threads.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(1093, 8) (280, 8)\n"
     ]
    }
   ],
   "source": [
    "from preprocessing.read_load_data import create_dataframe\n",
    "\n",
    "trainAttrX = create_dataframe(df_train)\n",
    "testAttrX = create_dataframe(df_test)\n",
    "print(trainAttrX.shape, testAttrX.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1093 280\n"
     ]
    }
   ],
   "source": [
    "#set one image per training row\n",
    "\n",
    "indice = 0\n",
    "train_dataset = np.ndarray((len(trainAttrX),240,240,4))\n",
    "for i,patient in enumerate(trainPatient):\n",
    "    nb_data = len(trainAttrX[trainAttrX.PatientID ==patient])\n",
    "    for ii in range(nb_data):\n",
    "        train_dataset[indice]=(trainImagesX[i])\n",
    "        indice+=1\n",
    "        \n",
    "        \n",
    "indicet = 0        \n",
    "test_dataset = np.ndarray((len(testAttrX),240,240,4))\n",
    "for i,patient in enumerate(testPatient):\n",
    "    nb_data = len(testAttrX[testAttrX.PatientID ==patient])\n",
    "    for ii in range(nb_data):\n",
    "        test_dataset[indicet] = testImagesX[i]\n",
    "        indicet+=1\n",
    "        \n",
    "        \n",
    "print(len(train_dataset),len(test_dataset))"
   ]
  },
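  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The two loops above repeat each patient's 3D image once per clinical visit, so the image tensor stays aligned row-for-row with the tabular data. A more compact, equivalent construction (a sketch, not executed here) would use `np.repeat` with per-patient visit counts:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# sketch: repeat each patient's image once per row of the attribute table\n",
    "counts = [len(trainAttrX[trainAttrX.PatientID == p]) for p in trainPatient]\n",
    "train_dataset_alt = np.repeat(trainImagesX, counts, axis=0)\n",
    "assert train_dataset_alt.shape == train_dataset.shape"
   ]
  },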
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## D - Preprocessing : Scaling + Encoding"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "from preprocessing.scale_data import scale_variable\n",
    "\n",
    "sc, trainAttrX, testAttrX = scale_variable(trainAttrX, testAttrX,'Target_FVC')\n",
    "sc1, trainAttrX, testAttrX = scale_variable(trainAttrX, testAttrX,'First_FVC')\n",
    "sc2, trainAttrX, testAttrX = scale_variable(trainAttrX, testAttrX,'Age')\n",
    "\n",
    "trainY = trainAttrX.loc[:,'Target_FVC_scaled']\n",
    "testY = testAttrX.loc[:,'Target_FVC_scaled']"
   ]
  },
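  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`scale_variable` lives in this repository's `preprocessing.scale_data` module and returns the fitted scaler together with the transformed dataframes. As a rough mental model (an assumption, since the implementation is outside this notebook), it behaves like a `StandardScaler` fitted on the training column only:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# assumption: a minimal re-implementation of what scale_variable is expected to do\n",
    "from sklearn.preprocessing import StandardScaler\n",
    "\n",
    "def scale_variable_sketch(train, test, col):\n",
    "    sc = StandardScaler()\n",
    "    train[col + '_scaled'] = sc.fit_transform(train[[col]])  # fit on train only\n",
    "    test[col + '_scaled'] = sc.transform(test[[col]])  # reuse train statistics\n",
    "    return sc, train, test"
   ]
  },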
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [],
   "source": [
    "from preprocessing.scale_data import encode_variable\n",
    "\n",
    "trainAttrX, testAttrX = encode_variable(trainAttrX, testAttrX,'Sex')\n",
    "trainAttrX, testAttrX = encode_variable(trainAttrX, testAttrX,'SmokingStatus')\n",
    "\n",
    "for dft in [trainAttrX,testAttrX]:\n",
    "    dft.drop(columns = ['Sex','SmokingStatus','Target_FVC','Target_FVC_scaled',\n",
    "                          'PatientID','First_FVC','Age'], inplace = True)\n",
    "    dft.loc[:,'First_Percent'] = dft.loc[:,'First_Percent']/100\n",
    "    dft.loc[:,'Delta_week'] = dft.loc[:,'Delta_week']/133"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## E - Processing : Create models"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [],
   "source": [
    "from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping\n",
    "\n",
    "#set early stopping criteria\n",
    "pat = 5 #this is the number of epochs with no improvment after which the training will stop\n",
    "es = EarlyStopping(monitor='val_loss', patience=pat, verbose=1)\n",
    "\n",
    "#define the model checkpoint callback -> this will keep on saving the model as a physical file\n",
    "cp = ModelCheckpoint('clean_notebooks/cnn_injection_transfer_weights.h5', verbose=1, save_best_only=True)"
   ]
  },
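  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "With `save_best_only=True`, the checkpoint only overwrites the `.h5` file when `val_loss` improves, which matches the `val_loss improved ... saving model` messages in the training log further down. Illustratively, both callbacks are passed to `Model.fit` like this (placeholder names, not the committed training call):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# illustrative only: how es/cp plug into a training call (placeholder data names)\n",
    "# model.fit(train_x, train_y,\n",
    "#           validation_data=(val_x, val_y),\n",
    "#           epochs=30, batch_size=8,\n",
    "#           callbacks=[es, cp])"
   ]
  },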
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [],
   "source": [
    "def custom_shuffle_split(trainAttrX,train_dataset,trainY,test_size = 0.1 ):\n",
    "    cut = int(len(trainY)*test_size)\n",
    "    arr = list(np.arange(len(trainY)))\n",
    "    np.random.shuffle(arr)\n",
    "    trainidx = arr[cut:]\n",
    "    testidx = arr[:cut]\n",
    "    train_x, train_y = [trainAttrX.iloc[trainidx], train_dataset[trainidx]] , trainY[trainidx]\n",
    "    val_x, val_y = [trainAttrX.iloc[testidx], train_dataset[testidx]] , trainY[testidx]\n",
    "    return train_x, val_x, train_y, val_y"
   ]
  },
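  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`custom_shuffle_split` mirrors `train_test_split`, but keeps the tabular features and the image tensor paired under a single shuffled index. An illustrative call (the K-fold training loop drives this itself):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# illustrative: carve out a 10% validation set with attributes and images aligned\n",
    "train_x, val_x, train_y, val_y = custom_shuffle_split(trainAttrX, train_dataset, trainY, test_size=0.1)\n",
    "print(len(train_y), len(val_y))"
   ]
  },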
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [],
   "source": [
    "from processing.models import create_hybrid_transfer\n",
    "from keras.optimizers import Adam\n",
    "from tensorflow.keras.models import Model\n",
    "import efficientnet.tfkeras as efn"
   ]
  },
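  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The next cell's log shows a K-fold loop: each fold builds the hybrid model, trains it with the `es`/`cp` callbacks, and prints `Training on Fold: ...`. A minimal sketch of such a loop, assuming `create_hybrid_transfer` returns a two-input Keras model (its signature, the fold count, and the optimizer settings below are assumptions, not the committed values):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# sketch only -- create_hybrid_transfer's signature and all hyper-parameters are assumed\n",
    "# FOLDS = 4\n",
    "# for fold in range(1, FOLDS + 1):\n",
    "#     print('Training on Fold: ', fold)\n",
    "#     model = create_hybrid_transfer(trainAttrX.shape[1], (240, 240, 4))\n",
    "#     model.compile(loss='mean_squared_error', optimizer=Adam(learning_rate=1e-3))\n",
    "#     train_x, val_x, train_y, val_y = custom_shuffle_split(trainAttrX, train_dataset, trainY)\n",
    "#     model.fit(train_x, train_y, validation_data=(val_x, val_y),\n",
    "#               epochs=30, batch_size=8, callbacks=[es, cp])"
   ]
  },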
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Training on Fold:  1\n",
      "Epoch 1/30\n",
      "111/111 [==============================] - ETA: 15:07 - loss: 0.91 - ETA: 1:07 - loss: 0.8403 - ETA: 1:06 - loss: 0.774 - ETA: 1:03 - loss: 0.725 - ETA: 1:04 - loss: 0.707 - ETA: 1:03 - loss: 0.703 - ETA: 1:02 - loss: 0.697 - ETA: 1:01 - loss: 0.693 - ETA: 1:01 - loss: 0.706 - ETA: 1:01 - loss: 0.726 - ETA: 1:01 - loss: 0.739 - ETA: 1:01 - loss: 0.752 - ETA: 1:03 - loss: 0.765 - ETA: 1:03 - loss: 0.776 - ETA: 1:02 - loss: 0.788 - ETA: 1:02 - loss: 0.800 - ETA: 1:01 - loss: 0.810 - ETA: 1:01 - loss: 0.817 - ETA: 1:00 - loss: 0.824 - ETA: 59s - loss: 0.829 - ETA: 59s - loss: 0.83 - ETA: 58s - loss: 0.84 - ETA: 58s - loss: 0.84 - ETA: 58s - loss: 0.85 - ETA: 57s - loss: 0.85 - ETA: 57s - loss: 0.86 - ETA: 56s - loss: 0.86 - ETA: 55s - loss: 0.86 - ETA: 54s - loss: 0.87 - ETA: 54s - loss: 0.87 - ETA: 53s - loss: 0.87 - ETA: 53s - loss: 0.87 - ETA: 52s - loss: 0.87 - ETA: 51s - loss: 0.88 - ETA: 51s - loss: 0.88 - ETA: 50s - loss: 0.88 - ETA: 49s - loss: 0.88 - ETA: 48s - loss: 0.88 - ETA: 48s - loss: 0.89 - ETA: 47s - loss: 0.89 - ETA: 46s - loss: 0.89 - ETA: 45s - loss: 0.89 - ETA: 45s - loss: 0.90 - ETA: 44s - loss: 0.90 - ETA: 43s - loss: 0.90 - ETA: 42s - loss: 0.90 - ETA: 42s - loss: 0.90 - ETA: 41s - loss: 0.91 - ETA: 40s - loss: 0.91 - ETA: 39s - loss: 0.91 - ETA: 38s - loss: 0.91 - ETA: 38s - loss: 0.91 - ETA: 37s - loss: 0.91 - ETA: 36s - loss: 0.91 - ETA: 36s - loss: 0.91 - ETA: 35s - loss: 0.92 - ETA: 34s - loss: 0.92 - ETA: 33s - loss: 0.92 - ETA: 33s - loss: 0.92 - ETA: 32s - loss: 0.92 - ETA: 31s - loss: 0.92 - ETA: 31s - loss: 0.92 - ETA: 30s - loss: 0.92 - ETA: 29s - loss: 0.92 - ETA: 28s - loss: 0.92 - ETA: 28s - loss: 0.92 - ETA: 27s - loss: 0.92 - ETA: 26s - loss: 0.92 - ETA: 26s - loss: 0.92 - ETA: 25s - loss: 0.92 - ETA: 24s - loss: 0.92 - ETA: 24s - loss: 0.93 - ETA: 23s - loss: 0.93 - ETA: 23s - loss: 0.93 - ETA: 22s - loss: 0.93 - ETA: 21s - loss: 0.93 - ETA: 21s - loss: 0.92 - ETA: 20s - loss: 0.92 - ETA: 19s - loss: 0.92 - ETA: 19s - loss: 0.92 - ETA: 18s - loss: 0.92 - ETA: 18s - loss: 0.92 - ETA: 17s - loss: 0.92 - ETA: 16s - loss: 0.92 - ETA: 16s - loss: 0.92 - ETA: 15s - loss: 0.92 - ETA: 14s - loss: 0.92 - ETA: 14s - loss: 0.92 - ETA: 13s - loss: 0.92 - ETA: 13s - loss: 0.92 - ETA: 12s - loss: 0.92 - ETA: 11s - loss: 0.92 - ETA: 11s - loss: 0.92 - ETA: 10s - loss: 0.92 - ETA: 9s - loss: 0.9276 - ETA: 9s - loss: 0.927 - ETA: 8s - loss: 0.927 - ETA: 8s - loss: 0.927 - ETA: 7s - loss: 0.927 - ETA: 6s - loss: 0.927 - ETA: 6s - loss: 0.927 - ETA: 5s - loss: 0.927 - ETA: 4s - loss: 0.927 - ETA: 4s - loss: 0.927 - ETA: 3s - loss: 0.927 - ETA: 3s - loss: 0.927 - ETA: 2s - loss: 0.927 - ETA: 1s - loss: 0.927 - ETA: 1s - loss: 0.927 - ETA: 0s - loss: 0.927 - ETA: 0s - loss: 0.927 - 87s 713ms/step - loss: 0.9276 - val_loss: 0.8027\n",
      "\n",
      "Epoch 00001: val_loss improved from inf to 0.80272, saving model to clean_notebooks\\cnn_injection_transfer_weights.h5\n",
      "Epoch 2/30\n",
      "111/111 [==============================] - ETA: 1:10 - loss: 0.369 - ETA: 1:06 - loss: 0.387 - ETA: 1:07 - loss: 0.386 - ETA: 1:07 - loss: 0.383 - ETA: 1:04 - loss: 0.419 - ETA: 1:02 - loss: 0.433 - ETA: 1:03 - loss: 0.451 - ETA: 1:03 - loss: 0.476 - ETA: 1:04 - loss: 0.494 - ETA: 1:04 - loss: 0.507 - ETA: 1:04 - loss: 0.524 - ETA: 1:03 - loss: 0.533 - ETA: 1:02 - loss: 0.542 - ETA: 1:02 - loss: 0.550 - ETA: 1:02 - loss: 0.557 - ETA: 1:01 - loss: 0.563 - ETA: 1:00 - loss: 0.569 - ETA: 59s - loss: 0.574 - ETA: 59s - loss: 0.57 - ETA: 58s - loss: 0.58 - ETA: 58s - loss: 0.58 - ETA: 57s - loss: 0.59 - ETA: 57s - loss: 0.60 - ETA: 56s - loss: 0.60 - ETA: 55s - loss: 0.61 - ETA: 55s - loss: 0.61 - ETA: 54s - loss: 0.61 - ETA: 54s - loss: 0.61 - ETA: 53s - loss: 0.62 - ETA: 53s - loss: 0.62 - ETA: 52s - loss: 0.63 - ETA: 52s - loss: 0.63 - ETA: 51s - loss: 0.63 - ETA: 51s - loss: 0.64 - ETA: 50s - loss: 0.64 - ETA: 49s - loss: 0.64 - ETA: 49s - loss: 0.65 - ETA: 48s - loss: 0.65 - ETA: 47s - loss: 0.66 - ETA: 46s - loss: 0.66 - ETA: 46s - loss: 0.66 - ETA: 45s - loss: 0.66 - ETA: 44s - loss: 0.67 - ETA: 43s - loss: 0.67 - ETA: 43s - loss: 0.67 - ETA: 42s - loss: 0.67 - ETA: 41s - loss: 0.68 - ETA: 41s - loss: 0.68 - ETA: 40s - loss: 0.68 - ETA: 39s - loss: 0.68 - ETA: 39s - loss: 0.68 - ETA: 38s - loss: 0.69 - ETA: 38s - loss: 0.69 - ETA: 37s - loss: 0.69 - ETA: 36s - loss: 0.69 - ETA: 36s - loss: 0.69 - ETA: 35s - loss: 0.69 - ETA: 34s - loss: 0.69 - ETA: 34s - loss: 0.70 - ETA: 33s - loss: 0.70 - ETA: 32s - loss: 0.70 - ETA: 32s - loss: 0.70 - ETA: 31s - loss: 0.70 - ETA: 30s - loss: 0.70 - ETA: 30s - loss: 0.70 - ETA: 29s - loss: 0.70 - ETA: 28s - loss: 0.70 - ETA: 28s - loss: 0.71 - ETA: 27s - loss: 0.71 - ETA: 26s - loss: 0.71 - ETA: 26s - loss: 0.71 - ETA: 25s - loss: 0.71 - ETA: 25s - loss: 0.71 - ETA: 24s - loss: 0.71 - ETA: 23s - loss: 0.71 - ETA: 23s - loss: 0.71 - ETA: 22s - loss: 0.72 - ETA: 22s - loss: 0.72 - ETA: 21s - loss: 0.72 - ETA: 20s - loss: 0.72 - ETA: 20s - loss: 0.72 - ETA: 19s - loss: 0.72 - ETA: 18s - loss: 0.72 - ETA: 18s - loss: 0.72 - ETA: 17s - loss: 0.72 - ETA: 16s - loss: 0.72 - ETA: 16s - loss: 0.72 - ETA: 15s - loss: 0.72 - ETA: 14s - loss: 0.72 - ETA: 14s - loss: 0.72 - ETA: 13s - loss: 0.72 - ETA: 12s - loss: 0.72 - ETA: 11s - loss: 0.72 - ETA: 11s - loss: 0.72 - ETA: 10s - loss: 0.72 - ETA: 9s - loss: 0.7300 - ETA: 9s - loss: 0.730 - ETA: 8s - loss: 0.730 - ETA: 7s - loss: 0.730 - ETA: 7s - loss: 0.730 - ETA: 6s - loss: 0.730 - ETA: 5s - loss: 0.730 - ETA: 5s - loss: 0.730 - ETA: 4s - loss: 0.731 - ETA: 3s - loss: 0.731 - ETA: 3s - loss: 0.731 - ETA: 2s - loss: 0.731 - ETA: 1s - loss: 0.731 - ETA: 1s - loss: 0.731 - ETA: 0s - loss: 0.731 - ETA: 0s - loss: 0.731 - 79s 710ms/step - loss: 0.7311 - val_loss: 0.4656\n",
      "\n",
      "Epoch 00002: val_loss improved from 0.80272 to 0.46563, saving model to clean_notebooks\\cnn_injection_transfer_weights.h5\n",
      "Epoch 3/30\n",
      "111/111 [==============================] - ETA: 51s - loss: 0.42 - ETA: 56s - loss: 0.36 - ETA: 56s - loss: 0.37 - ETA: 56s - loss: 0.38 - ETA: 55s - loss: 0.39 - ETA: 55s - loss: 0.40 - ETA: 55s - loss: 0.42 - ETA: 54s - loss: 0.46 - ETA: 55s - loss: 0.49 - ETA: 55s - loss: 0.51 - ETA: 55s - loss: 0.53 - ETA: 54s - loss: 0.54 - ETA: 54s - loss: 0.55 - ETA: 54s - loss: 0.55 - ETA: 53s - loss: 0.56 - ETA: 52s - loss: 0.56 - ETA: 51s - loss: 0.56 - ETA: 51s - loss: 0.56 - ETA: 51s - loss: 0.56 - ETA: 51s - loss: 0.56 - ETA: 51s - loss: 0.57 - ETA: 50s - loss: 0.57 - ETA: 50s - loss: 0.57 - ETA: 49s - loss: 0.57 - ETA: 49s - loss: 0.57 - ETA: 48s - loss: 0.58 - ETA: 48s - loss: 0.58 - ETA: 47s - loss: 0.58 - ETA: 46s - loss: 0.58 - ETA: 46s - loss: 0.59 - ETA: 45s - loss: 0.59 - ETA: 45s - loss: 0.59 - ETA: 44s - loss: 0.59 - ETA: 44s - loss: 0.59 - ETA: 43s - loss: 0.59 - ETA: 42s - loss: 0.59 - ETA: 42s - loss: 0.60 - ETA: 42s - loss: 0.60 - ETA: 41s - loss: 0.60 - ETA: 41s - loss: 0.60 - ETA: 41s - loss: 0.60 - ETA: 40s - loss: 0.60 - ETA: 40s - loss: 0.60 - ETA: 39s - loss: 0.60 - ETA: 39s - loss: 0.60 - ETA: 38s - loss: 0.60 - ETA: 38s - loss: 0.60 - ETA: 37s - loss: 0.60 - ETA: 36s - loss: 0.60 - ETA: 36s - loss: 0.61 - ETA: 35s - loss: 0.61 - ETA: 35s - loss: 0.61 - ETA: 34s - loss: 0.60 - ETA: 34s - loss: 0.60 - ETA: 33s - loss: 0.60 - ETA: 32s - loss: 0.60 - ETA: 32s - loss: 0.60 - ETA: 31s - loss: 0.60 - ETA: 31s - loss: 0.60 - ETA: 30s - loss: 0.60 - ETA: 29s - loss: 0.60 - ETA: 29s - loss: 0.60 - ETA: 28s - loss: 0.60 - ETA: 28s - loss: 0.60 - ETA: 27s - loss: 0.60 - ETA: 26s - loss: 0.60 - ETA: 26s - loss: 0.60 - ETA: 25s - loss: 0.60 - ETA: 25s - loss: 0.60 - ETA: 24s - loss: 0.59 - ETA: 24s - loss: 0.59 - ETA: 23s - loss: 0.59 - ETA: 23s - loss: 0.59 - ETA: 22s - loss: 0.59 - ETA: 22s - loss: 0.59 - ETA: 21s - loss: 0.59 - ETA: 21s - loss: 0.59 - ETA: 20s - loss: 0.59 - ETA: 19s - loss: 0.59 - ETA: 19s - loss: 0.59 - ETA: 18s - loss: 0.59 - ETA: 18s - loss: 0.59 - ETA: 17s - loss: 0.59 - ETA: 16s - loss: 0.59 - ETA: 16s - loss: 0.59 - ETA: 15s - loss: 0.59 - ETA: 15s - loss: 0.59 - ETA: 14s - loss: 0.59 - ETA: 13s - loss: 0.59 - ETA: 13s - loss: 0.59 - ETA: 12s - loss: 0.59 - ETA: 12s - loss: 0.59 - ETA: 11s - loss: 0.59 - ETA: 10s - loss: 0.59 - ETA: 10s - loss: 0.59 - ETA: 9s - loss: 0.5923 - ETA: 8s - loss: 0.592 - ETA: 8s - loss: 0.591 - ETA: 7s - loss: 0.591 - ETA: 6s - loss: 0.591 - ETA: 6s - loss: 0.591 - ETA: 5s - loss: 0.590 - ETA: 5s - loss: 0.590 - ETA: 4s - loss: 0.590 - ETA: 3s - loss: 0.590 - ETA: 3s - loss: 0.589 - ETA: 2s - loss: 0.589 - ETA: 1s - loss: 0.589 - ETA: 1s - loss: 0.588 - ETA: 0s - loss: 0.588 - ETA: 0s - loss: 0.588 - 77s 692ms/step - loss: 0.5882 - val_loss: 0.3119\n",
      "\n",
      "Epoch 00003: val_loss improved from 0.46563 to 0.31189, saving model to clean_notebooks\\cnn_injection_transfer_weights.h5\n",
      "Epoch 4/30\n",
      "111/111 [==============================] - ETA: 51s - loss: 0.34 - ETA: 57s - loss: 0.34 - ETA: 57s - loss: 0.32 - ETA: 56s - loss: 0.31 - ETA: 57s - loss: 0.30 - ETA: 57s - loss: 0.30 - ETA: 57s - loss: 0.30 - ETA: 56s - loss: 0.31 - ETA: 56s - loss: 0.33 - ETA: 56s - loss: 0.34 - ETA: 57s - loss: 0.35 - ETA: 57s - loss: 0.35 - ETA: 57s - loss: 0.36 - ETA: 57s - loss: 0.36 - ETA: 56s - loss: 0.37 - ETA: 56s - loss: 0.37 - ETA: 55s - loss: 0.37 - ETA: 54s - loss: 0.38 - ETA: 54s - loss: 0.38 - ETA: 53s - loss: 0.39 - ETA: 52s - loss: 0.39 - ETA: 52s - loss: 0.39 - ETA: 51s - loss: 0.40 - ETA: 50s - loss: 0.40 - ETA: 49s - loss: 0.41 - ETA: 48s - loss: 0.41 - ETA: 48s - loss: 0.41 - ETA: 48s - loss: 0.42 - ETA: 47s - loss: 0.42 - ETA: 47s - loss: 0.42 - ETA: 46s - loss: 0.43 - ETA: 46s - loss: 0.43 - ETA: 45s - loss: 0.43 - ETA: 45s - loss: 0.43 - ETA: 44s - loss: 0.43 - ETA: 43s - loss: 0.44 - ETA: 43s - loss: 0.44 - ETA: 42s - loss: 0.44 - ETA: 42s - loss: 0.44 - ETA: 41s - loss: 0.44 - ETA: 41s - loss: 0.44 - ETA: 40s - loss: 0.44 - ETA: 39s - loss: 0.45 - ETA: 38s - loss: 0.45 - ETA: 38s - loss: 0.45 - ETA: 37s - loss: 0.45 - ETA: 37s - loss: 0.45 - ETA: 36s - loss: 0.45 - ETA: 35s - loss: 0.45 - ETA: 35s - loss: 0.45 - ETA: 34s - loss: 0.45 - ETA: 34s - loss: 0.45 - ETA: 33s - loss: 0.45 - ETA: 32s - loss: 0.45 - ETA: 32s - loss: 0.45 - ETA: 31s - loss: 0.46 - ETA: 31s - loss: 0.46 - ETA: 30s - loss: 0.46 - ETA: 30s - loss: 0.46 - ETA: 29s - loss: 0.46 - ETA: 28s - loss: 0.46 - ETA: 28s - loss: 0.46 - ETA: 27s - loss: 0.46 - ETA: 27s - loss: 0.46 - ETA: 26s - loss: 0.46 - ETA: 26s - loss: 0.46 - ETA: 25s - loss: 0.46 - ETA: 24s - loss: 0.46 - ETA: 24s - loss: 0.46 - ETA: 23s - loss: 0.46 - ETA: 23s - loss: 0.46 - ETA: 22s - loss: 0.46 - ETA: 21s - loss: 0.46 - ETA: 21s - loss: 0.46 - ETA: 20s - loss: 0.46 - ETA: 20s - loss: 0.46 - ETA: 19s - loss: 0.46 - ETA: 19s - loss: 0.46 - ETA: 18s - loss: 0.46 - ETA: 17s - loss: 0.46 - ETA: 17s - loss: 0.46 - ETA: 16s - loss: 0.46 - ETA: 16s - loss: 0.46 - ETA: 15s - loss: 0.46 - ETA: 14s - loss: 0.46 - ETA: 14s - loss: 0.46 - ETA: 13s - loss: 0.46 - ETA: 13s - loss: 0.46 - ETA: 12s - loss: 0.46 - ETA: 12s - loss: 0.46 - ETA: 12s - loss: 0.46 - ETA: 11s - loss: 0.46 - ETA: 11s - loss: 0.46 - ETA: 10s - loss: 0.46 - ETA: 9s - loss: 0.4691 - ETA: 9s - loss: 0.469 - ETA: 8s - loss: 0.469 - ETA: 8s - loss: 0.469 - ETA: 7s - loss: 0.470 - ETA: 7s - loss: 0.470 - ETA: 6s - loss: 0.470 - ETA: 5s - loss: 0.470 - ETA: 5s - loss: 0.470 - ETA: 4s - loss: 0.471 - ETA: 4s - loss: 0.471 - ETA: 3s - loss: 0.471 - ETA: 2s - loss: 0.471 - ETA: 2s - loss: 0.471 - ETA: 1s - loss: 0.472 - ETA: 0s - loss: 0.472 - ETA: 0s - loss: 0.472 - 85s 770ms/step - loss: 0.4724 - val_loss: 0.2423\n",
      "\n",
      "Epoch 00004: val_loss improved from 0.31189 to 0.24233, saving model to clean_notebooks\\cnn_injection_transfer_weights.h5\n",
      "Epoch 5/30\n",
      "111/111 [==============================] - ETA: 1:19 - loss: 1.020 - ETA: 1:14 - loss: 0.786 - ETA: 1:12 - loss: 0.701 - ETA: 1:11 - loss: 0.640 - ETA: 1:11 - loss: 0.595 - ETA: 1:10 - loss: 0.557 - ETA: 1:10 - loss: 0.533 - ETA: 1:11 - loss: 0.512 - ETA: 1:12 - loss: 0.493 - ETA: 1:12 - loss: 0.479 - ETA: 1:11 - loss: 0.466 - ETA: 1:11 - loss: 0.457 - ETA: 1:09 - loss: 0.449 - ETA: 1:09 - loss: 0.443 - ETA: 1:08 - loss: 0.437 - ETA: 1:07 - loss: 0.432 - ETA: 1:06 - loss: 0.430 - ETA: 1:06 - loss: 0.428 - ETA: 1:06 - loss: 0.426 - ETA: 1:06 - loss: 0.425 - ETA: 1:05 - loss: 0.424 - ETA: 1:05 - loss: 0.424 - ETA: 1:05 - loss: 0.424 - ETA: 1:05 - loss: 0.424 - ETA: 1:04 - loss: 0.425 - ETA: 1:04 - loss: 0.425 - ETA: 1:03 - loss: 0.426 - ETA: 1:03 - loss: 0.426 - ETA: 1:03 - loss: 0.426 - ETA: 1:03 - loss: 0.427 - ETA: 1:03 - loss: 0.428 - ETA: 1:02 - loss: 0.429 - ETA: 1:01 - loss: 0.430 - ETA: 59s - loss: 0.430 - ETA: 58s - loss: 0.43 - ETA: 57s - loss: 0.43 - ETA: 56s - loss: 0.43 - ETA: 55s - loss: 0.43 - ETA: 54s - loss: 0.43 - ETA: 53s - loss: 0.43 - ETA: 52s - loss: 0.43 - ETA: 52s - loss: 0.43 - ETA: 51s - loss: 0.43 - ETA: 50s - loss: 0.43 - ETA: 49s - loss: 0.43 - ETA: 48s - loss: 0.43 - ETA: 48s - loss: 0.43 - ETA: 47s - loss: 0.43 - ETA: 46s - loss: 0.43 - ETA: 45s - loss: 0.43 - ETA: 44s - loss: 0.43 - ETA: 44s - loss: 0.43 - ETA: 43s - loss: 0.43 - ETA: 42s - loss: 0.43 - ETA: 41s - loss: 0.43 - ETA: 41s - loss: 0.43 - ETA: 40s - loss: 0.43 - ETA: 39s - loss: 0.43 - ETA: 38s - loss: 0.43 - ETA: 37s - loss: 0.43 - ETA: 37s - loss: 0.43 - ETA: 36s - loss: 0.43 - ETA: 35s - loss: 0.43 - ETA: 34s - loss: 0.43 - ETA: 34s - loss: 0.43 - ETA: 33s - loss: 0.43 - ETA: 32s - loss: 0.43 - ETA: 32s - loss: 0.43 - ETA: 31s - loss: 0.43 - ETA: 30s - loss: 0.43 - ETA: 29s - loss: 0.43 - ETA: 28s - loss: 0.43 - ETA: 28s - loss: 0.43 - ETA: 27s - loss: 0.43 - ETA: 26s - loss: 0.43 - ETA: 25s - loss: 0.43 - ETA: 25s - loss: 0.43 - ETA: 24s - loss: 0.44 - ETA: 23s - loss: 0.44 - ETA: 23s - loss: 0.44 - ETA: 22s - loss: 0.44 - ETA: 21s - loss: 0.44 - ETA: 21s - loss: 0.44 - ETA: 20s - loss: 0.44 - ETA: 19s - loss: 0.44 - ETA: 19s - loss: 0.44 - ETA: 18s - loss: 0.44 - ETA: 17s - loss: 0.44 - ETA: 16s - loss: 0.44 - ETA: 16s - loss: 0.44 - ETA: 15s - loss: 0.44 - ETA: 14s - loss: 0.44 - ETA: 13s - loss: 0.44 - ETA: 13s - loss: 0.44 - ETA: 12s - loss: 0.44 - ETA: 11s - loss: 0.44 - ETA: 10s - loss: 0.44 - ETA: 10s - loss: 0.44 - ETA: 9s - loss: 0.4472 - ETA: 8s - loss: 0.447 - ETA: 7s - loss: 0.447 - ETA: 6s - loss: 0.447 - ETA: 6s - loss: 0.447 - ETA: 5s - loss: 0.447 - ETA: 4s - loss: 0.447 - ETA: 3s - loss: 0.448 - ETA: 3s - loss: 0.448 - ETA: 2s - loss: 0.448 - ETA: 1s - loss: 0.448 - ETA: 0s - loss: 0.448 - ETA: 0s - loss: 0.448 - 93s 843ms/step - loss: 0.4481 - val_loss: 0.2253\n",
      "\n",
      "Epoch 00005: val_loss improved from 0.24233 to 0.22529, saving model to clean_notebooks\\cnn_injection_transfer_weights.h5\n",
      "Epoch 6/30\n",
      "111/111 [==============================] - ETA: 55s - loss: 0.74 - ETA: 1:01 - loss: 0.732 - ETA: 1:06 - loss: 0.671 - ETA: 1:11 - loss: 0.617 - ETA: 1:12 - loss: 0.570 - ETA: 1:12 - loss: 0.531 - ETA: 1:11 - loss: 0.501 - ETA: 1:11 - loss: 0.492 - ETA: 1:11 - loss: 0.482 - ETA: 1:09 - loss: 0.474 - ETA: 1:09 - loss: 0.470 - ETA: 1:08 - loss: 0.465 - ETA: 1:06 - loss: 0.459 - ETA: 1:05 - loss: 0.455 - ETA: 1:04 - loss: 0.451 - ETA: 1:03 - loss: 0.447 - ETA: 1:02 - loss: 0.444 - ETA: 1:01 - loss: 0.443 - ETA: 1:01 - loss: 0.442 - ETA: 1:00 - loss: 0.441 - ETA: 59s - loss: 0.440 - ETA: 58s - loss: 0.44 - ETA: 58s - loss: 0.44 - ETA: 57s - loss: 0.44 - ETA: 56s - loss: 0.44 - ETA: 56s - loss: 0.44 - ETA: 55s - loss: 0.44 - ETA: 55s - loss: 0.44 - ETA: 54s - loss: 0.44 - ETA: 53s - loss: 0.44 - ETA: 53s - loss: 0.44 - ETA: 52s - loss: 0.44 - ETA: 52s - loss: 0.44 - ETA: 51s - loss: 0.44 - ETA: 51s - loss: 0.44 - ETA: 51s - loss: 0.44 - ETA: 50s - loss: 0.44 - ETA: 50s - loss: 0.44 - ETA: 50s - loss: 0.44 - ETA: 49s - loss: 0.44 - ETA: 49s - loss: 0.44 - ETA: 48s - loss: 0.44 - ETA: 48s - loss: 0.44 - ETA: 47s - loss: 0.44 - ETA: 47s - loss: 0.44 - ETA: 47s - loss: 0.44 - ETA: 46s - loss: 0.44 - ETA: 46s - loss: 0.44 - ETA: 45s - loss: 0.44 - ETA: 44s - loss: 0.44 - ETA: 44s - loss: 0.44 - ETA: 43s - loss: 0.45 - ETA: 43s - loss: 0.45 - ETA: 42s - loss: 0.45 - ETA: 41s - loss: 0.45 - ETA: 40s - loss: 0.45 - ETA: 40s - loss: 0.45 - ETA: 39s - loss: 0.45 - ETA: 38s - loss: 0.45 - ETA: 38s - loss: 0.45 - ETA: 37s - loss: 0.45 - ETA: 36s - loss: 0.45 - ETA: 36s - loss: 0.45 - ETA: 35s - loss: 0.45 - ETA: 34s - loss: 0.45 - ETA: 33s - loss: 0.45 - ETA: 32s - loss: 0.45 - ETA: 32s - loss: 0.45 - ETA: 31s - loss: 0.45 - ETA: 30s - loss: 0.45 - ETA: 29s - loss: 0.45 - ETA: 28s - loss: 0.45 - ETA: 28s - loss: 0.45 - ETA: 27s - loss: 0.45 - ETA: 26s - loss: 0.45 - ETA: 25s - loss: 0.45 - ETA: 24s - loss: 0.45 - ETA: 24s - loss: 0.45 - ETA: 23s - loss: 0.45 - ETA: 22s - loss: 0.45 - ETA: 21s - loss: 0.45 - ETA: 20s - loss: 0.45 - ETA: 20s - loss: 0.45 - ETA: 19s - loss: 0.45 - ETA: 18s - loss: 0.45 - ETA: 17s - loss: 0.45 - ETA: 17s - loss: 0.45 - ETA: 16s - loss: 0.45 - ETA: 15s - loss: 0.44 - ETA: 14s - loss: 0.44 - ETA: 14s - loss: 0.44 - ETA: 13s - loss: 0.44 - ETA: 12s - loss: 0.44 - ETA: 12s - loss: 0.44 - ETA: 11s - loss: 0.44 - ETA: 10s - loss: 0.44 - ETA: 9s - loss: 0.4483 - ETA: 9s - loss: 0.448 - ETA: 8s - loss: 0.448 - ETA: 7s - loss: 0.447 - ETA: 7s - loss: 0.447 - ETA: 6s - loss: 0.447 - ETA: 5s - loss: 0.447 - ETA: 4s - loss: 0.447 - ETA: 4s - loss: 0.447 - ETA: 3s - loss: 0.446 - ETA: 2s - loss: 0.446 - ETA: 2s - loss: 0.446 - ETA: 1s - loss: 0.446 - ETA: 0s - loss: 0.446 - ETA: 0s - loss: 0.446 - 84s 761ms/step - loss: 0.4459 - val_loss: 0.2081\n",
      "\n",
      "Epoch 00006: val_loss improved from 0.22529 to 0.20810, saving model to clean_notebooks\\cnn_injection_transfer_weights.h5\n",
      "Epoch 7/30\n",
      "111/111 [==============================] - ETA: 56s - loss: 0.25 - ETA: 59s - loss: 0.23 - ETA: 1:03 - loss: 0.227 - ETA: 1:02 - loss: 0.229 - ETA: 1:01 - loss: 0.245 - ETA: 1:01 - loss: 0.256 - ETA: 1:02 - loss: 0.263 - ETA: 1:01 - loss: 0.283 - ETA: 1:01 - loss: 0.297 - ETA: 1:00 - loss: 0.311 - ETA: 1:00 - loss: 0.321 - ETA: 59s - loss: 0.328 - ETA: 59s - loss: 0.33 - ETA: 58s - loss: 0.33 - ETA: 57s - loss: 0.34 - ETA: 56s - loss: 0.35 - ETA: 55s - loss: 0.35 - ETA: 55s - loss: 0.35 - ETA: 54s - loss: 0.36 - ETA: 54s - loss: 0.36 - ETA: 53s - loss: 0.36 - ETA: 52s - loss: 0.37 - ETA: 51s - loss: 0.37 - ETA: 51s - loss: 0.37 - ETA: 50s - loss: 0.37 - ETA: 49s - loss: 0.37 - ETA: 49s - loss: 0.38 - ETA: 48s - loss: 0.38 - ETA: 48s - loss: 0.38 - ETA: 47s - loss: 0.38 - ETA: 46s - loss: 0.38 - ETA: 46s - loss: 0.38 - ETA: 45s - loss: 0.38 - ETA: 44s - loss: 0.38 - ETA: 44s - loss: 0.39 - ETA: 43s - loss: 0.39 - ETA: 43s - loss: 0.39 - ETA: 42s - loss: 0.39 - ETA: 42s - loss: 0.39 - ETA: 41s - loss: 0.39 - ETA: 40s - loss: 0.39 - ETA: 40s - loss: 0.39 - ETA: 39s - loss: 0.39 - ETA: 39s - loss: 0.39 - ETA: 38s - loss: 0.39 - ETA: 38s - loss: 0.39 - ETA: 37s - loss: 0.40 - ETA: 37s - loss: 0.40 - ETA: 36s - loss: 0.40 - ETA: 35s - loss: 0.40 - ETA: 35s - loss: 0.40 - ETA: 34s - loss: 0.40 - ETA: 34s - loss: 0.40 - ETA: 33s - loss: 0.40 - ETA: 32s - loss: 0.40 - ETA: 32s - loss: 0.40 - ETA: 31s - loss: 0.40 - ETA: 31s - loss: 0.40 - ETA: 30s - loss: 0.40 - ETA: 30s - loss: 0.40 - ETA: 29s - loss: 0.40 - ETA: 28s - loss: 0.40 - ETA: 28s - loss: 0.40 - ETA: 27s - loss: 0.40 - ETA: 27s - loss: 0.40 - ETA: 26s - loss: 0.40 - ETA: 25s - loss: 0.41 - ETA: 25s - loss: 0.41 - ETA: 24s - loss: 0.41 - ETA: 24s - loss: 0.41 - ETA: 23s - loss: 0.41 - ETA: 22s - loss: 0.41 - ETA: 22s - loss: 0.41 - ETA: 21s - loss: 0.41 - ETA: 21s - loss: 0.41 - ETA: 20s - loss: 0.41 - ETA: 20s - loss: 0.41 - ETA: 19s - loss: 0.41 - ETA: 18s - loss: 0.41 - ETA: 18s - loss: 0.41 - ETA: 17s - loss: 0.41 - ETA: 17s - loss: 0.41 - ETA: 16s - loss: 0.41 - ETA: 16s - loss: 0.41 - ETA: 15s - loss: 0.41 - ETA: 15s - loss: 0.41 - ETA: 14s - loss: 0.41 - ETA: 13s - loss: 0.41 - ETA: 13s - loss: 0.41 - ETA: 12s - loss: 0.41 - ETA: 12s - loss: 0.41 - ETA: 11s - loss: 0.41 - ETA: 11s - loss: 0.41 - ETA: 10s - loss: 0.41 - ETA: 10s - loss: 0.41 - ETA: 9s - loss: 0.4183 - ETA: 8s - loss: 0.418 - ETA: 8s - loss: 0.418 - ETA: 7s - loss: 0.418 - ETA: 6s - loss: 0.418 - ETA: 6s - loss: 0.418 - ETA: 5s - loss: 0.418 - ETA: 5s - loss: 0.418 - ETA: 4s - loss: 0.419 - ETA: 3s - loss: 0.419 - ETA: 3s - loss: 0.419 - ETA: 2s - loss: 0.419 - ETA: 1s - loss: 0.419 - ETA: 1s - loss: 0.419 - ETA: 0s - loss: 0.419 - ETA: 0s - loss: 0.420 - 77s 691ms/step - loss: 0.4202 - val_loss: 0.2019\n",
      "\n",
      "Epoch 00007: val_loss improved from 0.20810 to 0.20187, saving model to clean_notebooks\\cnn_injection_transfer_weights.h5\n",
      "Epoch 8/30\n",
      "111/111 [==============================] - ETA: 57s - loss: 0.45 - ETA: 1:02 - loss: 0.439 - ETA: 1:03 - loss: 0.456 - ETA: 1:04 - loss: 0.569 - ETA: 1:02 - loss: 0.613 - ETA: 1:02 - loss: 0.627 - ETA: 1:01 - loss: 0.632 - ETA: 1:00 - loss: 0.633 - ETA: 1:00 - loss: 0.628 - ETA: 59s - loss: 0.621 - ETA: 1:00 - loss: 0.611 - ETA: 1:00 - loss: 0.601 - ETA: 59s - loss: 0.592 - ETA: 59s - loss: 0.58 - ETA: 58s - loss: 0.57 - ETA: 57s - loss: 0.57 - ETA: 58s - loss: 0.56 - ETA: 57s - loss: 0.56 - ETA: 57s - loss: 0.56 - ETA: 56s - loss: 0.55 - ETA: 55s - loss: 0.55 - ETA: 55s - loss: 0.55 - ETA: 55s - loss: 0.55 - ETA: 54s - loss: 0.54 - ETA: 54s - loss: 0.54 - ETA: 53s - loss: 0.54 - ETA: 53s - loss: 0.54 - ETA: 52s - loss: 0.54 - ETA: 51s - loss: 0.53 - ETA: 50s - loss: 0.53 - ETA: 50s - loss: 0.53 - ETA: 49s - loss: 0.53 - ETA: 49s - loss: 0.53 - ETA: 48s - loss: 0.52 - ETA: 47s - loss: 0.52 - ETA: 47s - loss: 0.52 - ETA: 46s - loss: 0.52 - ETA: 45s - loss: 0.52 - ETA: 45s - loss: 0.51 - ETA: 44s - loss: 0.51 - ETA: 43s - loss: 0.51 - ETA: 43s - loss: 0.51 - ETA: 42s - loss: 0.51 - ETA: 41s - loss: 0.51 - ETA: 41s - loss: 0.50 - ETA: 40s - loss: 0.50 - ETA: 39s - loss: 0.50 - ETA: 38s - loss: 0.50 - ETA: 38s - loss: 0.50 - ETA: 37s - loss: 0.50 - ETA: 37s - loss: 0.50 - ETA: 36s - loss: 0.50 - ETA: 35s - loss: 0.49 - ETA: 35s - loss: 0.49 - ETA: 34s - loss: 0.49 - ETA: 33s - loss: 0.49 - ETA: 33s - loss: 0.49 - ETA: 32s - loss: 0.49 - ETA: 31s - loss: 0.49 - ETA: 31s - loss: 0.49 - ETA: 30s - loss: 0.49 - ETA: 29s - loss: 0.48 - ETA: 29s - loss: 0.48 - ETA: 28s - loss: 0.48 - ETA: 28s - loss: 0.48 - ETA: 27s - loss: 0.48 - ETA: 27s - loss: 0.48 - ETA: 26s - loss: 0.48 - ETA: 26s - loss: 0.48 - ETA: 25s - loss: 0.48 - ETA: 24s - loss: 0.48 - ETA: 24s - loss: 0.47 - ETA: 23s - loss: 0.47 - ETA: 23s - loss: 0.47 - ETA: 22s - loss: 0.47 - ETA: 21s - loss: 0.47 - ETA: 21s - loss: 0.47 - ETA: 20s - loss: 0.47 - ETA: 20s - loss: 0.47 - ETA: 19s - loss: 0.47 - ETA: 19s - loss: 0.47 - ETA: 18s - loss: 0.47 - ETA: 17s - loss: 0.47 - ETA: 17s - loss: 0.46 - ETA: 16s - loss: 0.46 - ETA: 16s - loss: 0.46 - ETA: 15s - loss: 0.46 - ETA: 14s - loss: 0.46 - ETA: 14s - loss: 0.46 - ETA: 13s - loss: 0.46 - ETA: 13s - loss: 0.46 - ETA: 12s - loss: 0.46 - ETA: 11s - loss: 0.46 - ETA: 11s - loss: 0.46 - ETA: 10s - loss: 0.46 - ETA: 9s - loss: 0.4637 - ETA: 9s - loss: 0.463 - ETA: 8s - loss: 0.462 - ETA: 7s - loss: 0.462 - ETA: 7s - loss: 0.462 - ETA: 6s - loss: 0.461 - ETA: 5s - loss: 0.461 - ETA: 5s - loss: 0.460 - ETA: 4s - loss: 0.460 - ETA: 3s - loss: 0.460 - ETA: 3s - loss: 0.459 - ETA: 2s - loss: 0.459 - ETA: 1s - loss: 0.458 - ETA: 1s - loss: 0.458 - ETA: 0s - loss: 0.458 - ETA: 0s - loss: 0.457 - 81s 736ms/step - loss: 0.4574 - val_loss: 0.1846\n",
      "\n",
      "Epoch 00008: val_loss improved from 0.20187 to 0.18456, saving model to clean_notebooks\\cnn_injection_transfer_weights.h5\n",
      "Epoch 9/30\n",
      "111/111 [==============================] - ETA: 1:18 - loss: 0.472 - ETA: 1:07 - loss: 0.443 - ETA: 1:08 - loss: 0.406 - ETA: 1:06 - loss: 0.384 - ETA: 1:06 - loss: 0.378 - ETA: 1:06 - loss: 0.366 - ETA: 1:06 - loss: 0.358 - ETA: 1:05 - loss: 0.351 - ETA: 1:04 - loss: 0.344 - ETA: 1:04 - loss: 0.337 - ETA: 1:03 - loss: 0.330 - ETA: 1:04 - loss: 0.325 - ETA: 1:03 - loss: 0.322 - ETA: 1:02 - loss: 0.317 - ETA: 1:02 - loss: 0.314 - ETA: 1:01 - loss: 0.312 - ETA: 1:00 - loss: 0.309 - ETA: 1:00 - loss: 0.306 - ETA: 59s - loss: 0.304 - ETA: 59s - loss: 0.30 - ETA: 58s - loss: 0.30 - ETA: 57s - loss: 0.30 - ETA: 57s - loss: 0.30 - ETA: 56s - loss: 0.30 - ETA: 56s - loss: 0.30 - ETA: 56s - loss: 0.30 - ETA: 55s - loss: 0.30 - ETA: 54s - loss: 0.31 - ETA: 53s - loss: 0.31 - ETA: 53s - loss: 0.31 - ETA: 52s - loss: 0.31 - ETA: 51s - loss: 0.31 - ETA: 51s - loss: 0.32 - ETA: 50s - loss: 0.32 - ETA: 49s - loss: 0.32 - ETA: 48s - loss: 0.32 - ETA: 47s - loss: 0.32 - ETA: 46s - loss: 0.32 - ETA: 46s - loss: 0.32 - ETA: 45s - loss: 0.33 - ETA: 44s - loss: 0.33 - ETA: 44s - loss: 0.33 - ETA: 43s - loss: 0.33 - ETA: 42s - loss: 0.33 - ETA: 41s - loss: 0.33 - ETA: 41s - loss: 0.33 - ETA: 40s - loss: 0.34 - ETA: 39s - loss: 0.34 - ETA: 38s - loss: 0.34 - ETA: 38s - loss: 0.34 - ETA: 37s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 35s - loss: 0.35 - ETA: 34s - loss: 0.35 - ETA: 34s - loss: 0.35 - ETA: 33s - loss: 0.35 - ETA: 32s - loss: 0.35 - ETA: 32s - loss: 0.35 - ETA: 31s - loss: 0.35 - ETA: 30s - loss: 0.35 - ETA: 30s - loss: 0.35 - ETA: 29s - loss: 0.35 - ETA: 28s - loss: 0.35 - ETA: 28s - loss: 0.36 - ETA: 27s - loss: 0.36 - ETA: 27s - loss: 0.36 - ETA: 26s - loss: 0.36 - ETA: 25s - loss: 0.36 - ETA: 25s - loss: 0.36 - ETA: 24s - loss: 0.36 - ETA: 24s - loss: 0.36 - ETA: 23s - loss: 0.36 - ETA: 23s - loss: 0.36 - ETA: 22s - loss: 0.36 - ETA: 21s - loss: 0.36 - ETA: 21s - loss: 0.36 - ETA: 20s - loss: 0.36 - ETA: 19s - loss: 0.36 - ETA: 19s - loss: 0.36 - ETA: 18s - loss: 0.36 - ETA: 18s - loss: 0.36 - ETA: 17s - loss: 0.36 - ETA: 16s - loss: 0.36 - ETA: 16s - loss: 0.36 - ETA: 15s - loss: 0.36 - ETA: 15s - loss: 0.36 - ETA: 14s - loss: 0.36 - ETA: 13s - loss: 0.36 - ETA: 13s - loss: 0.36 - ETA: 12s - loss: 0.36 - ETA: 12s - loss: 0.36 - ETA: 11s - loss: 0.36 - ETA: 10s - loss: 0.36 - ETA: 10s - loss: 0.36 - ETA: 9s - loss: 0.3686 - ETA: 8s - loss: 0.368 - ETA: 8s - loss: 0.368 - ETA: 7s - loss: 0.368 - ETA: 7s - loss: 0.368 - ETA: 6s - loss: 0.368 - ETA: 5s - loss: 0.369 - ETA: 5s - loss: 0.369 - ETA: 4s - loss: 0.369 - ETA: 3s - loss: 0.369 - ETA: 3s - loss: 0.369 - ETA: 2s - loss: 0.369 - ETA: 1s - loss: 0.369 - ETA: 1s - loss: 0.370 - ETA: 0s - loss: 0.370 - ETA: 0s - loss: 0.370 - 85s 762ms/step - loss: 0.3702 - val_loss: 0.1775\n",
      "\n",
      "Epoch 00009: val_loss improved from 0.18456 to 0.17748, saving model to clean_notebooks\\cnn_injection_transfer_weights.h5\n",
      "Epoch 10/30\n",
      "111/111 [==============================] - ETA: 59s - loss: 0.43 - ETA: 1:20 - loss: 0.350 - ETA: 1:13 - loss: 0.340 - ETA: 1:12 - loss: 0.327 - ETA: 1:11 - loss: 0.329 - ETA: 1:12 - loss: 0.325 - ETA: 1:18 - loss: 0.327 - ETA: 1:20 - loss: 0.331 - ETA: 1:20 - loss: 0.332 - ETA: 1:18 - loss: 0.333 - ETA: 1:18 - loss: 0.334 - ETA: 1:18 - loss: 0.334 - ETA: 1:18 - loss: 0.334 - ETA: 1:16 - loss: 0.333 - ETA: 1:14 - loss: 0.332 - ETA: 1:13 - loss: 0.330 - ETA: 1:12 - loss: 0.329 - ETA: 1:10 - loss: 0.328 - ETA: 1:08 - loss: 0.327 - ETA: 1:07 - loss: 0.328 - ETA: 1:05 - loss: 0.329 - ETA: 1:04 - loss: 0.333 - ETA: 1:03 - loss: 0.336 - ETA: 1:01 - loss: 0.338 - ETA: 1:00 - loss: 0.340 - ETA: 59s - loss: 0.342 - ETA: 58s - loss: 0.34 - ETA: 57s - loss: 0.34 - ETA: 56s - loss: 0.34 - ETA: 55s - loss: 0.34 - ETA: 54s - loss: 0.34 - ETA: 53s - loss: 0.35 - ETA: 52s - loss: 0.35 - ETA: 51s - loss: 0.35 - ETA: 50s - loss: 0.35 - ETA: 49s - loss: 0.35 - ETA: 48s - loss: 0.35 - ETA: 47s - loss: 0.35 - ETA: 47s - loss: 0.35 - ETA: 46s - loss: 0.36 - ETA: 45s - loss: 0.36 - ETA: 44s - loss: 0.36 - ETA: 43s - loss: 0.36 - ETA: 43s - loss: 0.36 - ETA: 42s - loss: 0.36 - ETA: 41s - loss: 0.36 - ETA: 41s - loss: 0.36 - ETA: 40s - loss: 0.36 - ETA: 39s - loss: 0.36 - ETA: 39s - loss: 0.36 - ETA: 38s - loss: 0.36 - ETA: 37s - loss: 0.36 - ETA: 36s - loss: 0.36 - ETA: 36s - loss: 0.36 - ETA: 35s - loss: 0.36 - ETA: 34s - loss: 0.37 - ETA: 34s - loss: 0.37 - ETA: 33s - loss: 0.37 - ETA: 33s - loss: 0.37 - ETA: 32s - loss: 0.37 - ETA: 31s - loss: 0.37 - ETA: 31s - loss: 0.37 - ETA: 30s - loss: 0.37 - ETA: 30s - loss: 0.37 - ETA: 29s - loss: 0.37 - ETA: 28s - loss: 0.37 - ETA: 28s - loss: 0.37 - ETA: 27s - loss: 0.37 - ETA: 26s - loss: 0.37 - ETA: 26s - loss: 0.37 - ETA: 25s - loss: 0.37 - ETA: 25s - loss: 0.37 - ETA: 24s - loss: 0.37 - ETA: 23s - loss: 0.37 - ETA: 23s - loss: 0.37 - ETA: 22s - loss: 0.37 - ETA: 21s - loss: 0.37 - ETA: 21s - loss: 0.37 - ETA: 20s - loss: 0.37 - ETA: 19s - loss: 0.37 - ETA: 19s - loss: 0.37 - ETA: 18s - loss: 0.37 - ETA: 17s - loss: 0.37 - ETA: 17s - loss: 0.37 - ETA: 16s - loss: 0.37 - ETA: 15s - loss: 0.37 - ETA: 15s - loss: 0.37 - ETA: 14s - loss: 0.37 - ETA: 13s - loss: 0.37 - ETA: 13s - loss: 0.37 - ETA: 12s - loss: 0.37 - ETA: 12s - loss: 0.37 - ETA: 11s - loss: 0.37 - ETA: 10s - loss: 0.37 - ETA: 10s - loss: 0.37 - ETA: 9s - loss: 0.3774 - ETA: 8s - loss: 0.377 - ETA: 8s - loss: 0.377 - ETA: 7s - loss: 0.378 - ETA: 6s - loss: 0.378 - ETA: 6s - loss: 0.378 - ETA: 5s - loss: 0.378 - ETA: 5s - loss: 0.378 - ETA: 4s - loss: 0.379 - ETA: 3s - loss: 0.379 - ETA: 3s - loss: 0.379 - ETA: 2s - loss: 0.379 - ETA: 1s - loss: 0.379 - ETA: 1s - loss: 0.379 - ETA: 0s - loss: 0.379 - ETA: 0s - loss: 0.380 - 80s 725ms/step - loss: 0.3801 - val_loss: 0.1588\n",
      "\n",
      "Epoch 00010: val_loss improved from 0.17748 to 0.15883, saving model to clean_notebooks\\cnn_injection_transfer_weights.h5\n",
      "Epoch 11/30\n",
      "111/111 [==============================] - ETA: 1:19 - loss: 0.141 - ETA: 1:12 - loss: 0.155 - ETA: 1:09 - loss: 0.150 - ETA: 1:09 - loss: 0.158 - ETA: 1:09 - loss: 0.174 - ETA: 1:08 - loss: 0.181 - ETA: 1:10 - loss: 0.190 - ETA: 1:12 - loss: 0.210 - ETA: 1:14 - loss: 0.224 - ETA: 1:15 - loss: 0.234 - ETA: 1:14 - loss: 0.241 - ETA: 1:15 - loss: 0.249 - ETA: 1:15 - loss: 0.255 - ETA: 1:16 - loss: 0.259 - ETA: 1:16 - loss: 0.264 - ETA: 1:15 - loss: 0.267 - ETA: 1:15 - loss: 0.270 - ETA: 1:14 - loss: 0.272 - ETA: 1:14 - loss: 0.275 - ETA: 1:13 - loss: 0.277 - ETA: 1:12 - loss: 0.280 - ETA: 1:11 - loss: 0.281 - ETA: 1:11 - loss: 0.282 - ETA: 1:10 - loss: 0.282 - ETA: 1:09 - loss: 0.283 - ETA: 1:08 - loss: 0.283 - ETA: 1:07 - loss: 0.283 - ETA: 1:05 - loss: 0.284 - ETA: 1:04 - loss: 0.285 - ETA: 1:03 - loss: 0.286 - ETA: 1:02 - loss: 0.286 - ETA: 1:01 - loss: 0.287 - ETA: 1:00 - loss: 0.288 - ETA: 58s - loss: 0.289 - ETA: 57s - loss: 0.29 - ETA: 56s - loss: 0.29 - ETA: 55s - loss: 0.29 - ETA: 54s - loss: 0.29 - ETA: 53s - loss: 0.29 - ETA: 53s - loss: 0.29 - ETA: 52s - loss: 0.29 - ETA: 51s - loss: 0.30 - ETA: 50s - loss: 0.30 - ETA: 49s - loss: 0.30 - ETA: 48s - loss: 0.30 - ETA: 47s - loss: 0.30 - ETA: 46s - loss: 0.30 - ETA: 46s - loss: 0.30 - ETA: 45s - loss: 0.30 - ETA: 44s - loss: 0.31 - ETA: 43s - loss: 0.31 - ETA: 42s - loss: 0.31 - ETA: 41s - loss: 0.31 - ETA: 41s - loss: 0.31 - ETA: 40s - loss: 0.31 - ETA: 39s - loss: 0.31 - ETA: 38s - loss: 0.31 - ETA: 38s - loss: 0.31 - ETA: 37s - loss: 0.31 - ETA: 36s - loss: 0.32 - ETA: 35s - loss: 0.32 - ETA: 35s - loss: 0.32 - ETA: 34s - loss: 0.32 - ETA: 33s - loss: 0.32 - ETA: 32s - loss: 0.32 - ETA: 32s - loss: 0.32 - ETA: 31s - loss: 0.32 - ETA: 30s - loss: 0.32 - ETA: 29s - loss: 0.32 - ETA: 28s - loss: 0.32 - ETA: 28s - loss: 0.32 - ETA: 27s - loss: 0.33 - ETA: 26s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 24s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 20s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 19s - loss: 0.34 - ETA: 18s - loss: 0.34 - ETA: 17s - loss: 0.34 - ETA: 16s - loss: 0.34 - ETA: 16s - loss: 0.34 - ETA: 15s - loss: 0.34 - ETA: 14s - loss: 0.34 - ETA: 14s - loss: 0.34 - ETA: 13s - loss: 0.34 - ETA: 12s - loss: 0.34 - ETA: 12s - loss: 0.34 - ETA: 11s - loss: 0.34 - ETA: 10s - loss: 0.34 - ETA: 10s - loss: 0.34 - ETA: 9s - loss: 0.3484 - ETA: 8s - loss: 0.348 - ETA: 7s - loss: 0.349 - ETA: 7s - loss: 0.349 - ETA: 6s - loss: 0.349 - ETA: 5s - loss: 0.350 - ETA: 5s - loss: 0.350 - ETA: 4s - loss: 0.350 - ETA: 4s - loss: 0.351 - ETA: 3s - loss: 0.351 - ETA: 2s - loss: 0.351 - ETA: 2s - loss: 0.352 - ETA: 1s - loss: 0.352 - ETA: 0s - loss: 0.352 - ETA: 0s - loss: 0.353 - 84s 753ms/step - loss: 0.3532 - val_loss: 0.1888\n",
      "\n",
      "Epoch 00011: val_loss did not improve from 0.15883\n",
      "Epoch 12/30\n",
      "111/111 [==============================] - ETA: 1:06 - loss: 0.424 - ETA: 1:13 - loss: 0.366 - ETA: 1:12 - loss: 0.375 - ETA: 1:09 - loss: 0.355 - ETA: 1:06 - loss: 0.344 - ETA: 1:06 - loss: 0.335 - ETA: 1:05 - loss: 0.327 - ETA: 1:04 - loss: 0.322 - ETA: 1:03 - loss: 0.329 - ETA: 1:03 - loss: 0.333 - ETA: 1:03 - loss: 0.338 - ETA: 1:03 - loss: 0.342 - ETA: 1:02 - loss: 0.345 - ETA: 1:01 - loss: 0.346 - ETA: 1:01 - loss: 0.346 - ETA: 1:00 - loss: 0.348 - ETA: 1:00 - loss: 0.349 - ETA: 1:00 - loss: 0.349 - ETA: 59s - loss: 0.350 - ETA: 59s - loss: 0.35 - ETA: 58s - loss: 0.35 - ETA: 57s - loss: 0.35 - ETA: 57s - loss: 0.35 - ETA: 56s - loss: 0.35 - ETA: 56s - loss: 0.34 - ETA: 55s - loss: 0.34 - ETA: 55s - loss: 0.34 - ETA: 54s - loss: 0.34 - ETA: 53s - loss: 0.34 - ETA: 53s - loss: 0.34 - ETA: 53s - loss: 0.34 - ETA: 52s - loss: 0.34 - ETA: 52s - loss: 0.35 - ETA: 52s - loss: 0.35 - ETA: 51s - loss: 0.35 - ETA: 51s - loss: 0.35 - ETA: 50s - loss: 0.35 - ETA: 49s - loss: 0.35 - ETA: 49s - loss: 0.35 - ETA: 48s - loss: 0.35 - ETA: 48s - loss: 0.35 - ETA: 47s - loss: 0.35 - ETA: 46s - loss: 0.35 - ETA: 46s - loss: 0.35 - ETA: 45s - loss: 0.35 - ETA: 44s - loss: 0.35 - ETA: 43s - loss: 0.35 - ETA: 43s - loss: 0.35 - ETA: 42s - loss: 0.35 - ETA: 41s - loss: 0.35 - ETA: 41s - loss: 0.35 - ETA: 40s - loss: 0.35 - ETA: 39s - loss: 0.35 - ETA: 38s - loss: 0.35 - ETA: 38s - loss: 0.35 - ETA: 37s - loss: 0.35 - ETA: 36s - loss: 0.35 - ETA: 36s - loss: 0.35 - ETA: 35s - loss: 0.35 - ETA: 34s - loss: 0.35 - ETA: 34s - loss: 0.35 - ETA: 33s - loss: 0.35 - ETA: 32s - loss: 0.35 - ETA: 32s - loss: 0.35 - ETA: 31s - loss: 0.35 - ETA: 31s - loss: 0.35 - ETA: 30s - loss: 0.35 - ETA: 29s - loss: 0.35 - ETA: 29s - loss: 0.35 - ETA: 28s - loss: 0.35 - ETA: 27s - loss: 0.35 - ETA: 27s - loss: 0.35 - ETA: 26s - loss: 0.35 - ETA: 25s - loss: 0.35 - ETA: 24s - loss: 0.35 - ETA: 24s - loss: 0.35 - ETA: 23s - loss: 0.35 - ETA: 22s - loss: 0.35 - ETA: 22s - loss: 0.35 - ETA: 21s - loss: 0.35 - ETA: 20s - loss: 0.35 - ETA: 20s - loss: 0.35 - ETA: 19s - loss: 0.35 - ETA: 18s - loss: 0.35 - ETA: 18s - loss: 0.35 - ETA: 17s - loss: 0.35 - ETA: 16s - loss: 0.35 - ETA: 16s - loss: 0.35 - ETA: 15s - loss: 0.35 - ETA: 14s - loss: 0.35 - ETA: 14s - loss: 0.35 - ETA: 13s - loss: 0.35 - ETA: 12s - loss: 0.35 - ETA: 11s - loss: 0.35 - ETA: 11s - loss: 0.35 - ETA: 10s - loss: 0.35 - ETA: 9s - loss: 0.3506 - ETA: 9s - loss: 0.350 - ETA: 8s - loss: 0.350 - ETA: 7s - loss: 0.350 - ETA: 6s - loss: 0.350 - ETA: 6s - loss: 0.350 - ETA: 5s - loss: 0.350 - ETA: 4s - loss: 0.349 - ETA: 4s - loss: 0.349 - ETA: 3s - loss: 0.349 - ETA: 2s - loss: 0.349 - ETA: 2s - loss: 0.349 - ETA: 1s - loss: 0.349 - ETA: 0s - loss: 0.349 - ETA: 0s - loss: 0.349 - 86s 773ms/step - loss: 0.3499 - val_loss: 0.1571\n",
      "\n",
      "Epoch 00012: val_loss improved from 0.15883 to 0.15709, saving model to clean_notebooks\\cnn_injection_transfer_weights.h5\n",
      "Epoch 13/30\n",
      "111/111 [==============================] - ETA: 1:06 - loss: 0.455 - ETA: 1:33 - loss: 0.383 - ETA: 1:38 - loss: 0.336 - ETA: 1:34 - loss: 0.308 - ETA: 1:34 - loss: 0.297 - ETA: 1:32 - loss: 0.286 - ETA: 1:31 - loss: 0.276 - ETA: 1:30 - loss: 0.268 - ETA: 1:31 - loss: 0.261 - ETA: 1:32 - loss: 0.255 - ETA: 1:32 - loss: 0.249 - ETA: 1:32 - loss: 0.245 - ETA: 1:31 - loss: 0.242 - ETA: 1:31 - loss: 0.238 - ETA: 1:31 - loss: 0.237 - ETA: 1:29 - loss: 0.237 - ETA: 1:29 - loss: 0.237 - ETA: 1:27 - loss: 0.238 - ETA: 1:26 - loss: 0.238 - ETA: 1:25 - loss: 0.238 - ETA: 1:24 - loss: 0.240 - ETA: 1:23 - loss: 0.242 - ETA: 1:22 - loss: 0.245 - ETA: 1:21 - loss: 0.247 - ETA: 1:20 - loss: 0.250 - ETA: 1:18 - loss: 0.252 - ETA: 1:17 - loss: 0.254 - ETA: 1:16 - loss: 0.257 - ETA: 1:14 - loss: 0.258 - ETA: 1:13 - loss: 0.260 - ETA: 1:12 - loss: 0.262 - ETA: 1:11 - loss: 0.264 - ETA: 1:09 - loss: 0.266 - ETA: 1:08 - loss: 0.268 - ETA: 1:07 - loss: 0.269 - ETA: 1:05 - loss: 0.271 - ETA: 1:04 - loss: 0.273 - ETA: 1:03 - loss: 0.276 - ETA: 1:02 - loss: 0.279 - ETA: 1:01 - loss: 0.282 - ETA: 1:00 - loss: 0.284 - ETA: 59s - loss: 0.286 - ETA: 58s - loss: 0.28 - ETA: 57s - loss: 0.29 - ETA: 56s - loss: 0.29 - ETA: 55s - loss: 0.29 - ETA: 53s - loss: 0.29 - ETA: 52s - loss: 0.29 - ETA: 51s - loss: 0.30 - ETA: 50s - loss: 0.30 - ETA: 49s - loss: 0.30 - ETA: 48s - loss: 0.30 - ETA: 47s - loss: 0.30 - ETA: 46s - loss: 0.30 - ETA: 45s - loss: 0.31 - ETA: 44s - loss: 0.31 - ETA: 43s - loss: 0.31 - ETA: 42s - loss: 0.31 - ETA: 41s - loss: 0.31 - ETA: 40s - loss: 0.31 - ETA: 39s - loss: 0.31 - ETA: 38s - loss: 0.31 - ETA: 37s - loss: 0.31 - ETA: 37s - loss: 0.32 - ETA: 36s - loss: 0.32 - ETA: 35s - loss: 0.32 - ETA: 34s - loss: 0.32 - ETA: 33s - loss: 0.32 - ETA: 32s - loss: 0.32 - ETA: 32s - loss: 0.32 - ETA: 31s - loss: 0.32 - ETA: 30s - loss: 0.32 - ETA: 29s - loss: 0.32 - ETA: 28s - loss: 0.32 - ETA: 28s - loss: 0.32 - ETA: 27s - loss: 0.32 - ETA: 26s - loss: 0.32 - ETA: 25s - loss: 0.32 - ETA: 24s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 20s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 17s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 13s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 9s - loss: 0.3366 - ETA: 8s - loss: 0.336 - ETA: 8s - loss: 0.337 - ETA: 7s - loss: 0.337 - ETA: 6s - loss: 0.337 - ETA: 5s - loss: 0.337 - ETA: 5s - loss: 0.337 - ETA: 4s - loss: 0.337 - ETA: 3s - loss: 0.337 - ETA: 2s - loss: 0.338 - ETA: 2s - loss: 0.338 - ETA: 1s - loss: 0.338 - ETA: 0s - loss: 0.338 - ETA: 0s - loss: 0.338 - 91s 819ms/step - loss: 0.3386 - val_loss: 0.1688\n",
      "\n",
      "Epoch 00013: val_loss did not improve from 0.15709\n",
      "Epoch 14/30\n",
      "111/111 [==============================] - ETA: 1:06 - loss: 0.292 - ETA: 1:04 - loss: 0.318 - ETA: 1:02 - loss: 0.335 - ETA: 1:02 - loss: 0.334 - ETA: 1:02 - loss: 0.328 - ETA: 1:02 - loss: 0.326 - ETA: 1:02 - loss: 0.324 - ETA: 1:01 - loss: 0.320 - ETA: 1:00 - loss: 0.317 - ETA: 1:00 - loss: 0.313 - ETA: 59s - loss: 0.312 - ETA: 59s - loss: 0.31 - ETA: 59s - loss: 0.31 - ETA: 58s - loss: 0.31 - ETA: 58s - loss: 0.32 - ETA: 58s - loss: 0.32 - ETA: 57s - loss: 0.32 - ETA: 57s - loss: 0.32 - ETA: 56s - loss: 0.32 - ETA: 55s - loss: 0.32 - ETA: 54s - loss: 0.32 - ETA: 53s - loss: 0.32 - ETA: 53s - loss: 0.33 - ETA: 52s - loss: 0.33 - ETA: 51s - loss: 0.33 - ETA: 51s - loss: 0.33 - ETA: 51s - loss: 0.34 - ETA: 50s - loss: 0.34 - ETA: 50s - loss: 0.34 - ETA: 49s - loss: 0.34 - ETA: 49s - loss: 0.34 - ETA: 49s - loss: 0.34 - ETA: 48s - loss: 0.34 - ETA: 47s - loss: 0.34 - ETA: 46s - loss: 0.34 - ETA: 46s - loss: 0.34 - ETA: 45s - loss: 0.34 - ETA: 45s - loss: 0.34 - ETA: 44s - loss: 0.34 - ETA: 43s - loss: 0.34 - ETA: 43s - loss: 0.34 - ETA: 42s - loss: 0.34 - ETA: 42s - loss: 0.34 - ETA: 41s - loss: 0.34 - ETA: 40s - loss: 0.34 - ETA: 40s - loss: 0.34 - ETA: 39s - loss: 0.34 - ETA: 39s - loss: 0.34 - ETA: 38s - loss: 0.34 - ETA: 37s - loss: 0.34 - ETA: 37s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 35s - loss: 0.34 - ETA: 35s - loss: 0.34 - ETA: 34s - loss: 0.34 - ETA: 33s - loss: 0.34 - ETA: 33s - loss: 0.34 - ETA: 32s - loss: 0.34 - ETA: 32s - loss: 0.34 - ETA: 31s - loss: 0.34 - ETA: 30s - loss: 0.34 - ETA: 30s - loss: 0.34 - ETA: 29s - loss: 0.34 - ETA: 29s - loss: 0.34 - ETA: 28s - loss: 0.34 - ETA: 27s - loss: 0.34 - ETA: 27s - loss: 0.34 - ETA: 26s - loss: 0.34 - ETA: 25s - loss: 0.34 - ETA: 25s - loss: 0.34 - ETA: 24s - loss: 0.34 - ETA: 24s - loss: 0.34 - ETA: 23s - loss: 0.34 - ETA: 22s - loss: 0.34 - ETA: 22s - loss: 0.34 - ETA: 21s - loss: 0.34 - ETA: 20s - loss: 0.34 - ETA: 20s - loss: 0.34 - ETA: 19s - loss: 0.34 - ETA: 19s - loss: 0.34 - ETA: 18s - loss: 0.34 - ETA: 17s - loss: 0.34 - ETA: 17s - loss: 0.34 - ETA: 16s - loss: 0.34 - ETA: 16s - loss: 0.34 - ETA: 15s - loss: 0.34 - ETA: 14s - loss: 0.34 - ETA: 14s - loss: 0.34 - ETA: 13s - loss: 0.34 - ETA: 13s - loss: 0.34 - ETA: 12s - loss: 0.34 - ETA: 11s - loss: 0.34 - ETA: 11s - loss: 0.34 - ETA: 10s - loss: 0.34 - ETA: 9s - loss: 0.3406 - ETA: 9s - loss: 0.340 - ETA: 8s - loss: 0.340 - ETA: 7s - loss: 0.340 - ETA: 7s - loss: 0.340 - ETA: 6s - loss: 0.340 - ETA: 5s - loss: 0.340 - ETA: 5s - loss: 0.340 - ETA: 4s - loss: 0.340 - ETA: 3s - loss: 0.340 - ETA: 3s - loss: 0.340 - ETA: 2s - loss: 0.340 - ETA: 1s - loss: 0.340 - ETA: 1s - loss: 0.340 - ETA: 0s - loss: 0.340 - ETA: 0s - loss: 0.340 - 79s 709ms/step - loss: 0.3409 - val_loss: 0.1594\n",
      "\n",
      "Epoch 00014: val_loss did not improve from 0.15709\n",
      "Epoch 15/30\n",
      "111/111 [==============================] - ETA: 1:03 - loss: 0.244 - ETA: 1:02 - loss: 0.262 - ETA: 1:00 - loss: 0.263 - ETA: 1:00 - loss: 0.256 - ETA: 59s - loss: 0.261 - ETA: 58s - loss: 0.26 - ETA: 58s - loss: 0.27 - ETA: 57s - loss: 0.28 - ETA: 56s - loss: 0.28 - ETA: 56s - loss: 0.29 - ETA: 55s - loss: 0.29 - ETA: 54s - loss: 0.29 - ETA: 54s - loss: 0.29 - ETA: 53s - loss: 0.29 - ETA: 52s - loss: 0.29 - ETA: 52s - loss: 0.29 - ETA: 52s - loss: 0.30 - ETA: 51s - loss: 0.30 - ETA: 50s - loss: 0.30 - ETA: 50s - loss: 0.30 - ETA: 49s - loss: 0.30 - ETA: 49s - loss: 0.30 - ETA: 49s - loss: 0.30 - ETA: 49s - loss: 0.30 - ETA: 48s - loss: 0.31 - ETA: 48s - loss: 0.31 - ETA: 48s - loss: 0.31 - ETA: 47s - loss: 0.31 - ETA: 46s - loss: 0.31 - ETA: 46s - loss: 0.31 - ETA: 45s - loss: 0.32 - ETA: 45s - loss: 0.32 - ETA: 44s - loss: 0.32 - ETA: 44s - loss: 0.32 - ETA: 43s - loss: 0.32 - ETA: 42s - loss: 0.32 - ETA: 42s - loss: 0.32 - ETA: 41s - loss: 0.32 - ETA: 41s - loss: 0.32 - ETA: 40s - loss: 0.32 - ETA: 40s - loss: 0.32 - ETA: 39s - loss: 0.32 - ETA: 39s - loss: 0.33 - ETA: 38s - loss: 0.33 - ETA: 37s - loss: 0.33 - ETA: 37s - loss: 0.33 - ETA: 36s - loss: 0.33 - ETA: 36s - loss: 0.33 - ETA: 35s - loss: 0.33 - ETA: 34s - loss: 0.33 - ETA: 34s - loss: 0.33 - ETA: 33s - loss: 0.33 - ETA: 33s - loss: 0.33 - ETA: 32s - loss: 0.33 - ETA: 32s - loss: 0.33 - ETA: 31s - loss: 0.33 - ETA: 31s - loss: 0.33 - ETA: 30s - loss: 0.33 - ETA: 30s - loss: 0.33 - ETA: 29s - loss: 0.33 - ETA: 28s - loss: 0.33 - ETA: 28s - loss: 0.33 - ETA: 27s - loss: 0.33 - ETA: 27s - loss: 0.33 - ETA: 26s - loss: 0.33 - ETA: 26s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 24s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 20s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 17s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 13s - loss: 0.33 - ETA: 13s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 9s - loss: 0.3361 - ETA: 9s - loss: 0.336 - ETA: 8s - loss: 0.336 - ETA: 8s - loss: 0.336 - ETA: 7s - loss: 0.336 - ETA: 6s - loss: 0.336 - ETA: 6s - loss: 0.336 - ETA: 5s - loss: 0.336 - ETA: 5s - loss: 0.336 - ETA: 4s - loss: 0.336 - ETA: 4s - loss: 0.336 - ETA: 3s - loss: 0.336 - ETA: 2s - loss: 0.336 - ETA: 2s - loss: 0.336 - ETA: 1s - loss: 0.336 - ETA: 1s - loss: 0.336 - ETA: 0s - loss: 0.336 - ETA: 0s - loss: 0.336 - 71s 644ms/step - loss: 0.3367 - val_loss: 0.1590\n",
      "\n",
      "Epoch 00015: val_loss did not improve from 0.15709\n",
      "Epoch 16/30\n",
      "111/111 [==============================] - ETA: 1:02 - loss: 0.225 - ETA: 1:02 - loss: 0.223 - ETA: 1:00 - loss: 0.232 - ETA: 1:01 - loss: 0.245 - ETA: 1:00 - loss: 0.251 - ETA: 59s - loss: 0.251 - ETA: 59s - loss: 0.24 - ETA: 58s - loss: 0.24 - ETA: 58s - loss: 0.25 - ETA: 58s - loss: 0.25 - ETA: 58s - loss: 0.25 - ETA: 57s - loss: 0.25 - ETA: 56s - loss: 0.25 - ETA: 56s - loss: 0.25 - ETA: 55s - loss: 0.25 - ETA: 55s - loss: 0.26 - ETA: 54s - loss: 0.26 - ETA: 53s - loss: 0.26 - ETA: 53s - loss: 0.26 - ETA: 52s - loss: 0.26 - ETA: 52s - loss: 0.26 - ETA: 51s - loss: 0.26 - ETA: 50s - loss: 0.26 - ETA: 50s - loss: 0.26 - ETA: 49s - loss: 0.26 - ETA: 49s - loss: 0.26 - ETA: 48s - loss: 0.27 - ETA: 48s - loss: 0.27 - ETA: 47s - loss: 0.27 - ETA: 47s - loss: 0.27 - ETA: 46s - loss: 0.27 - ETA: 45s - loss: 0.27 - ETA: 45s - loss: 0.27 - ETA: 44s - loss: 0.28 - ETA: 44s - loss: 0.28 - ETA: 43s - loss: 0.28 - ETA: 43s - loss: 0.28 - ETA: 42s - loss: 0.28 - ETA: 42s - loss: 0.28 - ETA: 41s - loss: 0.28 - ETA: 40s - loss: 0.29 - ETA: 40s - loss: 0.29 - ETA: 40s - loss: 0.29 - ETA: 39s - loss: 0.29 - ETA: 39s - loss: 0.29 - ETA: 38s - loss: 0.29 - ETA: 38s - loss: 0.29 - ETA: 37s - loss: 0.29 - ETA: 37s - loss: 0.29 - ETA: 36s - loss: 0.30 - ETA: 36s - loss: 0.30 - ETA: 35s - loss: 0.30 - ETA: 35s - loss: 0.30 - ETA: 34s - loss: 0.30 - ETA: 33s - loss: 0.30 - ETA: 33s - loss: 0.30 - ETA: 32s - loss: 0.30 - ETA: 32s - loss: 0.30 - ETA: 31s - loss: 0.30 - ETA: 30s - loss: 0.30 - ETA: 30s - loss: 0.31 - ETA: 29s - loss: 0.31 - ETA: 29s - loss: 0.31 - ETA: 28s - loss: 0.31 - ETA: 28s - loss: 0.31 - ETA: 27s - loss: 0.31 - ETA: 26s - loss: 0.31 - ETA: 26s - loss: 0.31 - ETA: 25s - loss: 0.31 - ETA: 24s - loss: 0.31 - ETA: 24s - loss: 0.31 - ETA: 23s - loss: 0.31 - ETA: 22s - loss: 0.31 - ETA: 22s - loss: 0.31 - ETA: 21s - loss: 0.31 - ETA: 20s - loss: 0.31 - ETA: 20s - loss: 0.31 - ETA: 19s - loss: 0.31 - ETA: 19s - loss: 0.31 - ETA: 18s - loss: 0.31 - ETA: 17s - loss: 0.31 - ETA: 17s - loss: 0.31 - ETA: 16s - loss: 0.31 - ETA: 16s - loss: 0.31 - ETA: 15s - loss: 0.31 - ETA: 15s - loss: 0.31 - ETA: 14s - loss: 0.31 - ETA: 13s - loss: 0.32 - ETA: 13s - loss: 0.32 - ETA: 12s - loss: 0.32 - ETA: 12s - loss: 0.32 - ETA: 11s - loss: 0.32 - ETA: 10s - loss: 0.32 - ETA: 10s - loss: 0.32 - ETA: 9s - loss: 0.3213 - ETA: 9s - loss: 0.321 - ETA: 8s - loss: 0.321 - ETA: 7s - loss: 0.322 - ETA: 7s - loss: 0.322 - ETA: 6s - loss: 0.322 - ETA: 6s - loss: 0.322 - ETA: 5s - loss: 0.322 - ETA: 4s - loss: 0.322 - ETA: 4s - loss: 0.323 - ETA: 3s - loss: 0.323 - ETA: 3s - loss: 0.323 - ETA: 2s - loss: 0.323 - ETA: 1s - loss: 0.324 - ETA: 1s - loss: 0.324 - ETA: 0s - loss: 0.324 - ETA: 0s - loss: 0.324 - 74s 667ms/step - loss: 0.3249 - val_loss: 0.1422\n",
      "\n",
      "Epoch 00016: val_loss improved from 0.15709 to 0.14224, saving model to clean_notebooks\\cnn_injection_transfer_weights.h5\n",
      "Epoch 17/30\n",
      "111/111 [==============================] - ETA: 1:09 - loss: 0.415 - ETA: 1:14 - loss: 0.394 - ETA: 1:09 - loss: 0.376 - ETA: 1:04 - loss: 0.353 - ETA: 1:06 - loss: 0.354 - ETA: 1:04 - loss: 0.350 - ETA: 1:04 - loss: 0.353 - ETA: 1:04 - loss: 0.360 - ETA: 1:04 - loss: 0.363 - ETA: 1:03 - loss: 0.363 - ETA: 1:02 - loss: 0.372 - ETA: 1:04 - loss: 0.378 - ETA: 1:04 - loss: 0.383 - ETA: 1:03 - loss: 0.388 - ETA: 1:03 - loss: 0.390 - ETA: 1:02 - loss: 0.391 - ETA: 1:02 - loss: 0.391 - ETA: 1:01 - loss: 0.391 - ETA: 1:01 - loss: 0.390 - ETA: 1:00 - loss: 0.390 - ETA: 1:00 - loss: 0.388 - ETA: 59s - loss: 0.388 - ETA: 58s - loss: 0.38 - ETA: 58s - loss: 0.38 - ETA: 57s - loss: 0.38 - ETA: 56s - loss: 0.38 - ETA: 56s - loss: 0.38 - ETA: 55s - loss: 0.38 - ETA: 55s - loss: 0.38 - ETA: 54s - loss: 0.38 - ETA: 54s - loss: 0.38 - ETA: 54s - loss: 0.38 - ETA: 53s - loss: 0.38 - ETA: 53s - loss: 0.38 - ETA: 52s - loss: 0.38 - ETA: 52s - loss: 0.38 - ETA: 51s - loss: 0.37 - ETA: 51s - loss: 0.37 - ETA: 50s - loss: 0.37 - ETA: 49s - loss: 0.37 - ETA: 49s - loss: 0.37 - ETA: 49s - loss: 0.37 - ETA: 48s - loss: 0.37 - ETA: 47s - loss: 0.37 - ETA: 46s - loss: 0.37 - ETA: 45s - loss: 0.37 - ETA: 44s - loss: 0.37 - ETA: 44s - loss: 0.37 - ETA: 43s - loss: 0.37 - ETA: 42s - loss: 0.37 - ETA: 41s - loss: 0.37 - ETA: 40s - loss: 0.37 - ETA: 39s - loss: 0.37 - ETA: 38s - loss: 0.37 - ETA: 38s - loss: 0.37 - ETA: 37s - loss: 0.37 - ETA: 36s - loss: 0.37 - ETA: 35s - loss: 0.37 - ETA: 34s - loss: 0.37 - ETA: 34s - loss: 0.37 - ETA: 33s - loss: 0.37 - ETA: 32s - loss: 0.37 - ETA: 31s - loss: 0.37 - ETA: 31s - loss: 0.37 - ETA: 30s - loss: 0.37 - ETA: 29s - loss: 0.37 - ETA: 28s - loss: 0.36 - ETA: 28s - loss: 0.36 - ETA: 27s - loss: 0.36 - ETA: 26s - loss: 0.36 - ETA: 26s - loss: 0.36 - ETA: 25s - loss: 0.36 - ETA: 24s - loss: 0.36 - ETA: 23s - loss: 0.36 - ETA: 23s - loss: 0.36 - ETA: 22s - loss: 0.36 - ETA: 21s - loss: 0.36 - ETA: 21s - loss: 0.36 - ETA: 20s - loss: 0.36 - ETA: 19s - loss: 0.36 - ETA: 19s - loss: 0.36 - ETA: 18s - loss: 0.36 - ETA: 17s - loss: 0.36 - ETA: 17s - loss: 0.36 - ETA: 16s - loss: 0.36 - ETA: 15s - loss: 0.36 - ETA: 15s - loss: 0.36 - ETA: 14s - loss: 0.36 - ETA: 13s - loss: 0.36 - ETA: 13s - loss: 0.36 - ETA: 12s - loss: 0.36 - ETA: 11s - loss: 0.36 - ETA: 11s - loss: 0.36 - ETA: 10s - loss: 0.36 - ETA: 9s - loss: 0.3688 - ETA: 9s - loss: 0.368 - ETA: 8s - loss: 0.368 - ETA: 8s - loss: 0.369 - ETA: 7s - loss: 0.369 - ETA: 6s - loss: 0.369 - ETA: 6s - loss: 0.369 - ETA: 5s - loss: 0.369 - ETA: 4s - loss: 0.369 - ETA: 4s - loss: 0.369 - ETA: 3s - loss: 0.369 - ETA: 3s - loss: 0.369 - ETA: 2s - loss: 0.369 - ETA: 1s - loss: 0.369 - ETA: 1s - loss: 0.369 - ETA: 0s - loss: 0.369 - ETA: 0s - loss: 0.369 - 75s 677ms/step - loss: 0.3691 - val_loss: 0.1400\n",
      "\n",
      "Epoch 00017: val_loss improved from 0.14224 to 0.14000, saving model to clean_notebooks\\cnn_injection_transfer_weights.h5\n",
      "Epoch 18/30\n",
      "111/111 [==============================] - ETA: 1:00 - loss: 0.169 - ETA: 1:06 - loss: 0.292 - ETA: 1:05 - loss: 0.302 - ETA: 1:03 - loss: 0.342 - ETA: 1:03 - loss: 0.353 - ETA: 1:01 - loss: 0.356 - ETA: 1:01 - loss: 0.361 - ETA: 1:00 - loss: 0.369 - ETA: 59s - loss: 0.375 - ETA: 58s - loss: 0.37 - ETA: 57s - loss: 0.37 - ETA: 56s - loss: 0.37 - ETA: 55s - loss: 0.37 - ETA: 55s - loss: 0.37 - ETA: 54s - loss: 0.37 - ETA: 53s - loss: 0.37 - ETA: 53s - loss: 0.37 - ETA: 52s - loss: 0.37 - ETA: 52s - loss: 0.37 - ETA: 51s - loss: 0.37 - ETA: 50s - loss: 0.37 - ETA: 50s - loss: 0.37 - ETA: 50s - loss: 0.37 - ETA: 49s - loss: 0.37 - ETA: 48s - loss: 0.37 - ETA: 48s - loss: 0.37 - ETA: 48s - loss: 0.37 - ETA: 47s - loss: 0.37 - ETA: 46s - loss: 0.37 - ETA: 46s - loss: 0.37 - ETA: 45s - loss: 0.37 - ETA: 45s - loss: 0.37 - ETA: 44s - loss: 0.37 - ETA: 44s - loss: 0.37 - ETA: 43s - loss: 0.37 - ETA: 42s - loss: 0.37 - ETA: 42s - loss: 0.37 - ETA: 41s - loss: 0.37 - ETA: 41s - loss: 0.37 - ETA: 40s - loss: 0.37 - ETA: 40s - loss: 0.37 - ETA: 40s - loss: 0.37 - ETA: 39s - loss: 0.37 - ETA: 39s - loss: 0.37 - ETA: 38s - loss: 0.36 - ETA: 38s - loss: 0.36 - ETA: 37s - loss: 0.36 - ETA: 36s - loss: 0.36 - ETA: 36s - loss: 0.36 - ETA: 35s - loss: 0.36 - ETA: 35s - loss: 0.36 - ETA: 34s - loss: 0.36 - ETA: 34s - loss: 0.36 - ETA: 33s - loss: 0.36 - ETA: 33s - loss: 0.36 - ETA: 32s - loss: 0.36 - ETA: 32s - loss: 0.36 - ETA: 31s - loss: 0.36 - ETA: 31s - loss: 0.36 - ETA: 30s - loss: 0.36 - ETA: 30s - loss: 0.36 - ETA: 29s - loss: 0.36 - ETA: 29s - loss: 0.36 - ETA: 28s - loss: 0.36 - ETA: 28s - loss: 0.35 - ETA: 27s - loss: 0.35 - ETA: 27s - loss: 0.35 - ETA: 26s - loss: 0.35 - ETA: 26s - loss: 0.35 - ETA: 25s - loss: 0.35 - ETA: 25s - loss: 0.35 - ETA: 24s - loss: 0.35 - ETA: 23s - loss: 0.35 - ETA: 23s - loss: 0.35 - ETA: 22s - loss: 0.35 - ETA: 22s - loss: 0.35 - ETA: 21s - loss: 0.35 - ETA: 21s - loss: 0.35 - ETA: 20s - loss: 0.35 - ETA: 19s - loss: 0.35 - ETA: 19s - loss: 0.35 - ETA: 18s - loss: 0.35 - ETA: 17s - loss: 0.35 - ETA: 17s - loss: 0.35 - ETA: 16s - loss: 0.35 - ETA: 15s - loss: 0.35 - ETA: 15s - loss: 0.35 - ETA: 14s - loss: 0.35 - ETA: 13s - loss: 0.35 - ETA: 13s - loss: 0.35 - ETA: 12s - loss: 0.35 - ETA: 12s - loss: 0.35 - ETA: 11s - loss: 0.35 - ETA: 10s - loss: 0.35 - ETA: 10s - loss: 0.35 - ETA: 9s - loss: 0.3524 - ETA: 8s - loss: 0.352 - ETA: 8s - loss: 0.352 - ETA: 7s - loss: 0.352 - ETA: 6s - loss: 0.352 - ETA: 6s - loss: 0.352 - ETA: 5s - loss: 0.352 - ETA: 5s - loss: 0.352 - ETA: 4s - loss: 0.352 - ETA: 3s - loss: 0.352 - ETA: 3s - loss: 0.353 - ETA: 2s - loss: 0.353 - ETA: 1s - loss: 0.353 - ETA: 1s - loss: 0.353 - ETA: 0s - loss: 0.353 - ETA: 0s - loss: 0.353 - 77s 694ms/step - loss: 0.3531 - val_loss: 0.1327\n",
      "\n",
      "Epoch 00018: val_loss improved from 0.14000 to 0.13266, saving model to clean_notebooks\\cnn_injection_transfer_weights.h5\n",
      "Epoch 19/30\n",
      "111/111 [==============================] - ETA: 1:06 - loss: 0.563 - ETA: 1:03 - loss: 0.468 - ETA: 58s - loss: 0.420 - ETA: 1:01 - loss: 0.392 - ETA: 59s - loss: 0.377 - ETA: 58s - loss: 0.36 - ETA: 57s - loss: 0.37 - ETA: 57s - loss: 0.37 - ETA: 56s - loss: 0.37 - ETA: 56s - loss: 0.37 - ETA: 55s - loss: 0.37 - ETA: 54s - loss: 0.37 - ETA: 54s - loss: 0.37 - ETA: 54s - loss: 0.36 - ETA: 53s - loss: 0.36 - ETA: 53s - loss: 0.36 - ETA: 52s - loss: 0.36 - ETA: 51s - loss: 0.36 - ETA: 51s - loss: 0.35 - ETA: 50s - loss: 0.35 - ETA: 50s - loss: 0.35 - ETA: 50s - loss: 0.35 - ETA: 50s - loss: 0.35 - ETA: 49s - loss: 0.35 - ETA: 49s - loss: 0.35 - ETA: 48s - loss: 0.35 - ETA: 47s - loss: 0.35 - ETA: 47s - loss: 0.35 - ETA: 46s - loss: 0.35 - ETA: 46s - loss: 0.35 - ETA: 46s - loss: 0.35 - ETA: 45s - loss: 0.34 - ETA: 45s - loss: 0.34 - ETA: 45s - loss: 0.34 - ETA: 44s - loss: 0.34 - ETA: 44s - loss: 0.34 - ETA: 43s - loss: 0.34 - ETA: 43s - loss: 0.34 - ETA: 42s - loss: 0.34 - ETA: 41s - loss: 0.34 - ETA: 41s - loss: 0.34 - ETA: 40s - loss: 0.34 - ETA: 39s - loss: 0.34 - ETA: 39s - loss: 0.34 - ETA: 38s - loss: 0.34 - ETA: 38s - loss: 0.34 - ETA: 37s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 35s - loss: 0.34 - ETA: 35s - loss: 0.34 - ETA: 34s - loss: 0.34 - ETA: 33s - loss: 0.34 - ETA: 33s - loss: 0.35 - ETA: 32s - loss: 0.35 - ETA: 31s - loss: 0.35 - ETA: 31s - loss: 0.35 - ETA: 30s - loss: 0.35 - ETA: 30s - loss: 0.35 - ETA: 29s - loss: 0.35 - ETA: 28s - loss: 0.35 - ETA: 28s - loss: 0.35 - ETA: 27s - loss: 0.35 - ETA: 27s - loss: 0.35 - ETA: 26s - loss: 0.35 - ETA: 25s - loss: 0.35 - ETA: 25s - loss: 0.35 - ETA: 24s - loss: 0.35 - ETA: 24s - loss: 0.35 - ETA: 23s - loss: 0.35 - ETA: 23s - loss: 0.35 - ETA: 22s - loss: 0.35 - ETA: 21s - loss: 0.35 - ETA: 21s - loss: 0.35 - ETA: 20s - loss: 0.35 - ETA: 20s - loss: 0.35 - ETA: 19s - loss: 0.35 - ETA: 18s - loss: 0.35 - ETA: 18s - loss: 0.35 - ETA: 17s - loss: 0.35 - ETA: 17s - loss: 0.35 - ETA: 16s - loss: 0.35 - ETA: 16s - loss: 0.35 - ETA: 15s - loss: 0.35 - ETA: 14s - loss: 0.35 - ETA: 14s - loss: 0.35 - ETA: 13s - loss: 0.35 - ETA: 13s - loss: 0.35 - ETA: 12s - loss: 0.35 - ETA: 12s - loss: 0.35 - ETA: 11s - loss: 0.35 - ETA: 10s - loss: 0.35 - ETA: 10s - loss: 0.35 - ETA: 9s - loss: 0.3533 - ETA: 9s - loss: 0.353 - ETA: 8s - loss: 0.352 - ETA: 8s - loss: 0.352 - ETA: 7s - loss: 0.352 - ETA: 6s - loss: 0.352 - ETA: 6s - loss: 0.352 - ETA: 5s - loss: 0.352 - ETA: 5s - loss: 0.352 - ETA: 4s - loss: 0.352 - ETA: 4s - loss: 0.351 - ETA: 3s - loss: 0.351 - ETA: 2s - loss: 0.351 - ETA: 2s - loss: 0.351 - ETA: 1s - loss: 0.351 - ETA: 1s - loss: 0.351 - ETA: 0s - loss: 0.351 - ETA: 0s - loss: 0.351 - 71s 637ms/step - loss: 0.3509 - val_loss: 0.1408\n",
      "\n",
      "Epoch 00019: val_loss did not improve from 0.13266\n",
      "Epoch 20/30\n",
      "111/111 [==============================] - ETA: 1:01 - loss: 0.858 - ETA: 58s - loss: 0.739 - ETA: 59s - loss: 0.70 - ETA: 1:00 - loss: 0.663 - ETA: 1:01 - loss: 0.624 - ETA: 1:01 - loss: 0.595 - ETA: 1:02 - loss: 0.569 - ETA: 1:02 - loss: 0.544 - ETA: 1:02 - loss: 0.522 - ETA: 1:01 - loss: 0.502 - ETA: 1:02 - loss: 0.484 - ETA: 1:02 - loss: 0.473 - ETA: 1:01 - loss: 0.463 - ETA: 1:01 - loss: 0.453 - ETA: 1:01 - loss: 0.445 - ETA: 1:00 - loss: 0.438 - ETA: 1:00 - loss: 0.432 - ETA: 1:00 - loss: 0.426 - ETA: 59s - loss: 0.420 - ETA: 59s - loss: 0.41 - ETA: 59s - loss: 0.41 - ETA: 58s - loss: 0.40 - ETA: 58s - loss: 0.40 - ETA: 58s - loss: 0.40 - ETA: 58s - loss: 0.39 - ETA: 57s - loss: 0.39 - ETA: 56s - loss: 0.39 - ETA: 56s - loss: 0.39 - ETA: 55s - loss: 0.39 - ETA: 55s - loss: 0.38 - ETA: 54s - loss: 0.38 - ETA: 54s - loss: 0.38 - ETA: 54s - loss: 0.38 - ETA: 53s - loss: 0.38 - ETA: 52s - loss: 0.38 - ETA: 52s - loss: 0.38 - ETA: 51s - loss: 0.38 - ETA: 50s - loss: 0.37 - ETA: 50s - loss: 0.37 - ETA: 49s - loss: 0.37 - ETA: 48s - loss: 0.37 - ETA: 47s - loss: 0.37 - ETA: 47s - loss: 0.37 - ETA: 46s - loss: 0.37 - ETA: 45s - loss: 0.37 - ETA: 44s - loss: 0.37 - ETA: 43s - loss: 0.37 - ETA: 43s - loss: 0.37 - ETA: 42s - loss: 0.37 - ETA: 41s - loss: 0.37 - ETA: 41s - loss: 0.37 - ETA: 40s - loss: 0.37 - ETA: 39s - loss: 0.37 - ETA: 38s - loss: 0.37 - ETA: 38s - loss: 0.37 - ETA: 37s - loss: 0.37 - ETA: 36s - loss: 0.37 - ETA: 36s - loss: 0.37 - ETA: 35s - loss: 0.37 - ETA: 34s - loss: 0.37 - ETA: 34s - loss: 0.37 - ETA: 33s - loss: 0.37 - ETA: 32s - loss: 0.37 - ETA: 31s - loss: 0.37 - ETA: 31s - loss: 0.37 - ETA: 30s - loss: 0.37 - ETA: 29s - loss: 0.37 - ETA: 28s - loss: 0.37 - ETA: 28s - loss: 0.37 - ETA: 27s - loss: 0.37 - ETA: 26s - loss: 0.37 - ETA: 26s - loss: 0.37 - ETA: 25s - loss: 0.37 - ETA: 24s - loss: 0.37 - ETA: 24s - loss: 0.37 - ETA: 23s - loss: 0.37 - ETA: 22s - loss: 0.37 - ETA: 21s - loss: 0.37 - ETA: 21s - loss: 0.37 - ETA: 20s - loss: 0.37 - ETA: 19s - loss: 0.37 - ETA: 19s - loss: 0.37 - ETA: 18s - loss: 0.37 - ETA: 17s - loss: 0.37 - ETA: 17s - loss: 0.37 - ETA: 16s - loss: 0.37 - ETA: 15s - loss: 0.37 - ETA: 15s - loss: 0.37 - ETA: 14s - loss: 0.37 - ETA: 13s - loss: 0.37 - ETA: 13s - loss: 0.37 - ETA: 12s - loss: 0.37 - ETA: 11s - loss: 0.37 - ETA: 11s - loss: 0.37 - ETA: 10s - loss: 0.37 - ETA: 9s - loss: 0.3702 - ETA: 9s - loss: 0.370 - ETA: 8s - loss: 0.370 - ETA: 7s - loss: 0.370 - ETA: 7s - loss: 0.370 - ETA: 6s - loss: 0.369 - ETA: 5s - loss: 0.369 - ETA: 5s - loss: 0.369 - ETA: 4s - loss: 0.369 - ETA: 3s - loss: 0.369 - ETA: 3s - loss: 0.369 - ETA: 2s - loss: 0.369 - ETA: 1s - loss: 0.369 - ETA: 1s - loss: 0.369 - ETA: 0s - loss: 0.369 - ETA: 0s - loss: 0.369 - 79s 715ms/step - loss: 0.3692 - val_loss: 0.1461\n",
      "\n",
      "Epoch 00020: val_loss did not improve from 0.13266\n",
      "Epoch 21/30\n",
      "111/111 [==============================] - ETA: 1:11 - loss: 0.073 - ETA: 1:07 - loss: 0.163 - ETA: 1:05 - loss: 0.202 - ETA: 1:05 - loss: 0.215 - ETA: 1:05 - loss: 0.216 - ETA: 1:04 - loss: 0.217 - ETA: 1:05 - loss: 0.221 - ETA: 1:05 - loss: 0.222 - ETA: 1:05 - loss: 0.224 - ETA: 1:07 - loss: 0.223 - ETA: 1:10 - loss: 0.222 - ETA: 1:10 - loss: 0.222 - ETA: 1:12 - loss: 0.222 - ETA: 1:12 - loss: 0.221 - ETA: 1:12 - loss: 0.220 - ETA: 1:13 - loss: 0.221 - ETA: 1:12 - loss: 0.228 - ETA: 1:12 - loss: 0.236 - ETA: 1:11 - loss: 0.243 - ETA: 1:09 - loss: 0.250 - ETA: 1:08 - loss: 0.257 - ETA: 1:07 - loss: 0.263 - ETA: 1:06 - loss: 0.268 - ETA: 1:05 - loss: 0.274 - ETA: 1:04 - loss: 0.279 - ETA: 1:03 - loss: 0.283 - ETA: 1:02 - loss: 0.287 - ETA: 1:01 - loss: 0.290 - ETA: 1:00 - loss: 0.293 - ETA: 59s - loss: 0.295 - ETA: 58s - loss: 0.29 - ETA: 56s - loss: 0.29 - ETA: 55s - loss: 0.30 - ETA: 54s - loss: 0.30 - ETA: 53s - loss: 0.30 - ETA: 52s - loss: 0.30 - ETA: 51s - loss: 0.30 - ETA: 50s - loss: 0.30 - ETA: 49s - loss: 0.30 - ETA: 48s - loss: 0.30 - ETA: 48s - loss: 0.31 - ETA: 47s - loss: 0.31 - ETA: 46s - loss: 0.31 - ETA: 45s - loss: 0.31 - ETA: 44s - loss: 0.31 - ETA: 43s - loss: 0.31 - ETA: 42s - loss: 0.31 - ETA: 42s - loss: 0.31 - ETA: 41s - loss: 0.31 - ETA: 40s - loss: 0.31 - ETA: 39s - loss: 0.31 - ETA: 39s - loss: 0.31 - ETA: 38s - loss: 0.31 - ETA: 37s - loss: 0.31 - ETA: 36s - loss: 0.31 - ETA: 36s - loss: 0.31 - ETA: 35s - loss: 0.31 - ETA: 34s - loss: 0.31 - ETA: 33s - loss: 0.31 - ETA: 33s - loss: 0.31 - ETA: 32s - loss: 0.31 - ETA: 31s - loss: 0.31 - ETA: 31s - loss: 0.31 - ETA: 30s - loss: 0.32 - ETA: 29s - loss: 0.32 - ETA: 28s - loss: 0.32 - ETA: 28s - loss: 0.32 - ETA: 27s - loss: 0.32 - ETA: 26s - loss: 0.32 - ETA: 26s - loss: 0.32 - ETA: 25s - loss: 0.32 - ETA: 24s - loss: 0.32 - ETA: 24s - loss: 0.32 - ETA: 23s - loss: 0.32 - ETA: 22s - loss: 0.32 - ETA: 22s - loss: 0.32 - ETA: 21s - loss: 0.32 - ETA: 20s - loss: 0.32 - ETA: 20s - loss: 0.32 - ETA: 19s - loss: 0.32 - ETA: 18s - loss: 0.32 - ETA: 18s - loss: 0.32 - ETA: 17s - loss: 0.32 - ETA: 16s - loss: 0.32 - ETA: 16s - loss: 0.32 - ETA: 15s - loss: 0.32 - ETA: 15s - loss: 0.32 - ETA: 14s - loss: 0.32 - ETA: 13s - loss: 0.32 - ETA: 13s - loss: 0.32 - ETA: 12s - loss: 0.32 - ETA: 11s - loss: 0.32 - ETA: 11s - loss: 0.32 - ETA: 10s - loss: 0.32 - ETA: 9s - loss: 0.3255 - ETA: 9s - loss: 0.325 - ETA: 8s - loss: 0.326 - ETA: 8s - loss: 0.326 - ETA: 7s - loss: 0.326 - ETA: 6s - loss: 0.326 - ETA: 6s - loss: 0.327 - ETA: 5s - loss: 0.327 - ETA: 4s - loss: 0.327 - ETA: 4s - loss: 0.327 - ETA: 3s - loss: 0.328 - ETA: 3s - loss: 0.328 - ETA: 2s - loss: 0.328 - ETA: 1s - loss: 0.328 - ETA: 1s - loss: 0.328 - ETA: 0s - loss: 0.328 - ETA: 0s - loss: 0.329 - 75s 677ms/step - loss: 0.3292 - val_loss: 0.1343\n",
      "\n",
      "Epoch 00021: val_loss did not improve from 0.13266\n",
      "Epoch 22/30\n",
      "111/111 [==============================] - ETA: 1:04 - loss: 0.180 - ETA: 1:10 - loss: 0.198 - ETA: 1:15 - loss: 0.231 - ETA: 1:12 - loss: 0.241 - ETA: 1:12 - loss: 0.241 - ETA: 1:12 - loss: 0.260 - ETA: 1:10 - loss: 0.292 - ETA: 1:08 - loss: 0.311 - ETA: 1:07 - loss: 0.321 - ETA: 1:06 - loss: 0.336 - ETA: 1:05 - loss: 0.346 - ETA: 1:04 - loss: 0.353 - ETA: 1:03 - loss: 0.357 - ETA: 1:02 - loss: 0.360 - ETA: 1:02 - loss: 0.362 - ETA: 1:01 - loss: 0.363 - ETA: 1:01 - loss: 0.363 - ETA: 1:01 - loss: 0.363 - ETA: 1:01 - loss: 0.364 - ETA: 1:01 - loss: 0.365 - ETA: 1:01 - loss: 0.365 - ETA: 1:00 - loss: 0.365 - ETA: 1:00 - loss: 0.365 - ETA: 1:00 - loss: 0.364 - ETA: 1:00 - loss: 0.363 - ETA: 59s - loss: 0.362 - ETA: 59s - loss: 0.36 - ETA: 58s - loss: 0.35 - ETA: 57s - loss: 0.35 - ETA: 56s - loss: 0.35 - ETA: 55s - loss: 0.35 - ETA: 55s - loss: 0.35 - ETA: 54s - loss: 0.35 - ETA: 53s - loss: 0.35 - ETA: 52s - loss: 0.35 - ETA: 52s - loss: 0.35 - ETA: 51s - loss: 0.34 - ETA: 51s - loss: 0.34 - ETA: 50s - loss: 0.34 - ETA: 50s - loss: 0.34 - ETA: 49s - loss: 0.34 - ETA: 49s - loss: 0.34 - ETA: 48s - loss: 0.34 - ETA: 47s - loss: 0.34 - ETA: 47s - loss: 0.34 - ETA: 46s - loss: 0.34 - ETA: 45s - loss: 0.34 - ETA: 45s - loss: 0.34 - ETA: 44s - loss: 0.34 - ETA: 43s - loss: 0.33 - ETA: 42s - loss: 0.33 - ETA: 41s - loss: 0.33 - ETA: 40s - loss: 0.33 - ETA: 40s - loss: 0.33 - ETA: 39s - loss: 0.33 - ETA: 38s - loss: 0.33 - ETA: 37s - loss: 0.33 - ETA: 36s - loss: 0.33 - ETA: 36s - loss: 0.33 - ETA: 35s - loss: 0.33 - ETA: 34s - loss: 0.33 - ETA: 33s - loss: 0.33 - ETA: 32s - loss: 0.33 - ETA: 32s - loss: 0.33 - ETA: 31s - loss: 0.33 - ETA: 30s - loss: 0.33 - ETA: 29s - loss: 0.33 - ETA: 29s - loss: 0.33 - ETA: 28s - loss: 0.33 - ETA: 27s - loss: 0.33 - ETA: 26s - loss: 0.33 - ETA: 26s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 24s - loss: 0.33 - ETA: 24s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 20s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 17s - loss: 0.33 - ETA: 17s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 13s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 9s - loss: 0.3333 - ETA: 9s - loss: 0.333 - ETA: 8s - loss: 0.333 - ETA: 7s - loss: 0.333 - ETA: 7s - loss: 0.333 - ETA: 6s - loss: 0.333 - ETA: 5s - loss: 0.333 - ETA: 5s - loss: 0.333 - ETA: 4s - loss: 0.333 - ETA: 3s - loss: 0.333 - ETA: 3s - loss: 0.333 - ETA: 2s - loss: 0.333 - ETA: 1s - loss: 0.333 - ETA: 1s - loss: 0.333 - ETA: 0s - loss: 0.333 - ETA: 0s - loss: 0.333 - 80s 719ms/step - loss: 0.3336 - val_loss: 0.1240\n",
      "\n",
      "Epoch 00022: val_loss improved from 0.13266 to 0.12397, saving model to clean_notebooks\\cnn_injection_transfer_weights.h5\n",
      "Epoch 23/30\n",
      "111/111 [==============================] - ETA: 1:02 - loss: 0.284 - ETA: 1:10 - loss: 0.380 - ETA: 1:08 - loss: 0.510 - ETA: 1:06 - loss: 0.553 - ETA: 1:06 - loss: 0.565 - ETA: 1:05 - loss: 0.560 - ETA: 1:05 - loss: 0.554 - ETA: 1:04 - loss: 0.562 - ETA: 1:03 - loss: 0.562 - ETA: 1:02 - loss: 0.567 - ETA: 1:01 - loss: 0.568 - ETA: 1:00 - loss: 0.567 - ETA: 59s - loss: 0.565 - ETA: 58s - loss: 0.56 - ETA: 57s - loss: 0.56 - ETA: 57s - loss: 0.55 - ETA: 56s - loss: 0.55 - ETA: 56s - loss: 0.55 - ETA: 55s - loss: 0.54 - ETA: 54s - loss: 0.54 - ETA: 54s - loss: 0.54 - ETA: 53s - loss: 0.54 - ETA: 52s - loss: 0.54 - ETA: 52s - loss: 0.54 - ETA: 51s - loss: 0.54 - ETA: 50s - loss: 0.54 - ETA: 50s - loss: 0.53 - ETA: 50s - loss: 0.53 - ETA: 49s - loss: 0.53 - ETA: 49s - loss: 0.53 - ETA: 48s - loss: 0.52 - ETA: 47s - loss: 0.52 - ETA: 47s - loss: 0.52 - ETA: 47s - loss: 0.51 - ETA: 46s - loss: 0.51 - ETA: 46s - loss: 0.51 - ETA: 45s - loss: 0.51 - ETA: 45s - loss: 0.50 - ETA: 44s - loss: 0.50 - ETA: 44s - loss: 0.50 - ETA: 43s - loss: 0.50 - ETA: 43s - loss: 0.50 - ETA: 42s - loss: 0.49 - ETA: 41s - loss: 0.49 - ETA: 41s - loss: 0.49 - ETA: 40s - loss: 0.49 - ETA: 40s - loss: 0.49 - ETA: 39s - loss: 0.49 - ETA: 39s - loss: 0.48 - ETA: 38s - loss: 0.48 - ETA: 37s - loss: 0.48 - ETA: 37s - loss: 0.48 - ETA: 36s - loss: 0.48 - ETA: 35s - loss: 0.48 - ETA: 35s - loss: 0.47 - ETA: 34s - loss: 0.47 - ETA: 33s - loss: 0.47 - ETA: 33s - loss: 0.47 - ETA: 32s - loss: 0.47 - ETA: 32s - loss: 0.47 - ETA: 31s - loss: 0.47 - ETA: 30s - loss: 0.47 - ETA: 30s - loss: 0.47 - ETA: 29s - loss: 0.47 - ETA: 29s - loss: 0.46 - ETA: 28s - loss: 0.46 - ETA: 27s - loss: 0.46 - ETA: 27s - loss: 0.46 - ETA: 26s - loss: 0.46 - ETA: 26s - loss: 0.46 - ETA: 25s - loss: 0.46 - ETA: 24s - loss: 0.46 - ETA: 24s - loss: 0.46 - ETA: 23s - loss: 0.46 - ETA: 22s - loss: 0.46 - ETA: 22s - loss: 0.46 - ETA: 21s - loss: 0.46 - ETA: 21s - loss: 0.46 - ETA: 20s - loss: 0.45 - ETA: 19s - loss: 0.45 - ETA: 19s - loss: 0.45 - ETA: 18s - loss: 0.45 - ETA: 17s - loss: 0.45 - ETA: 17s - loss: 0.45 - ETA: 16s - loss: 0.45 - ETA: 15s - loss: 0.45 - ETA: 15s - loss: 0.45 - ETA: 14s - loss: 0.45 - ETA: 14s - loss: 0.45 - ETA: 13s - loss: 0.45 - ETA: 12s - loss: 0.45 - ETA: 12s - loss: 0.44 - ETA: 11s - loss: 0.44 - ETA: 10s - loss: 0.44 - ETA: 10s - loss: 0.44 - ETA: 9s - loss: 0.4461 - ETA: 9s - loss: 0.445 - ETA: 8s - loss: 0.444 - ETA: 7s - loss: 0.444 - ETA: 7s - loss: 0.443 - ETA: 6s - loss: 0.442 - ETA: 5s - loss: 0.442 - ETA: 5s - loss: 0.441 - ETA: 4s - loss: 0.440 - ETA: 3s - loss: 0.440 - ETA: 3s - loss: 0.439 - ETA: 2s - loss: 0.438 - ETA: 1s - loss: 0.438 - ETA: 1s - loss: 0.437 - ETA: 0s - loss: 0.437 - ETA: 0s - loss: 0.436 - 78s 706ms/step - loss: 0.4360 - val_loss: 0.1416\n",
      "\n",
      "Epoch 00023: val_loss did not improve from 0.12397\n",
      "Epoch 24/30\n",
      "111/111 [==============================] - ETA: 1:04 - loss: 0.303 - ETA: 1:01 - loss: 0.260 - ETA: 1:01 - loss: 0.250 - ETA: 1:01 - loss: 0.263 - ETA: 1:02 - loss: 0.269 - ETA: 1:03 - loss: 0.270 - ETA: 1:04 - loss: 0.273 - ETA: 1:04 - loss: 0.282 - ETA: 1:03 - loss: 0.287 - ETA: 1:03 - loss: 0.292 - ETA: 1:02 - loss: 0.295 - ETA: 1:02 - loss: 0.298 - ETA: 1:01 - loss: 0.302 - ETA: 1:01 - loss: 0.304 - ETA: 1:00 - loss: 0.306 - ETA: 1:00 - loss: 0.311 - ETA: 59s - loss: 0.314 - ETA: 58s - loss: 0.31 - ETA: 58s - loss: 0.31 - ETA: 57s - loss: 0.32 - ETA: 56s - loss: 0.32 - ETA: 55s - loss: 0.32 - ETA: 54s - loss: 0.32 - ETA: 53s - loss: 0.32 - ETA: 53s - loss: 0.32 - ETA: 52s - loss: 0.32 - ETA: 51s - loss: 0.32 - ETA: 50s - loss: 0.32 - ETA: 49s - loss: 0.32 - ETA: 49s - loss: 0.32 - ETA: 48s - loss: 0.32 - ETA: 47s - loss: 0.32 - ETA: 46s - loss: 0.32 - ETA: 46s - loss: 0.32 - ETA: 45s - loss: 0.32 - ETA: 44s - loss: 0.32 - ETA: 44s - loss: 0.32 - ETA: 43s - loss: 0.32 - ETA: 43s - loss: 0.32 - ETA: 42s - loss: 0.32 - ETA: 41s - loss: 0.32 - ETA: 41s - loss: 0.32 - ETA: 40s - loss: 0.32 - ETA: 39s - loss: 0.32 - ETA: 39s - loss: 0.32 - ETA: 38s - loss: 0.32 - ETA: 38s - loss: 0.32 - ETA: 37s - loss: 0.33 - ETA: 36s - loss: 0.33 - ETA: 36s - loss: 0.33 - ETA: 35s - loss: 0.33 - ETA: 34s - loss: 0.33 - ETA: 34s - loss: 0.33 - ETA: 33s - loss: 0.33 - ETA: 33s - loss: 0.33 - ETA: 32s - loss: 0.33 - ETA: 31s - loss: 0.33 - ETA: 31s - loss: 0.33 - ETA: 30s - loss: 0.33 - ETA: 30s - loss: 0.33 - ETA: 29s - loss: 0.33 - ETA: 28s - loss: 0.33 - ETA: 28s - loss: 0.33 - ETA: 27s - loss: 0.33 - ETA: 27s - loss: 0.33 - ETA: 26s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 24s - loss: 0.33 - ETA: 24s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 20s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 17s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 13s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 9s - loss: 0.3298 - ETA: 9s - loss: 0.329 - ETA: 8s - loss: 0.329 - ETA: 8s - loss: 0.329 - ETA: 7s - loss: 0.329 - ETA: 7s - loss: 0.329 - ETA: 6s - loss: 0.329 - ETA: 5s - loss: 0.329 - ETA: 5s - loss: 0.328 - ETA: 4s - loss: 0.328 - ETA: 4s - loss: 0.328 - ETA: 3s - loss: 0.328 - ETA: 2s - loss: 0.328 - ETA: 2s - loss: 0.328 - ETA: 1s - loss: 0.328 - ETA: 1s - loss: 0.328 - ETA: 0s - loss: 0.328 - ETA: 0s - loss: 0.328 - 74s 669ms/step - loss: 0.3281 - val_loss: 0.1286\n",
      "\n",
      "Epoch 00024: val_loss did not improve from 0.12397\n",
      "Epoch 25/30\n",
      "111/111 [==============================] - ETA: 1:31 - loss: 0.222 - ETA: 1:23 - loss: 0.257 - ETA: 1:22 - loss: 0.255 - ETA: 1:24 - loss: 0.256 - ETA: 1:23 - loss: 0.253 - ETA: 1:20 - loss: 0.259 - ETA: 1:19 - loss: 0.261 - ETA: 1:17 - loss: 0.264 - ETA: 1:15 - loss: 0.264 - ETA: 1:14 - loss: 0.264 - ETA: 1:12 - loss: 0.264 - ETA: 1:10 - loss: 0.263 - ETA: 1:09 - loss: 0.266 - ETA: 1:07 - loss: 0.270 - ETA: 1:05 - loss: 0.274 - ETA: 1:04 - loss: 0.277 - ETA: 1:03 - loss: 0.280 - ETA: 1:02 - loss: 0.283 - ETA: 1:01 - loss: 0.285 - ETA: 1:00 - loss: 0.288 - ETA: 1:00 - loss: 0.290 - ETA: 59s - loss: 0.292 - ETA: 58s - loss: 0.29 - ETA: 57s - loss: 0.29 - ETA: 56s - loss: 0.30 - ETA: 55s - loss: 0.30 - ETA: 54s - loss: 0.30 - ETA: 53s - loss: 0.30 - ETA: 52s - loss: 0.30 - ETA: 51s - loss: 0.31 - ETA: 51s - loss: 0.31 - ETA: 50s - loss: 0.31 - ETA: 49s - loss: 0.31 - ETA: 49s - loss: 0.31 - ETA: 48s - loss: 0.31 - ETA: 47s - loss: 0.31 - ETA: 46s - loss: 0.31 - ETA: 45s - loss: 0.31 - ETA: 45s - loss: 0.31 - ETA: 44s - loss: 0.31 - ETA: 43s - loss: 0.32 - ETA: 43s - loss: 0.32 - ETA: 42s - loss: 0.32 - ETA: 41s - loss: 0.32 - ETA: 41s - loss: 0.32 - ETA: 40s - loss: 0.32 - ETA: 39s - loss: 0.32 - ETA: 39s - loss: 0.32 - ETA: 38s - loss: 0.32 - ETA: 37s - loss: 0.32 - ETA: 37s - loss: 0.32 - ETA: 36s - loss: 0.32 - ETA: 35s - loss: 0.32 - ETA: 35s - loss: 0.32 - ETA: 34s - loss: 0.32 - ETA: 33s - loss: 0.32 - ETA: 33s - loss: 0.32 - ETA: 32s - loss: 0.32 - ETA: 32s - loss: 0.32 - ETA: 31s - loss: 0.32 - ETA: 30s - loss: 0.32 - ETA: 30s - loss: 0.32 - ETA: 29s - loss: 0.32 - ETA: 28s - loss: 0.32 - ETA: 28s - loss: 0.32 - ETA: 27s - loss: 0.32 - ETA: 26s - loss: 0.32 - ETA: 26s - loss: 0.32 - ETA: 25s - loss: 0.32 - ETA: 25s - loss: 0.32 - ETA: 24s - loss: 0.32 - ETA: 23s - loss: 0.32 - ETA: 23s - loss: 0.32 - ETA: 22s - loss: 0.32 - ETA: 22s - loss: 0.32 - ETA: 21s - loss: 0.32 - ETA: 20s - loss: 0.32 - ETA: 20s - loss: 0.32 - ETA: 19s - loss: 0.32 - ETA: 18s - loss: 0.32 - ETA: 18s - loss: 0.32 - ETA: 17s - loss: 0.32 - ETA: 17s - loss: 0.32 - ETA: 16s - loss: 0.32 - ETA: 15s - loss: 0.32 - ETA: 15s - loss: 0.32 - ETA: 14s - loss: 0.32 - ETA: 13s - loss: 0.32 - ETA: 13s - loss: 0.32 - ETA: 12s - loss: 0.32 - ETA: 12s - loss: 0.32 - ETA: 11s - loss: 0.32 - ETA: 10s - loss: 0.32 - ETA: 10s - loss: 0.32 - ETA: 9s - loss: 0.3235 - ETA: 9s - loss: 0.323 - ETA: 8s - loss: 0.323 - ETA: 7s - loss: 0.323 - ETA: 7s - loss: 0.323 - ETA: 6s - loss: 0.323 - ETA: 6s - loss: 0.323 - ETA: 5s - loss: 0.323 - ETA: 4s - loss: 0.323 - ETA: 4s - loss: 0.323 - ETA: 3s - loss: 0.323 - ETA: 3s - loss: 0.323 - ETA: 2s - loss: 0.323 - ETA: 1s - loss: 0.323 - ETA: 1s - loss: 0.322 - ETA: 0s - loss: 0.322 - ETA: 0s - loss: 0.322 - 74s 664ms/step - loss: 0.3228 - val_loss: 0.1397\n",
      "\n",
      "Epoch 00025: val_loss did not improve from 0.12397\n",
      "Epoch 26/30\n",
      "111/111 [==============================] - ETA: 59s - loss: 0.04 - ETA: 1:06 - loss: 0.095 - ETA: 1:04 - loss: 0.139 - ETA: 1:01 - loss: 0.193 - ETA: 1:00 - loss: 0.216 - ETA: 59s - loss: 0.232 - ETA: 1:00 - loss: 0.241 - ETA: 1:00 - loss: 0.251 - ETA: 1:00 - loss: 0.257 - ETA: 59s - loss: 0.262 - ETA: 58s - loss: 0.26 - ETA: 57s - loss: 0.26 - ETA: 56s - loss: 0.27 - ETA: 55s - loss: 0.27 - ETA: 54s - loss: 0.27 - ETA: 53s - loss: 0.27 - ETA: 53s - loss: 0.27 - ETA: 52s - loss: 0.27 - ETA: 52s - loss: 0.27 - ETA: 51s - loss: 0.27 - ETA: 51s - loss: 0.27 - ETA: 50s - loss: 0.27 - ETA: 50s - loss: 0.27 - ETA: 50s - loss: 0.27 - ETA: 50s - loss: 0.27 - ETA: 50s - loss: 0.27 - ETA: 49s - loss: 0.27 - ETA: 49s - loss: 0.27 - ETA: 48s - loss: 0.27 - ETA: 47s - loss: 0.27 - ETA: 47s - loss: 0.27 - ETA: 46s - loss: 0.27 - ETA: 45s - loss: 0.27 - ETA: 45s - loss: 0.27 - ETA: 44s - loss: 0.27 - ETA: 44s - loss: 0.27 - ETA: 43s - loss: 0.27 - ETA: 43s - loss: 0.27 - ETA: 42s - loss: 0.27 - ETA: 41s - loss: 0.27 - ETA: 41s - loss: 0.27 - ETA: 40s - loss: 0.27 - ETA: 40s - loss: 0.27 - ETA: 39s - loss: 0.27 - ETA: 39s - loss: 0.27 - ETA: 38s - loss: 0.27 - ETA: 37s - loss: 0.27 - ETA: 37s - loss: 0.27 - ETA: 36s - loss: 0.27 - ETA: 36s - loss: 0.27 - ETA: 35s - loss: 0.27 - ETA: 35s - loss: 0.27 - ETA: 34s - loss: 0.27 - ETA: 34s - loss: 0.27 - ETA: 33s - loss: 0.27 - ETA: 33s - loss: 0.27 - ETA: 33s - loss: 0.27 - ETA: 32s - loss: 0.27 - ETA: 31s - loss: 0.27 - ETA: 31s - loss: 0.27 - ETA: 30s - loss: 0.27 - ETA: 30s - loss: 0.27 - ETA: 29s - loss: 0.27 - ETA: 29s - loss: 0.27 - ETA: 28s - loss: 0.27 - ETA: 28s - loss: 0.27 - ETA: 27s - loss: 0.27 - ETA: 26s - loss: 0.27 - ETA: 26s - loss: 0.27 - ETA: 25s - loss: 0.27 - ETA: 25s - loss: 0.27 - ETA: 24s - loss: 0.27 - ETA: 23s - loss: 0.27 - ETA: 23s - loss: 0.27 - ETA: 22s - loss: 0.27 - ETA: 22s - loss: 0.27 - ETA: 21s - loss: 0.27 - ETA: 20s - loss: 0.27 - ETA: 20s - loss: 0.27 - ETA: 19s - loss: 0.27 - ETA: 18s - loss: 0.27 - ETA: 18s - loss: 0.27 - ETA: 17s - loss: 0.27 - ETA: 16s - loss: 0.27 - ETA: 16s - loss: 0.27 - ETA: 15s - loss: 0.27 - ETA: 14s - loss: 0.28 - ETA: 14s - loss: 0.28 - ETA: 13s - loss: 0.28 - ETA: 13s - loss: 0.28 - ETA: 12s - loss: 0.28 - ETA: 11s - loss: 0.28 - ETA: 11s - loss: 0.28 - ETA: 10s - loss: 0.28 - ETA: 10s - loss: 0.28 - ETA: 9s - loss: 0.2830 - ETA: 8s - loss: 0.283 - ETA: 8s - loss: 0.283 - ETA: 7s - loss: 0.284 - ETA: 6s - loss: 0.284 - ETA: 6s - loss: 0.284 - ETA: 5s - loss: 0.285 - ETA: 4s - loss: 0.285 - ETA: 4s - loss: 0.286 - ETA: 3s - loss: 0.286 - ETA: 3s - loss: 0.286 - ETA: 2s - loss: 0.287 - ETA: 1s - loss: 0.287 - ETA: 1s - loss: 0.287 - ETA: 0s - loss: 0.287 - ETA: 0s - loss: 0.288 - 75s 677ms/step - loss: 0.2885 - val_loss: 0.1291\n",
      "\n",
      "Epoch 00026: val_loss did not improve from 0.12397\n",
      "Epoch 27/30\n",
      "111/111 [==============================] - ETA: 56s - loss: 0.46 - ETA: 1:07 - loss: 0.441 - ETA: 1:04 - loss: 0.471 - ETA: 1:02 - loss: 0.467 - ETA: 1:01 - loss: 0.452 - ETA: 59s - loss: 0.441 - ETA: 58s - loss: 0.42 - ETA: 58s - loss: 0.42 - ETA: 57s - loss: 0.41 - ETA: 56s - loss: 0.40 - ETA: 56s - loss: 0.39 - ETA: 55s - loss: 0.38 - ETA: 54s - loss: 0.38 - ETA: 54s - loss: 0.37 - ETA: 53s - loss: 0.37 - ETA: 52s - loss: 0.36 - ETA: 51s - loss: 0.36 - ETA: 51s - loss: 0.36 - ETA: 51s - loss: 0.35 - ETA: 50s - loss: 0.35 - ETA: 49s - loss: 0.35 - ETA: 49s - loss: 0.35 - ETA: 48s - loss: 0.34 - ETA: 48s - loss: 0.34 - ETA: 47s - loss: 0.34 - ETA: 47s - loss: 0.34 - ETA: 46s - loss: 0.34 - ETA: 45s - loss: 0.34 - ETA: 45s - loss: 0.33 - ETA: 44s - loss: 0.33 - ETA: 44s - loss: 0.33 - ETA: 43s - loss: 0.33 - ETA: 43s - loss: 0.33 - ETA: 42s - loss: 0.33 - ETA: 42s - loss: 0.33 - ETA: 41s - loss: 0.33 - ETA: 41s - loss: 0.33 - ETA: 40s - loss: 0.33 - ETA: 40s - loss: 0.33 - ETA: 39s - loss: 0.33 - ETA: 39s - loss: 0.33 - ETA: 38s - loss: 0.33 - ETA: 38s - loss: 0.33 - ETA: 37s - loss: 0.33 - ETA: 37s - loss: 0.33 - ETA: 36s - loss: 0.33 - ETA: 36s - loss: 0.32 - ETA: 35s - loss: 0.33 - ETA: 35s - loss: 0.33 - ETA: 34s - loss: 0.33 - ETA: 33s - loss: 0.33 - ETA: 33s - loss: 0.33 - ETA: 32s - loss: 0.33 - ETA: 32s - loss: 0.33 - ETA: 31s - loss: 0.33 - ETA: 31s - loss: 0.33 - ETA: 30s - loss: 0.33 - ETA: 30s - loss: 0.33 - ETA: 29s - loss: 0.33 - ETA: 28s - loss: 0.33 - ETA: 28s - loss: 0.33 - ETA: 27s - loss: 0.33 - ETA: 27s - loss: 0.33 - ETA: 26s - loss: 0.33 - ETA: 26s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 24s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 20s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 17s - loss: 0.33 - ETA: 17s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 13s - loss: 0.33 - ETA: 13s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 9s - loss: 0.3357 - ETA: 9s - loss: 0.335 - ETA: 8s - loss: 0.335 - ETA: 7s - loss: 0.335 - ETA: 7s - loss: 0.335 - ETA: 6s - loss: 0.335 - ETA: 6s - loss: 0.335 - ETA: 5s - loss: 0.335 - ETA: 5s - loss: 0.335 - ETA: 4s - loss: 0.335 - ETA: 4s - loss: 0.335 - ETA: 3s - loss: 0.335 - ETA: 2s - loss: 0.335 - ETA: 2s - loss: 0.335 - ETA: 1s - loss: 0.335 - ETA: 1s - loss: 0.335 - ETA: 0s - loss: 0.335 - ETA: 0s - loss: 0.335 - 71s 645ms/step - loss: 0.3349 - val_loss: 0.1417\n",
      "\n",
      "Epoch 00027: val_loss did not improve from 0.12397\n",
      "Epoch 00027: early stopping\n",
      "4/4 [==============================] - ETA: 7s - loss: 0.139 - ETA: 4s - loss: 0.133 - ETA: 2s - loss: 0.120 - ETA: 0s - loss: 0.160 - 8s 2s/step - loss: 0.1607\n",
      "Val Score:  0.16073524951934814\n",
      "====================================================================================\n",
      "\n",
      "\n",
      "Training on Fold:  2\n",
      "Epoch 1/30\n",
      "111/111 [==============================] - ETA: 28:23 - loss: 1.13 - ETA: 1:02 - loss: 0.9675 - ETA: 1:01 - loss: 0.927 - ETA: 1:02 - loss: 0.882 - ETA: 1:02 - loss: 0.844 - ETA: 1:01 - loss: 0.835 - ETA: 1:01 - loss: 0.825 - ETA: 1:00 - loss: 0.826 - ETA: 59s - loss: 0.829 - ETA: 58s - loss: 0.83 - ETA: 57s - loss: 0.83 - ETA: 58s - loss: 0.83 - ETA: 57s - loss: 0.83 - ETA: 57s - loss: 0.83 - ETA: 57s - loss: 0.83 - ETA: 56s - loss: 0.83 - ETA: 55s - loss: 0.84 - ETA: 54s - loss: 0.84 - ETA: 53s - loss: 0.84 - ETA: 52s - loss: 0.84 - ETA: 52s - loss: 0.84 - ETA: 51s - loss: 0.84 - ETA: 50s - loss: 0.84 - ETA: 49s - loss: 0.84 - ETA: 48s - loss: 0.85 - ETA: 48s - loss: 0.85 - ETA: 47s - loss: 0.86 - ETA: 47s - loss: 0.86 - ETA: 46s - loss: 0.87 - ETA: 45s - loss: 0.87 - ETA: 45s - loss: 0.87 - ETA: 45s - loss: 0.88 - ETA: 44s - loss: 0.88 - ETA: 43s - loss: 0.88 - ETA: 43s - loss: 0.88 - ETA: 42s - loss: 0.89 - ETA: 42s - loss: 0.89 - ETA: 41s - loss: 0.89 - ETA: 41s - loss: 0.89 - ETA: 40s - loss: 0.89 - ETA: 39s - loss: 0.89 - ETA: 39s - loss: 0.89 - ETA: 38s - loss: 0.89 - ETA: 38s - loss: 0.90 - ETA: 37s - loss: 0.90 - ETA: 37s - loss: 0.90 - ETA: 36s - loss: 0.90 - ETA: 36s - loss: 0.90 - ETA: 35s - loss: 0.90 - ETA: 34s - loss: 0.90 - ETA: 34s - loss: 0.90 - ETA: 33s - loss: 0.90 - ETA: 33s - loss: 0.91 - ETA: 32s - loss: 0.91 - ETA: 32s - loss: 0.91 - ETA: 31s - loss: 0.91 - ETA: 30s - loss: 0.91 - ETA: 30s - loss: 0.91 - ETA: 29s - loss: 0.91 - ETA: 29s - loss: 0.91 - ETA: 28s - loss: 0.91 - ETA: 28s - loss: 0.91 - ETA: 27s - loss: 0.91 - ETA: 27s - loss: 0.91 - ETA: 26s - loss: 0.91 - ETA: 26s - loss: 0.91 - ETA: 25s - loss: 0.91 - ETA: 24s - loss: 0.91 - ETA: 24s - loss: 0.92 - ETA: 23s - loss: 0.92 - ETA: 23s - loss: 0.92 - ETA: 23s - loss: 0.92 - ETA: 23s - loss: 0.92 - ETA: 22s - loss: 0.92 - ETA: 22s - loss: 0.92 - ETA: 21s - loss: 0.92 - ETA: 21s - loss: 0.92 - ETA: 20s - loss: 0.92 - ETA: 20s - loss: 0.92 - ETA: 19s - loss: 0.92 - ETA: 18s - loss: 0.92 - ETA: 18s - loss: 0.92 - ETA: 17s - loss: 0.92 - ETA: 17s - loss: 0.92 - ETA: 16s - loss: 0.92 - ETA: 15s - loss: 0.92 - ETA: 15s - loss: 0.92 - ETA: 14s - loss: 0.92 - ETA: 13s - loss: 0.92 - ETA: 13s - loss: 0.92 - ETA: 12s - loss: 0.92 - ETA: 12s - loss: 0.92 - ETA: 11s - loss: 0.92 - ETA: 10s - loss: 0.92 - ETA: 10s - loss: 0.92 - ETA: 9s - loss: 0.9225 - ETA: 8s - loss: 0.922 - ETA: 8s - loss: 0.922 - ETA: 7s - loss: 0.922 - ETA: 6s - loss: 0.922 - ETA: 6s - loss: 0.922 - ETA: 5s - loss: 0.922 - ETA: 5s - loss: 0.922 - ETA: 4s - loss: 0.922 - ETA: 3s - loss: 0.921 - ETA: 3s - loss: 0.921 - ETA: 2s - loss: 0.921 - ETA: 1s - loss: 0.921 - ETA: 1s - loss: 0.920 - ETA: 0s - loss: 0.920 - ETA: 0s - loss: 0.920 - 94s 716ms/step - loss: 0.9204 - val_loss: 0.5051\n",
      "\n",
      "Epoch 00001: val_loss did not improve from 0.12397\n",
      "Epoch 2/30\n",
      "111/111 [==============================] - ETA: 58s - loss: 0.31 - ETA: 1:03 - loss: 0.373 - ETA: 1:02 - loss: 0.428 - ETA: 1:00 - loss: 0.481 - ETA: 1:00 - loss: 0.520 - ETA: 1:00 - loss: 0.554 - ETA: 59s - loss: 0.584 - ETA: 58s - loss: 0.60 - ETA: 57s - loss: 0.62 - ETA: 57s - loss: 0.64 - ETA: 57s - loss: 0.67 - ETA: 56s - loss: 0.68 - ETA: 56s - loss: 0.70 - ETA: 55s - loss: 0.71 - ETA: 55s - loss: 0.72 - ETA: 54s - loss: 0.73 - ETA: 53s - loss: 0.74 - ETA: 53s - loss: 0.74 - ETA: 52s - loss: 0.75 - ETA: 51s - loss: 0.75 - ETA: 51s - loss: 0.75 - ETA: 50s - loss: 0.76 - ETA: 49s - loss: 0.76 - ETA: 49s - loss: 0.77 - ETA: 48s - loss: 0.77 - ETA: 47s - loss: 0.78 - ETA: 47s - loss: 0.78 - ETA: 46s - loss: 0.78 - ETA: 46s - loss: 0.79 - ETA: 45s - loss: 0.79 - ETA: 45s - loss: 0.79 - ETA: 44s - loss: 0.80 - ETA: 43s - loss: 0.80 - ETA: 43s - loss: 0.80 - ETA: 42s - loss: 0.80 - ETA: 42s - loss: 0.81 - ETA: 41s - loss: 0.81 - ETA: 41s - loss: 0.81 - ETA: 40s - loss: 0.81 - ETA: 40s - loss: 0.81 - ETA: 39s - loss: 0.81 - ETA: 39s - loss: 0.81 - ETA: 38s - loss: 0.81 - ETA: 38s - loss: 0.81 - ETA: 37s - loss: 0.81 - ETA: 37s - loss: 0.81 - ETA: 36s - loss: 0.81 - ETA: 36s - loss: 0.81 - ETA: 35s - loss: 0.81 - ETA: 35s - loss: 0.81 - ETA: 34s - loss: 0.81 - ETA: 33s - loss: 0.81 - ETA: 33s - loss: 0.80 - ETA: 32s - loss: 0.80 - ETA: 32s - loss: 0.80 - ETA: 31s - loss: 0.80 - ETA: 30s - loss: 0.80 - ETA: 30s - loss: 0.80 - ETA: 29s - loss: 0.80 - ETA: 29s - loss: 0.80 - ETA: 28s - loss: 0.80 - ETA: 27s - loss: 0.80 - ETA: 27s - loss: 0.80 - ETA: 26s - loss: 0.80 - ETA: 26s - loss: 0.80 - ETA: 25s - loss: 0.79 - ETA: 24s - loss: 0.79 - ETA: 24s - loss: 0.79 - ETA: 23s - loss: 0.79 - ETA: 23s - loss: 0.79 - ETA: 22s - loss: 0.79 - ETA: 22s - loss: 0.79 - ETA: 21s - loss: 0.79 - ETA: 21s - loss: 0.79 - ETA: 20s - loss: 0.79 - ETA: 19s - loss: 0.78 - ETA: 19s - loss: 0.78 - ETA: 18s - loss: 0.78 - ETA: 18s - loss: 0.78 - ETA: 17s - loss: 0.78 - ETA: 17s - loss: 0.78 - ETA: 16s - loss: 0.78 - ETA: 16s - loss: 0.78 - ETA: 15s - loss: 0.78 - ETA: 15s - loss: 0.77 - ETA: 14s - loss: 0.77 - ETA: 13s - loss: 0.77 - ETA: 13s - loss: 0.77 - ETA: 12s - loss: 0.77 - ETA: 12s - loss: 0.77 - ETA: 11s - loss: 0.77 - ETA: 11s - loss: 0.77 - ETA: 10s - loss: 0.76 - ETA: 10s - loss: 0.76 - ETA: 9s - loss: 0.7673 - ETA: 8s - loss: 0.766 - ETA: 8s - loss: 0.764 - ETA: 7s - loss: 0.763 - ETA: 7s - loss: 0.762 - ETA: 6s - loss: 0.761 - ETA: 6s - loss: 0.760 - ETA: 5s - loss: 0.759 - ETA: 4s - loss: 0.758 - ETA: 4s - loss: 0.757 - ETA: 3s - loss: 0.756 - ETA: 3s - loss: 0.754 - ETA: 2s - loss: 0.753 - ETA: 1s - loss: 0.752 - ETA: 1s - loss: 0.751 - ETA: 0s - loss: 0.750 - ETA: 0s - loss: 0.749 - 76s 682ms/step - loss: 0.7483 - val_loss: 0.2244\n",
      "\n",
      "Epoch 00002: val_loss did not improve from 0.12397\n",
      "Epoch 3/30\n",
      "111/111 [==============================] - ETA: 1:04 - loss: 0.271 - ETA: 1:05 - loss: 0.349 - ETA: 1:06 - loss: 0.357 - ETA: 1:05 - loss: 0.368 - ETA: 1:04 - loss: 0.379 - ETA: 1:05 - loss: 0.384 - ETA: 1:05 - loss: 0.385 - ETA: 1:04 - loss: 0.385 - ETA: 1:03 - loss: 0.382 - ETA: 1:03 - loss: 0.378 - ETA: 1:03 - loss: 0.379 - ETA: 1:02 - loss: 0.381 - ETA: 1:01 - loss: 0.386 - ETA: 1:01 - loss: 0.389 - ETA: 1:00 - loss: 0.390 - ETA: 1:00 - loss: 0.391 - ETA: 59s - loss: 0.391 - ETA: 59s - loss: 0.39 - ETA: 58s - loss: 0.39 - ETA: 58s - loss: 0.39 - ETA: 57s - loss: 0.39 - ETA: 56s - loss: 0.39 - ETA: 55s - loss: 0.39 - ETA: 55s - loss: 0.39 - ETA: 54s - loss: 0.39 - ETA: 53s - loss: 0.39 - ETA: 53s - loss: 0.39 - ETA: 52s - loss: 0.39 - ETA: 52s - loss: 0.40 - ETA: 51s - loss: 0.40 - ETA: 50s - loss: 0.40 - ETA: 49s - loss: 0.40 - ETA: 48s - loss: 0.40 - ETA: 48s - loss: 0.40 - ETA: 47s - loss: 0.41 - ETA: 46s - loss: 0.41 - ETA: 46s - loss: 0.41 - ETA: 45s - loss: 0.41 - ETA: 44s - loss: 0.41 - ETA: 43s - loss: 0.41 - ETA: 43s - loss: 0.41 - ETA: 42s - loss: 0.41 - ETA: 42s - loss: 0.41 - ETA: 41s - loss: 0.41 - ETA: 40s - loss: 0.41 - ETA: 40s - loss: 0.42 - ETA: 39s - loss: 0.42 - ETA: 39s - loss: 0.42 - ETA: 38s - loss: 0.42 - ETA: 38s - loss: 0.42 - ETA: 37s - loss: 0.42 - ETA: 36s - loss: 0.42 - ETA: 36s - loss: 0.42 - ETA: 35s - loss: 0.42 - ETA: 34s - loss: 0.42 - ETA: 34s - loss: 0.42 - ETA: 33s - loss: 0.42 - ETA: 32s - loss: 0.42 - ETA: 32s - loss: 0.42 - ETA: 31s - loss: 0.42 - ETA: 30s - loss: 0.42 - ETA: 30s - loss: 0.42 - ETA: 29s - loss: 0.42 - ETA: 28s - loss: 0.42 - ETA: 28s - loss: 0.42 - ETA: 27s - loss: 0.42 - ETA: 26s - loss: 0.42 - ETA: 26s - loss: 0.42 - ETA: 25s - loss: 0.42 - ETA: 24s - loss: 0.42 - ETA: 24s - loss: 0.42 - ETA: 23s - loss: 0.42 - ETA: 23s - loss: 0.42 - ETA: 22s - loss: 0.42 - ETA: 21s - loss: 0.42 - ETA: 21s - loss: 0.42 - ETA: 20s - loss: 0.42 - ETA: 19s - loss: 0.42 - ETA: 19s - loss: 0.42 - ETA: 18s - loss: 0.42 - ETA: 18s - loss: 0.42 - ETA: 17s - loss: 0.42 - ETA: 16s - loss: 0.42 - ETA: 16s - loss: 0.42 - ETA: 15s - loss: 0.42 - ETA: 15s - loss: 0.42 - ETA: 14s - loss: 0.42 - ETA: 13s - loss: 0.42 - ETA: 13s - loss: 0.42 - ETA: 12s - loss: 0.42 - ETA: 12s - loss: 0.42 - ETA: 11s - loss: 0.42 - ETA: 10s - loss: 0.42 - ETA: 10s - loss: 0.42 - ETA: 9s - loss: 0.4287 - ETA: 9s - loss: 0.428 - ETA: 8s - loss: 0.429 - ETA: 7s - loss: 0.429 - ETA: 7s - loss: 0.429 - ETA: 6s - loss: 0.429 - ETA: 6s - loss: 0.429 - ETA: 5s - loss: 0.429 - ETA: 4s - loss: 0.429 - ETA: 4s - loss: 0.429 - ETA: 3s - loss: 0.429 - ETA: 2s - loss: 0.429 - ETA: 2s - loss: 0.430 - ETA: 1s - loss: 0.430 - ETA: 1s - loss: 0.430 - ETA: 0s - loss: 0.430 - ETA: 0s - loss: 0.430 - 74s 668ms/step - loss: 0.4311 - val_loss: 0.1622\n",
      "\n",
      "Epoch 00003: val_loss did not improve from 0.12397\n",
      "Epoch 4/30\n",
      "111/111 [==============================] - ETA: 1:18 - loss: 0.640 - ETA: 1:12 - loss: 0.502 - ETA: 1:14 - loss: 0.451 - ETA: 1:15 - loss: 0.413 - ETA: 1:16 - loss: 0.384 - ETA: 1:14 - loss: 0.366 - ETA: 1:14 - loss: 0.350 - ETA: 1:12 - loss: 0.337 - ETA: 1:11 - loss: 0.332 - ETA: 1:10 - loss: 0.329 - ETA: 1:08 - loss: 0.331 - ETA: 1:07 - loss: 0.334 - ETA: 1:06 - loss: 0.339 - ETA: 1:05 - loss: 0.343 - ETA: 1:06 - loss: 0.345 - ETA: 1:05 - loss: 0.349 - ETA: 1:04 - loss: 0.352 - ETA: 1:03 - loss: 0.355 - ETA: 1:02 - loss: 0.358 - ETA: 1:01 - loss: 0.360 - ETA: 1:00 - loss: 0.361 - ETA: 59s - loss: 0.363 - ETA: 58s - loss: 0.36 - ETA: 58s - loss: 0.36 - ETA: 56s - loss: 0.36 - ETA: 55s - loss: 0.36 - ETA: 54s - loss: 0.36 - ETA: 54s - loss: 0.36 - ETA: 53s - loss: 0.36 - ETA: 52s - loss: 0.36 - ETA: 51s - loss: 0.37 - ETA: 50s - loss: 0.37 - ETA: 49s - loss: 0.37 - ETA: 49s - loss: 0.37 - ETA: 48s - loss: 0.37 - ETA: 47s - loss: 0.38 - ETA: 47s - loss: 0.38 - ETA: 46s - loss: 0.38 - ETA: 45s - loss: 0.38 - ETA: 45s - loss: 0.38 - ETA: 44s - loss: 0.39 - ETA: 44s - loss: 0.39 - ETA: 43s - loss: 0.39 - ETA: 42s - loss: 0.39 - ETA: 42s - loss: 0.39 - ETA: 41s - loss: 0.39 - ETA: 40s - loss: 0.39 - ETA: 40s - loss: 0.39 - ETA: 39s - loss: 0.39 - ETA: 39s - loss: 0.39 - ETA: 38s - loss: 0.39 - ETA: 37s - loss: 0.39 - ETA: 36s - loss: 0.40 - ETA: 36s - loss: 0.40 - ETA: 35s - loss: 0.40 - ETA: 34s - loss: 0.40 - ETA: 34s - loss: 0.40 - ETA: 33s - loss: 0.40 - ETA: 32s - loss: 0.40 - ETA: 31s - loss: 0.40 - ETA: 31s - loss: 0.40 - ETA: 30s - loss: 0.40 - ETA: 29s - loss: 0.40 - ETA: 29s - loss: 0.40 - ETA: 28s - loss: 0.40 - ETA: 27s - loss: 0.41 - ETA: 27s - loss: 0.41 - ETA: 26s - loss: 0.41 - ETA: 26s - loss: 0.41 - ETA: 25s - loss: 0.41 - ETA: 24s - loss: 0.41 - ETA: 24s - loss: 0.41 - ETA: 23s - loss: 0.41 - ETA: 22s - loss: 0.41 - ETA: 22s - loss: 0.41 - ETA: 21s - loss: 0.41 - ETA: 20s - loss: 0.41 - ETA: 20s - loss: 0.41 - ETA: 19s - loss: 0.41 - ETA: 18s - loss: 0.41 - ETA: 18s - loss: 0.41 - ETA: 17s - loss: 0.41 - ETA: 17s - loss: 0.41 - ETA: 16s - loss: 0.41 - ETA: 15s - loss: 0.41 - ETA: 15s - loss: 0.41 - ETA: 14s - loss: 0.42 - ETA: 13s - loss: 0.42 - ETA: 13s - loss: 0.42 - ETA: 12s - loss: 0.42 - ETA: 12s - loss: 0.42 - ETA: 11s - loss: 0.42 - ETA: 10s - loss: 0.42 - ETA: 10s - loss: 0.42 - ETA: 9s - loss: 0.4233 - ETA: 9s - loss: 0.423 - ETA: 8s - loss: 0.424 - ETA: 7s - loss: 0.424 - ETA: 7s - loss: 0.424 - ETA: 6s - loss: 0.425 - ETA: 6s - loss: 0.425 - ETA: 5s - loss: 0.425 - ETA: 4s - loss: 0.426 - ETA: 4s - loss: 0.426 - ETA: 3s - loss: 0.427 - ETA: 3s - loss: 0.427 - ETA: 2s - loss: 0.428 - ETA: 1s - loss: 0.428 - ETA: 1s - loss: 0.429 - ETA: 0s - loss: 0.429 - ETA: 0s - loss: 0.430 - 74s 666ms/step - loss: 0.4305 - val_loss: 0.1714\n",
      "\n",
      "Epoch 00004: val_loss did not improve from 0.12397\n",
      "Epoch 5/30\n",
      "111/111 [==============================] - ETA: 1:04 - loss: 0.354 - ETA: 59s - loss: 0.373 - ETA: 1:00 - loss: 0.360 - ETA: 59s - loss: 0.368 - ETA: 1:00 - loss: 0.375 - ETA: 59s - loss: 0.377 - ETA: 59s - loss: 0.37 - ETA: 59s - loss: 0.38 - ETA: 58s - loss: 0.38 - ETA: 58s - loss: 0.39 - ETA: 58s - loss: 0.39 - ETA: 58s - loss: 0.39 - ETA: 58s - loss: 0.39 - ETA: 58s - loss: 0.39 - ETA: 57s - loss: 0.39 - ETA: 57s - loss: 0.39 - ETA: 57s - loss: 0.39 - ETA: 56s - loss: 0.40 - ETA: 56s - loss: 0.40 - ETA: 55s - loss: 0.40 - ETA: 54s - loss: 0.40 - ETA: 54s - loss: 0.40 - ETA: 53s - loss: 0.41 - ETA: 52s - loss: 0.41 - ETA: 52s - loss: 0.41 - ETA: 51s - loss: 0.41 - ETA: 50s - loss: 0.41 - ETA: 50s - loss: 0.41 - ETA: 49s - loss: 0.41 - ETA: 49s - loss: 0.41 - ETA: 48s - loss: 0.41 - ETA: 48s - loss: 0.41 - ETA: 47s - loss: 0.41 - ETA: 47s - loss: 0.41 - ETA: 46s - loss: 0.41 - ETA: 46s - loss: 0.41 - ETA: 45s - loss: 0.41 - ETA: 44s - loss: 0.41 - ETA: 44s - loss: 0.41 - ETA: 43s - loss: 0.41 - ETA: 42s - loss: 0.41 - ETA: 42s - loss: 0.41 - ETA: 41s - loss: 0.41 - ETA: 41s - loss: 0.41 - ETA: 40s - loss: 0.41 - ETA: 40s - loss: 0.41 - ETA: 39s - loss: 0.41 - ETA: 38s - loss: 0.41 - ETA: 38s - loss: 0.41 - ETA: 37s - loss: 0.41 - ETA: 37s - loss: 0.41 - ETA: 36s - loss: 0.41 - ETA: 36s - loss: 0.41 - ETA: 35s - loss: 0.41 - ETA: 34s - loss: 0.41 - ETA: 34s - loss: 0.41 - ETA: 33s - loss: 0.41 - ETA: 32s - loss: 0.41 - ETA: 32s - loss: 0.41 - ETA: 31s - loss: 0.41 - ETA: 30s - loss: 0.41 - ETA: 30s - loss: 0.41 - ETA: 29s - loss: 0.41 - ETA: 28s - loss: 0.40 - ETA: 28s - loss: 0.40 - ETA: 27s - loss: 0.40 - ETA: 27s - loss: 0.40 - ETA: 26s - loss: 0.40 - ETA: 26s - loss: 0.40 - ETA: 25s - loss: 0.40 - ETA: 24s - loss: 0.40 - ETA: 24s - loss: 0.40 - ETA: 23s - loss: 0.40 - ETA: 22s - loss: 0.40 - ETA: 22s - loss: 0.40 - ETA: 21s - loss: 0.40 - ETA: 21s - loss: 0.40 - ETA: 20s - loss: 0.40 - ETA: 19s - loss: 0.40 - ETA: 19s - loss: 0.40 - ETA: 18s - loss: 0.40 - ETA: 17s - loss: 0.40 - ETA: 17s - loss: 0.40 - ETA: 16s - loss: 0.40 - ETA: 15s - loss: 0.40 - ETA: 15s - loss: 0.40 - ETA: 14s - loss: 0.40 - ETA: 14s - loss: 0.40 - ETA: 13s - loss: 0.40 - ETA: 12s - loss: 0.40 - ETA: 12s - loss: 0.40 - ETA: 11s - loss: 0.40 - ETA: 10s - loss: 0.40 - ETA: 10s - loss: 0.40 - ETA: 9s - loss: 0.4048 - ETA: 9s - loss: 0.404 - ETA: 8s - loss: 0.404 - ETA: 7s - loss: 0.404 - ETA: 7s - loss: 0.404 - ETA: 6s - loss: 0.404 - ETA: 6s - loss: 0.404 - ETA: 5s - loss: 0.404 - ETA: 4s - loss: 0.404 - ETA: 4s - loss: 0.404 - ETA: 3s - loss: 0.403 - ETA: 3s - loss: 0.403 - ETA: 2s - loss: 0.403 - ETA: 1s - loss: 0.403 - ETA: 1s - loss: 0.403 - ETA: 0s - loss: 0.403 - ETA: 0s - loss: 0.403 - 74s 664ms/step - loss: 0.4033 - val_loss: 0.1580\n",
      "\n",
      "Epoch 00005: val_loss did not improve from 0.12397\n",
      "Epoch 6/30\n",
      "111/111 [==============================] - ETA: 1:00 - loss: 0.608 - ETA: 1:00 - loss: 0.545 - ETA: 1:00 - loss: 0.497 - ETA: 1:00 - loss: 0.462 - ETA: 59s - loss: 0.433 - ETA: 59s - loss: 0.41 - ETA: 58s - loss: 0.40 - ETA: 58s - loss: 0.40 - ETA: 58s - loss: 0.39 - ETA: 58s - loss: 0.39 - ETA: 57s - loss: 0.38 - ETA: 57s - loss: 0.38 - ETA: 57s - loss: 0.38 - ETA: 56s - loss: 0.37 - ETA: 55s - loss: 0.37 - ETA: 54s - loss: 0.37 - ETA: 54s - loss: 0.36 - ETA: 53s - loss: 0.36 - ETA: 52s - loss: 0.36 - ETA: 52s - loss: 0.36 - ETA: 51s - loss: 0.36 - ETA: 51s - loss: 0.36 - ETA: 50s - loss: 0.36 - ETA: 50s - loss: 0.36 - ETA: 49s - loss: 0.36 - ETA: 48s - loss: 0.37 - ETA: 48s - loss: 0.37 - ETA: 47s - loss: 0.37 - ETA: 47s - loss: 0.37 - ETA: 46s - loss: 0.37 - ETA: 46s - loss: 0.37 - ETA: 45s - loss: 0.37 - ETA: 45s - loss: 0.37 - ETA: 44s - loss: 0.37 - ETA: 44s - loss: 0.38 - ETA: 44s - loss: 0.38 - ETA: 43s - loss: 0.38 - ETA: 43s - loss: 0.38 - ETA: 42s - loss: 0.38 - ETA: 41s - loss: 0.38 - ETA: 41s - loss: 0.38 - ETA: 40s - loss: 0.38 - ETA: 40s - loss: 0.38 - ETA: 40s - loss: 0.38 - ETA: 39s - loss: 0.38 - ETA: 39s - loss: 0.38 - ETA: 38s - loss: 0.38 - ETA: 37s - loss: 0.38 - ETA: 37s - loss: 0.38 - ETA: 36s - loss: 0.39 - ETA: 36s - loss: 0.39 - ETA: 35s - loss: 0.39 - ETA: 35s - loss: 0.39 - ETA: 34s - loss: 0.39 - ETA: 34s - loss: 0.39 - ETA: 33s - loss: 0.39 - ETA: 32s - loss: 0.39 - ETA: 32s - loss: 0.39 - ETA: 31s - loss: 0.39 - ETA: 31s - loss: 0.39 - ETA: 30s - loss: 0.39 - ETA: 30s - loss: 0.39 - ETA: 29s - loss: 0.39 - ETA: 29s - loss: 0.39 - ETA: 28s - loss: 0.39 - ETA: 27s - loss: 0.39 - ETA: 27s - loss: 0.39 - ETA: 26s - loss: 0.39 - ETA: 25s - loss: 0.40 - ETA: 25s - loss: 0.40 - ETA: 24s - loss: 0.40 - ETA: 24s - loss: 0.40 - ETA: 23s - loss: 0.40 - ETA: 22s - loss: 0.40 - ETA: 22s - loss: 0.40 - ETA: 21s - loss: 0.40 - ETA: 20s - loss: 0.40 - ETA: 20s - loss: 0.40 - ETA: 19s - loss: 0.40 - ETA: 18s - loss: 0.40 - ETA: 18s - loss: 0.40 - ETA: 17s - loss: 0.40 - ETA: 17s - loss: 0.40 - ETA: 16s - loss: 0.40 - ETA: 15s - loss: 0.40 - ETA: 15s - loss: 0.40 - ETA: 14s - loss: 0.40 - ETA: 14s - loss: 0.40 - ETA: 13s - loss: 0.40 - ETA: 12s - loss: 0.40 - ETA: 12s - loss: 0.40 - ETA: 11s - loss: 0.40 - ETA: 10s - loss: 0.40 - ETA: 10s - loss: 0.40 - ETA: 9s - loss: 0.4032 - ETA: 9s - loss: 0.403 - ETA: 8s - loss: 0.403 - ETA: 7s - loss: 0.403 - ETA: 7s - loss: 0.403 - ETA: 6s - loss: 0.403 - ETA: 6s - loss: 0.403 - ETA: 5s - loss: 0.403 - ETA: 4s - loss: 0.403 - ETA: 4s - loss: 0.403 - ETA: 3s - loss: 0.404 - ETA: 3s - loss: 0.404 - ETA: 2s - loss: 0.404 - ETA: 1s - loss: 0.404 - ETA: 1s - loss: 0.404 - ETA: 0s - loss: 0.404 - ETA: 0s - loss: 0.404 - 74s 672ms/step - loss: 0.4051 - val_loss: 0.1437\n",
      "\n",
      "Epoch 00006: val_loss did not improve from 0.12397\n",
      "Epoch 7/30\n",
      "111/111 [==============================] - ETA: 1:03 - loss: 0.301 - ETA: 57s - loss: 0.255 - ETA: 58s - loss: 0.27 - ETA: 59s - loss: 0.30 - ETA: 58s - loss: 0.32 - ETA: 57s - loss: 0.32 - ETA: 56s - loss: 0.32 - ETA: 55s - loss: 0.33 - ETA: 55s - loss: 0.33 - ETA: 54s - loss: 0.34 - ETA: 54s - loss: 0.34 - ETA: 53s - loss: 0.34 - ETA: 53s - loss: 0.35 - ETA: 52s - loss: 0.36 - ETA: 52s - loss: 0.36 - ETA: 52s - loss: 0.36 - ETA: 51s - loss: 0.37 - ETA: 51s - loss: 0.38 - ETA: 50s - loss: 0.39 - ETA: 49s - loss: 0.39 - ETA: 49s - loss: 0.39 - ETA: 49s - loss: 0.40 - ETA: 48s - loss: 0.40 - ETA: 48s - loss: 0.40 - ETA: 47s - loss: 0.40 - ETA: 47s - loss: 0.40 - ETA: 46s - loss: 0.41 - ETA: 46s - loss: 0.41 - ETA: 46s - loss: 0.41 - ETA: 45s - loss: 0.42 - ETA: 45s - loss: 0.42 - ETA: 44s - loss: 0.42 - ETA: 44s - loss: 0.43 - ETA: 43s - loss: 0.43 - ETA: 43s - loss: 0.43 - ETA: 42s - loss: 0.43 - ETA: 42s - loss: 0.43 - ETA: 41s - loss: 0.44 - ETA: 41s - loss: 0.44 - ETA: 40s - loss: 0.44 - ETA: 39s - loss: 0.44 - ETA: 39s - loss: 0.44 - ETA: 38s - loss: 0.44 - ETA: 38s - loss: 0.44 - ETA: 37s - loss: 0.44 - ETA: 37s - loss: 0.44 - ETA: 36s - loss: 0.44 - ETA: 35s - loss: 0.44 - ETA: 35s - loss: 0.44 - ETA: 34s - loss: 0.44 - ETA: 34s - loss: 0.44 - ETA: 33s - loss: 0.44 - ETA: 32s - loss: 0.44 - ETA: 32s - loss: 0.44 - ETA: 31s - loss: 0.44 - ETA: 31s - loss: 0.44 - ETA: 30s - loss: 0.44 - ETA: 30s - loss: 0.44 - ETA: 29s - loss: 0.44 - ETA: 29s - loss: 0.44 - ETA: 28s - loss: 0.44 - ETA: 27s - loss: 0.44 - ETA: 27s - loss: 0.44 - ETA: 26s - loss: 0.44 - ETA: 26s - loss: 0.44 - ETA: 25s - loss: 0.44 - ETA: 25s - loss: 0.44 - ETA: 24s - loss: 0.44 - ETA: 24s - loss: 0.44 - ETA: 23s - loss: 0.44 - ETA: 23s - loss: 0.44 - ETA: 22s - loss: 0.44 - ETA: 22s - loss: 0.44 - ETA: 21s - loss: 0.44 - ETA: 21s - loss: 0.44 - ETA: 20s - loss: 0.44 - ETA: 20s - loss: 0.44 - ETA: 19s - loss: 0.44 - ETA: 19s - loss: 0.44 - ETA: 18s - loss: 0.44 - ETA: 18s - loss: 0.44 - ETA: 17s - loss: 0.44 - ETA: 17s - loss: 0.44 - ETA: 16s - loss: 0.44 - ETA: 15s - loss: 0.44 - ETA: 15s - loss: 0.44 - ETA: 14s - loss: 0.44 - ETA: 14s - loss: 0.44 - ETA: 13s - loss: 0.44 - ETA: 12s - loss: 0.44 - ETA: 12s - loss: 0.44 - ETA: 11s - loss: 0.44 - ETA: 11s - loss: 0.44 - ETA: 10s - loss: 0.44 - ETA: 9s - loss: 0.4462 - ETA: 9s - loss: 0.446 - ETA: 8s - loss: 0.446 - ETA: 8s - loss: 0.446 - ETA: 7s - loss: 0.446 - ETA: 6s - loss: 0.446 - ETA: 6s - loss: 0.446 - ETA: 5s - loss: 0.446 - ETA: 5s - loss: 0.446 - ETA: 4s - loss: 0.445 - ETA: 3s - loss: 0.445 - ETA: 3s - loss: 0.445 - ETA: 2s - loss: 0.445 - ETA: 1s - loss: 0.445 - ETA: 1s - loss: 0.445 - ETA: 0s - loss: 0.445 - ETA: 0s - loss: 0.445 - 77s 693ms/step - loss: 0.4458 - val_loss: 0.1946\n",
      "\n",
      "Epoch 00007: val_loss did not improve from 0.12397\n",
      "Epoch 8/30\n",
      "111/111 [==============================] - ETA: 1:05 - loss: 0.174 - ETA: 1:07 - loss: 0.263 - ETA: 1:03 - loss: 0.329 - ETA: 1:04 - loss: 0.356 - ETA: 1:02 - loss: 0.357 - ETA: 1:01 - loss: 0.354 - ETA: 1:00 - loss: 0.349 - ETA: 1:00 - loss: 0.342 - ETA: 59s - loss: 0.338 - ETA: 58s - loss: 0.33 - ETA: 57s - loss: 0.33 - ETA: 57s - loss: 0.33 - ETA: 56s - loss: 0.33 - ETA: 56s - loss: 0.33 - ETA: 55s - loss: 0.33 - ETA: 54s - loss: 0.33 - ETA: 54s - loss: 0.32 - ETA: 53s - loss: 0.32 - ETA: 53s - loss: 0.32 - ETA: 52s - loss: 0.32 - ETA: 51s - loss: 0.32 - ETA: 51s - loss: 0.32 - ETA: 50s - loss: 0.32 - ETA: 49s - loss: 0.32 - ETA: 49s - loss: 0.32 - ETA: 48s - loss: 0.32 - ETA: 48s - loss: 0.32 - ETA: 47s - loss: 0.32 - ETA: 47s - loss: 0.32 - ETA: 46s - loss: 0.32 - ETA: 46s - loss: 0.32 - ETA: 45s - loss: 0.32 - ETA: 44s - loss: 0.31 - ETA: 44s - loss: 0.31 - ETA: 43s - loss: 0.31 - ETA: 43s - loss: 0.31 - ETA: 42s - loss: 0.31 - ETA: 42s - loss: 0.31 - ETA: 41s - loss: 0.31 - ETA: 40s - loss: 0.31 - ETA: 40s - loss: 0.31 - ETA: 39s - loss: 0.31 - ETA: 39s - loss: 0.31 - ETA: 38s - loss: 0.31 - ETA: 37s - loss: 0.31 - ETA: 37s - loss: 0.31 - ETA: 36s - loss: 0.31 - ETA: 36s - loss: 0.31 - ETA: 35s - loss: 0.32 - ETA: 35s - loss: 0.32 - ETA: 34s - loss: 0.32 - ETA: 33s - loss: 0.32 - ETA: 33s - loss: 0.32 - ETA: 32s - loss: 0.32 - ETA: 32s - loss: 0.32 - ETA: 31s - loss: 0.32 - ETA: 31s - loss: 0.32 - ETA: 30s - loss: 0.32 - ETA: 30s - loss: 0.32 - ETA: 29s - loss: 0.32 - ETA: 29s - loss: 0.32 - ETA: 28s - loss: 0.32 - ETA: 28s - loss: 0.32 - ETA: 27s - loss: 0.32 - ETA: 26s - loss: 0.32 - ETA: 26s - loss: 0.32 - ETA: 25s - loss: 0.32 - ETA: 25s - loss: 0.32 - ETA: 24s - loss: 0.32 - ETA: 24s - loss: 0.32 - ETA: 23s - loss: 0.32 - ETA: 23s - loss: 0.32 - ETA: 22s - loss: 0.32 - ETA: 21s - loss: 0.32 - ETA: 21s - loss: 0.33 - ETA: 20s - loss: 0.33 - ETA: 20s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 17s - loss: 0.33 - ETA: 17s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 13s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 10s - loss: 0.34 - ETA: 9s - loss: 0.3409 - ETA: 8s - loss: 0.341 - ETA: 8s - loss: 0.342 - ETA: 7s - loss: 0.342 - ETA: 7s - loss: 0.343 - ETA: 6s - loss: 0.344 - ETA: 5s - loss: 0.344 - ETA: 5s - loss: 0.345 - ETA: 4s - loss: 0.345 - ETA: 4s - loss: 0.346 - ETA: 3s - loss: 0.346 - ETA: 2s - loss: 0.347 - ETA: 2s - loss: 0.347 - ETA: 1s - loss: 0.348 - ETA: 1s - loss: 0.348 - ETA: 0s - loss: 0.349 - ETA: 0s - loss: 0.349 - 73s 660ms/step - loss: 0.3499 - val_loss: 0.1558\n",
      "\n",
      "Epoch 00008: val_loss did not improve from 0.12397\n",
      "Epoch 9/30\n",
      "111/111 [==============================] - ETA: 1:09 - loss: 0.280 - ETA: 1:09 - loss: 0.255 - ETA: 1:10 - loss: 0.270 - ETA: 1:09 - loss: 0.286 - ETA: 1:09 - loss: 0.289 - ETA: 1:10 - loss: 0.294 - ETA: 1:10 - loss: 0.297 - ETA: 1:10 - loss: 0.298 - ETA: 1:10 - loss: 0.306 - ETA: 1:10 - loss: 0.311 - ETA: 1:09 - loss: 0.314 - ETA: 1:09 - loss: 0.317 - ETA: 1:09 - loss: 0.319 - ETA: 1:08 - loss: 0.320 - ETA: 1:07 - loss: 0.321 - ETA: 1:07 - loss: 0.322 - ETA: 1:06 - loss: 0.323 - ETA: 1:06 - loss: 0.323 - ETA: 1:06 - loss: 0.323 - ETA: 1:05 - loss: 0.323 - ETA: 1:04 - loss: 0.323 - ETA: 1:03 - loss: 0.323 - ETA: 1:02 - loss: 0.322 - ETA: 1:01 - loss: 0.323 - ETA: 1:00 - loss: 0.323 - ETA: 59s - loss: 0.322 - ETA: 59s - loss: 0.32 - ETA: 58s - loss: 0.32 - ETA: 57s - loss: 0.32 - ETA: 56s - loss: 0.32 - ETA: 55s - loss: 0.32 - ETA: 54s - loss: 0.32 - ETA: 54s - loss: 0.32 - ETA: 53s - loss: 0.32 - ETA: 52s - loss: 0.32 - ETA: 51s - loss: 0.32 - ETA: 51s - loss: 0.32 - ETA: 50s - loss: 0.32 - ETA: 49s - loss: 0.32 - ETA: 48s - loss: 0.32 - ETA: 48s - loss: 0.32 - ETA: 47s - loss: 0.32 - ETA: 46s - loss: 0.32 - ETA: 45s - loss: 0.32 - ETA: 44s - loss: 0.32 - ETA: 43s - loss: 0.32 - ETA: 43s - loss: 0.33 - ETA: 42s - loss: 0.33 - ETA: 41s - loss: 0.33 - ETA: 40s - loss: 0.33 - ETA: 39s - loss: 0.33 - ETA: 39s - loss: 0.33 - ETA: 38s - loss: 0.33 - ETA: 37s - loss: 0.33 - ETA: 36s - loss: 0.33 - ETA: 35s - loss: 0.33 - ETA: 35s - loss: 0.33 - ETA: 34s - loss: 0.33 - ETA: 33s - loss: 0.33 - ETA: 33s - loss: 0.33 - ETA: 32s - loss: 0.33 - ETA: 31s - loss: 0.33 - ETA: 30s - loss: 0.33 - ETA: 30s - loss: 0.33 - ETA: 29s - loss: 0.33 - ETA: 28s - loss: 0.33 - ETA: 28s - loss: 0.33 - ETA: 27s - loss: 0.33 - ETA: 26s - loss: 0.33 - ETA: 26s - loss: 0.34 - ETA: 25s - loss: 0.34 - ETA: 24s - loss: 0.34 - ETA: 24s - loss: 0.34 - ETA: 23s - loss: 0.34 - ETA: 22s - loss: 0.34 - ETA: 22s - loss: 0.34 - ETA: 21s - loss: 0.34 - ETA: 20s - loss: 0.34 - ETA: 20s - loss: 0.34 - ETA: 19s - loss: 0.34 - ETA: 18s - loss: 0.34 - ETA: 18s - loss: 0.34 - ETA: 17s - loss: 0.34 - ETA: 16s - loss: 0.34 - ETA: 16s - loss: 0.34 - ETA: 15s - loss: 0.34 - ETA: 14s - loss: 0.34 - ETA: 14s - loss: 0.34 - ETA: 13s - loss: 0.35 - ETA: 13s - loss: 0.35 - ETA: 12s - loss: 0.35 - ETA: 11s - loss: 0.35 - ETA: 11s - loss: 0.35 - ETA: 10s - loss: 0.35 - ETA: 9s - loss: 0.3533 - ETA: 9s - loss: 0.353 - ETA: 8s - loss: 0.354 - ETA: 8s - loss: 0.354 - ETA: 7s - loss: 0.355 - ETA: 6s - loss: 0.355 - ETA: 6s - loss: 0.355 - ETA: 5s - loss: 0.356 - ETA: 4s - loss: 0.356 - ETA: 4s - loss: 0.356 - ETA: 3s - loss: 0.357 - ETA: 3s - loss: 0.357 - ETA: 2s - loss: 0.357 - ETA: 1s - loss: 0.358 - ETA: 1s - loss: 0.358 - ETA: 0s - loss: 0.358 - ETA: 0s - loss: 0.359 - 74s 670ms/step - loss: 0.3594 - val_loss: 0.1267\n",
      "\n",
      "Epoch 00009: val_loss did not improve from 0.12397\n",
      "Epoch 10/30\n",
      "111/111 [==============================] - ETA: 1:00 - loss: 0.162 - ETA: 1:01 - loss: 0.298 - ETA: 1:05 - loss: 0.354 - ETA: 1:06 - loss: 0.374 - ETA: 1:04 - loss: 0.418 - ETA: 1:03 - loss: 0.446 - ETA: 1:02 - loss: 0.464 - ETA: 1:00 - loss: 0.473 - ETA: 59s - loss: 0.479 - ETA: 58s - loss: 0.48 - ETA: 57s - loss: 0.48 - ETA: 56s - loss: 0.47 - ETA: 56s - loss: 0.47 - ETA: 55s - loss: 0.47 - ETA: 54s - loss: 0.47 - ETA: 54s - loss: 0.46 - ETA: 53s - loss: 0.46 - ETA: 53s - loss: 0.46 - ETA: 52s - loss: 0.46 - ETA: 51s - loss: 0.45 - ETA: 51s - loss: 0.45 - ETA: 50s - loss: 0.45 - ETA: 49s - loss: 0.45 - ETA: 49s - loss: 0.44 - ETA: 48s - loss: 0.44 - ETA: 48s - loss: 0.44 - ETA: 47s - loss: 0.43 - ETA: 46s - loss: 0.43 - ETA: 46s - loss: 0.43 - ETA: 45s - loss: 0.42 - ETA: 45s - loss: 0.42 - ETA: 44s - loss: 0.42 - ETA: 44s - loss: 0.42 - ETA: 43s - loss: 0.42 - ETA: 43s - loss: 0.42 - ETA: 42s - loss: 0.42 - ETA: 42s - loss: 0.42 - ETA: 41s - loss: 0.42 - ETA: 40s - loss: 0.42 - ETA: 40s - loss: 0.42 - ETA: 39s - loss: 0.42 - ETA: 39s - loss: 0.42 - ETA: 39s - loss: 0.42 - ETA: 38s - loss: 0.42 - ETA: 38s - loss: 0.42 - ETA: 37s - loss: 0.42 - ETA: 37s - loss: 0.42 - ETA: 36s - loss: 0.42 - ETA: 36s - loss: 0.42 - ETA: 35s - loss: 0.42 - ETA: 34s - loss: 0.42 - ETA: 34s - loss: 0.42 - ETA: 33s - loss: 0.42 - ETA: 33s - loss: 0.42 - ETA: 32s - loss: 0.42 - ETA: 32s - loss: 0.42 - ETA: 31s - loss: 0.42 - ETA: 31s - loss: 0.43 - ETA: 30s - loss: 0.43 - ETA: 30s - loss: 0.43 - ETA: 29s - loss: 0.43 - ETA: 29s - loss: 0.43 - ETA: 28s - loss: 0.43 - ETA: 28s - loss: 0.43 - ETA: 27s - loss: 0.43 - ETA: 27s - loss: 0.43 - ETA: 26s - loss: 0.43 - ETA: 26s - loss: 0.43 - ETA: 25s - loss: 0.43 - ETA: 24s - loss: 0.43 - ETA: 24s - loss: 0.43 - ETA: 23s - loss: 0.43 - ETA: 23s - loss: 0.43 - ETA: 22s - loss: 0.43 - ETA: 21s - loss: 0.43 - ETA: 21s - loss: 0.43 - ETA: 20s - loss: 0.43 - ETA: 19s - loss: 0.43 - ETA: 19s - loss: 0.43 - ETA: 18s - loss: 0.43 - ETA: 18s - loss: 0.43 - ETA: 17s - loss: 0.43 - ETA: 16s - loss: 0.42 - ETA: 16s - loss: 0.42 - ETA: 15s - loss: 0.42 - ETA: 15s - loss: 0.42 - ETA: 14s - loss: 0.42 - ETA: 13s - loss: 0.42 - ETA: 13s - loss: 0.42 - ETA: 12s - loss: 0.42 - ETA: 12s - loss: 0.42 - ETA: 11s - loss: 0.42 - ETA: 10s - loss: 0.42 - ETA: 10s - loss: 0.42 - ETA: 9s - loss: 0.4277 - ETA: 8s - loss: 0.427 - ETA: 8s - loss: 0.427 - ETA: 7s - loss: 0.427 - ETA: 7s - loss: 0.426 - ETA: 6s - loss: 0.426 - ETA: 5s - loss: 0.426 - ETA: 5s - loss: 0.426 - ETA: 4s - loss: 0.426 - ETA: 4s - loss: 0.425 - ETA: 3s - loss: 0.425 - ETA: 2s - loss: 0.425 - ETA: 2s - loss: 0.425 - ETA: 1s - loss: 0.424 - ETA: 1s - loss: 0.424 - ETA: 0s - loss: 0.424 - ETA: 0s - loss: 0.424 - 72s 654ms/step - loss: 0.4241 - val_loss: 0.1369\n",
      "\n",
      "Epoch 00010: val_loss did not improve from 0.12397\n",
      "Epoch 11/30\n",
      "111/111 [==============================] - ETA: 58s - loss: 0.28 - ETA: 1:00 - loss: 0.425 - ETA: 1:00 - loss: 0.428 - ETA: 59s - loss: 0.410 - ETA: 58s - loss: 0.39 - ETA: 59s - loss: 0.38 - ETA: 58s - loss: 0.37 - ETA: 57s - loss: 0.36 - ETA: 56s - loss: 0.36 - ETA: 56s - loss: 0.35 - ETA: 56s - loss: 0.35 - ETA: 55s - loss: 0.35 - ETA: 55s - loss: 0.34 - ETA: 54s - loss: 0.34 - ETA: 53s - loss: 0.34 - ETA: 53s - loss: 0.33 - ETA: 52s - loss: 0.33 - ETA: 52s - loss: 0.33 - ETA: 51s - loss: 0.33 - ETA: 51s - loss: 0.33 - ETA: 50s - loss: 0.33 - ETA: 50s - loss: 0.33 - ETA: 49s - loss: 0.33 - ETA: 48s - loss: 0.33 - ETA: 48s - loss: 0.33 - ETA: 48s - loss: 0.34 - ETA: 47s - loss: 0.34 - ETA: 46s - loss: 0.34 - ETA: 46s - loss: 0.34 - ETA: 45s - loss: 0.34 - ETA: 44s - loss: 0.35 - ETA: 44s - loss: 0.35 - ETA: 43s - loss: 0.35 - ETA: 43s - loss: 0.35 - ETA: 42s - loss: 0.35 - ETA: 42s - loss: 0.35 - ETA: 41s - loss: 0.35 - ETA: 40s - loss: 0.35 - ETA: 40s - loss: 0.35 - ETA: 39s - loss: 0.35 - ETA: 39s - loss: 0.35 - ETA: 38s - loss: 0.35 - ETA: 38s - loss: 0.35 - ETA: 37s - loss: 0.35 - ETA: 36s - loss: 0.35 - ETA: 36s - loss: 0.35 - ETA: 35s - loss: 0.35 - ETA: 35s - loss: 0.35 - ETA: 34s - loss: 0.35 - ETA: 34s - loss: 0.35 - ETA: 33s - loss: 0.35 - ETA: 33s - loss: 0.35 - ETA: 32s - loss: 0.35 - ETA: 32s - loss: 0.35 - ETA: 31s - loss: 0.36 - ETA: 31s - loss: 0.36 - ETA: 30s - loss: 0.36 - ETA: 30s - loss: 0.36 - ETA: 29s - loss: 0.36 - ETA: 29s - loss: 0.36 - ETA: 28s - loss: 0.36 - ETA: 27s - loss: 0.36 - ETA: 27s - loss: 0.36 - ETA: 26s - loss: 0.36 - ETA: 26s - loss: 0.36 - ETA: 25s - loss: 0.36 - ETA: 25s - loss: 0.36 - ETA: 24s - loss: 0.36 - ETA: 24s - loss: 0.36 - ETA: 23s - loss: 0.36 - ETA: 23s - loss: 0.36 - ETA: 22s - loss: 0.36 - ETA: 22s - loss: 0.36 - ETA: 21s - loss: 0.36 - ETA: 20s - loss: 0.36 - ETA: 20s - loss: 0.36 - ETA: 19s - loss: 0.36 - ETA: 19s - loss: 0.36 - ETA: 18s - loss: 0.36 - ETA: 18s - loss: 0.36 - ETA: 17s - loss: 0.36 - ETA: 16s - loss: 0.36 - ETA: 16s - loss: 0.36 - ETA: 15s - loss: 0.36 - ETA: 15s - loss: 0.36 - ETA: 14s - loss: 0.36 - ETA: 14s - loss: 0.36 - ETA: 13s - loss: 0.36 - ETA: 12s - loss: 0.36 - ETA: 12s - loss: 0.36 - ETA: 11s - loss: 0.36 - ETA: 11s - loss: 0.36 - ETA: 10s - loss: 0.36 - ETA: 10s - loss: 0.36 - ETA: 9s - loss: 0.3649 - ETA: 8s - loss: 0.365 - ETA: 8s - loss: 0.365 - ETA: 7s - loss: 0.365 - ETA: 7s - loss: 0.365 - ETA: 6s - loss: 0.365 - ETA: 5s - loss: 0.365 - ETA: 5s - loss: 0.365 - ETA: 4s - loss: 0.366 - ETA: 4s - loss: 0.366 - ETA: 3s - loss: 0.366 - ETA: 2s - loss: 0.366 - ETA: 2s - loss: 0.367 - ETA: 1s - loss: 0.367 - ETA: 1s - loss: 0.367 - ETA: 0s - loss: 0.367 - ETA: 0s - loss: 0.367 - 73s 656ms/step - loss: 0.3680 - val_loss: 0.1403\n",
      "\n",
      "Epoch 00011: val_loss did not improve from 0.12397\n",
      "Epoch 12/30\n",
      "111/111 [==============================] - ETA: 1:03 - loss: 0.847 - ETA: 59s - loss: 0.686 - ETA: 59s - loss: 0.64 - ETA: 1:00 - loss: 0.600 - ETA: 59s - loss: 0.563 - ETA: 58s - loss: 0.54 - ETA: 57s - loss: 0.54 - ETA: 57s - loss: 0.54 - ETA: 56s - loss: 0.54 - ETA: 55s - loss: 0.53 - ETA: 55s - loss: 0.52 - ETA: 55s - loss: 0.51 - ETA: 55s - loss: 0.51 - ETA: 55s - loss: 0.50 - ETA: 55s - loss: 0.50 - ETA: 54s - loss: 0.49 - ETA: 53s - loss: 0.49 - ETA: 53s - loss: 0.49 - ETA: 52s - loss: 0.48 - ETA: 52s - loss: 0.48 - ETA: 51s - loss: 0.48 - ETA: 51s - loss: 0.48 - ETA: 50s - loss: 0.47 - ETA: 50s - loss: 0.47 - ETA: 49s - loss: 0.47 - ETA: 49s - loss: 0.47 - ETA: 48s - loss: 0.47 - ETA: 48s - loss: 0.47 - ETA: 47s - loss: 0.47 - ETA: 46s - loss: 0.47 - ETA: 46s - loss: 0.47 - ETA: 45s - loss: 0.47 - ETA: 44s - loss: 0.47 - ETA: 44s - loss: 0.46 - ETA: 43s - loss: 0.46 - ETA: 43s - loss: 0.46 - ETA: 42s - loss: 0.46 - ETA: 41s - loss: 0.46 - ETA: 41s - loss: 0.46 - ETA: 40s - loss: 0.45 - ETA: 39s - loss: 0.45 - ETA: 39s - loss: 0.45 - ETA: 38s - loss: 0.45 - ETA: 38s - loss: 0.45 - ETA: 37s - loss: 0.45 - ETA: 36s - loss: 0.45 - ETA: 36s - loss: 0.44 - ETA: 35s - loss: 0.44 - ETA: 35s - loss: 0.44 - ETA: 35s - loss: 0.44 - ETA: 34s - loss: 0.44 - ETA: 34s - loss: 0.44 - ETA: 33s - loss: 0.44 - ETA: 33s - loss: 0.44 - ETA: 32s - loss: 0.43 - ETA: 32s - loss: 0.43 - ETA: 31s - loss: 0.43 - ETA: 31s - loss: 0.43 - ETA: 30s - loss: 0.43 - ETA: 30s - loss: 0.43 - ETA: 29s - loss: 0.43 - ETA: 29s - loss: 0.43 - ETA: 28s - loss: 0.42 - ETA: 28s - loss: 0.42 - ETA: 27s - loss: 0.42 - ETA: 27s - loss: 0.42 - ETA: 26s - loss: 0.42 - ETA: 26s - loss: 0.42 - ETA: 25s - loss: 0.42 - ETA: 25s - loss: 0.42 - ETA: 24s - loss: 0.42 - ETA: 24s - loss: 0.42 - ETA: 23s - loss: 0.41 - ETA: 23s - loss: 0.41 - ETA: 22s - loss: 0.41 - ETA: 22s - loss: 0.41 - ETA: 21s - loss: 0.41 - ETA: 20s - loss: 0.41 - ETA: 20s - loss: 0.41 - ETA: 19s - loss: 0.41 - ETA: 19s - loss: 0.41 - ETA: 18s - loss: 0.41 - ETA: 18s - loss: 0.41 - ETA: 17s - loss: 0.41 - ETA: 16s - loss: 0.41 - ETA: 16s - loss: 0.41 - ETA: 15s - loss: 0.41 - ETA: 14s - loss: 0.40 - ETA: 14s - loss: 0.40 - ETA: 13s - loss: 0.40 - ETA: 12s - loss: 0.40 - ETA: 12s - loss: 0.40 - ETA: 11s - loss: 0.40 - ETA: 10s - loss: 0.40 - ETA: 10s - loss: 0.40 - ETA: 9s - loss: 0.4068 - ETA: 9s - loss: 0.406 - ETA: 8s - loss: 0.406 - ETA: 7s - loss: 0.405 - ETA: 7s - loss: 0.405 - ETA: 6s - loss: 0.404 - ETA: 5s - loss: 0.404 - ETA: 5s - loss: 0.404 - ETA: 4s - loss: 0.403 - ETA: 3s - loss: 0.403 - ETA: 3s - loss: 0.402 - ETA: 2s - loss: 0.402 - ETA: 1s - loss: 0.402 - ETA: 1s - loss: 0.401 - ETA: 0s - loss: 0.401 - ETA: 0s - loss: 0.400 - 79s 708ms/step - loss: 0.4006 - val_loss: 0.1485\n",
      "\n",
      "Epoch 00012: val_loss did not improve from 0.12397\n",
      "Epoch 13/30\n",
      "111/111 [==============================] - ETA: 54s - loss: 0.11 - ETA: 57s - loss: 0.13 - ETA: 54s - loss: 0.16 - ETA: 55s - loss: 0.17 - ETA: 55s - loss: 0.18 - ETA: 55s - loss: 0.19 - ETA: 54s - loss: 0.19 - ETA: 54s - loss: 0.20 - ETA: 53s - loss: 0.20 - ETA: 53s - loss: 0.21 - ETA: 52s - loss: 0.21 - ETA: 52s - loss: 0.21 - ETA: 52s - loss: 0.22 - ETA: 51s - loss: 0.22 - ETA: 51s - loss: 0.22 - ETA: 50s - loss: 0.23 - ETA: 50s - loss: 0.23 - ETA: 49s - loss: 0.24 - ETA: 49s - loss: 0.24 - ETA: 48s - loss: 0.25 - ETA: 48s - loss: 0.25 - ETA: 47s - loss: 0.25 - ETA: 47s - loss: 0.25 - ETA: 46s - loss: 0.26 - ETA: 46s - loss: 0.26 - ETA: 45s - loss: 0.26 - ETA: 44s - loss: 0.27 - ETA: 44s - loss: 0.27 - ETA: 43s - loss: 0.27 - ETA: 43s - loss: 0.27 - ETA: 42s - loss: 0.27 - ETA: 42s - loss: 0.27 - ETA: 41s - loss: 0.28 - ETA: 41s - loss: 0.28 - ETA: 40s - loss: 0.28 - ETA: 40s - loss: 0.28 - ETA: 39s - loss: 0.28 - ETA: 39s - loss: 0.28 - ETA: 38s - loss: 0.28 - ETA: 37s - loss: 0.29 - ETA: 37s - loss: 0.29 - ETA: 36s - loss: 0.29 - ETA: 36s - loss: 0.29 - ETA: 35s - loss: 0.29 - ETA: 35s - loss: 0.30 - ETA: 34s - loss: 0.30 - ETA: 34s - loss: 0.30 - ETA: 33s - loss: 0.30 - ETA: 33s - loss: 0.30 - ETA: 32s - loss: 0.30 - ETA: 31s - loss: 0.30 - ETA: 31s - loss: 0.30 - ETA: 31s - loss: 0.31 - ETA: 30s - loss: 0.31 - ETA: 30s - loss: 0.31 - ETA: 29s - loss: 0.31 - ETA: 29s - loss: 0.31 - ETA: 28s - loss: 0.31 - ETA: 27s - loss: 0.31 - ETA: 27s - loss: 0.31 - ETA: 26s - loss: 0.32 - ETA: 26s - loss: 0.32 - ETA: 25s - loss: 0.32 - ETA: 25s - loss: 0.32 - ETA: 24s - loss: 0.32 - ETA: 24s - loss: 0.32 - ETA: 23s - loss: 0.32 - ETA: 23s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 20s - loss: 0.33 - ETA: 20s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 18s - loss: 0.34 - ETA: 18s - loss: 0.34 - ETA: 17s - loss: 0.34 - ETA: 17s - loss: 0.34 - ETA: 16s - loss: 0.34 - ETA: 16s - loss: 0.34 - ETA: 15s - loss: 0.34 - ETA: 14s - loss: 0.34 - ETA: 14s - loss: 0.34 - ETA: 13s - loss: 0.34 - ETA: 13s - loss: 0.34 - ETA: 12s - loss: 0.35 - ETA: 12s - loss: 0.35 - ETA: 11s - loss: 0.35 - ETA: 11s - loss: 0.35 - ETA: 10s - loss: 0.35 - ETA: 10s - loss: 0.35 - ETA: 9s - loss: 0.3555 - ETA: 9s - loss: 0.356 - ETA: 8s - loss: 0.357 - ETA: 8s - loss: 0.357 - ETA: 7s - loss: 0.358 - ETA: 6s - loss: 0.359 - ETA: 6s - loss: 0.359 - ETA: 5s - loss: 0.360 - ETA: 5s - loss: 0.360 - ETA: 4s - loss: 0.361 - ETA: 4s - loss: 0.361 - ETA: 3s - loss: 0.362 - ETA: 3s - loss: 0.362 - ETA: 2s - loss: 0.363 - ETA: 2s - loss: 0.364 - ETA: 1s - loss: 0.364 - ETA: 1s - loss: 0.365 - ETA: 0s - loss: 0.365 - ETA: 0s - loss: 0.366 - 67s 604ms/step - loss: 0.3670 - val_loss: 0.1241\n",
      "\n",
      "Epoch 00013: val_loss did not improve from 0.12397\n",
      "Epoch 14/30\n",
      "111/111 [==============================] - ETA: 1:07 - loss: 0.486 - ETA: 1:16 - loss: 0.393 - ETA: 1:17 - loss: 0.375 - ETA: 1:15 - loss: 0.374 - ETA: 1:14 - loss: 0.371 - ETA: 1:13 - loss: 0.361 - ETA: 1:11 - loss: 0.353 - ETA: 1:10 - loss: 0.347 - ETA: 1:09 - loss: 0.344 - ETA: 1:08 - loss: 0.342 - ETA: 1:08 - loss: 0.339 - ETA: 1:06 - loss: 0.337 - ETA: 1:06 - loss: 0.335 - ETA: 1:05 - loss: 0.336 - ETA: 1:04 - loss: 0.336 - ETA: 1:03 - loss: 0.337 - ETA: 1:03 - loss: 0.336 - ETA: 1:02 - loss: 0.336 - ETA: 1:02 - loss: 0.336 - ETA: 1:01 - loss: 0.336 - ETA: 1:00 - loss: 0.336 - ETA: 59s - loss: 0.336 - ETA: 58s - loss: 0.33 - ETA: 57s - loss: 0.33 - ETA: 57s - loss: 0.33 - ETA: 56s - loss: 0.33 - ETA: 55s - loss: 0.33 - ETA: 54s - loss: 0.33 - ETA: 53s - loss: 0.32 - ETA: 53s - loss: 0.32 - ETA: 52s - loss: 0.32 - ETA: 51s - loss: 0.32 - ETA: 50s - loss: 0.32 - ETA: 49s - loss: 0.32 - ETA: 48s - loss: 0.32 - ETA: 48s - loss: 0.32 - ETA: 47s - loss: 0.32 - ETA: 46s - loss: 0.32 - ETA: 45s - loss: 0.32 - ETA: 44s - loss: 0.32 - ETA: 43s - loss: 0.32 - ETA: 43s - loss: 0.32 - ETA: 42s - loss: 0.32 - ETA: 41s - loss: 0.32 - ETA: 40s - loss: 0.32 - ETA: 40s - loss: 0.32 - ETA: 39s - loss: 0.32 - ETA: 38s - loss: 0.32 - ETA: 37s - loss: 0.32 - ETA: 37s - loss: 0.32 - ETA: 36s - loss: 0.32 - ETA: 35s - loss: 0.32 - ETA: 35s - loss: 0.32 - ETA: 34s - loss: 0.32 - ETA: 33s - loss: 0.32 - ETA: 32s - loss: 0.32 - ETA: 32s - loss: 0.32 - ETA: 31s - loss: 0.32 - ETA: 30s - loss: 0.32 - ETA: 30s - loss: 0.32 - ETA: 29s - loss: 0.32 - ETA: 28s - loss: 0.32 - ETA: 28s - loss: 0.32 - ETA: 27s - loss: 0.32 - ETA: 27s - loss: 0.32 - ETA: 26s - loss: 0.32 - ETA: 25s - loss: 0.32 - ETA: 25s - loss: 0.32 - ETA: 24s - loss: 0.32 - ETA: 23s - loss: 0.32 - ETA: 23s - loss: 0.32 - ETA: 22s - loss: 0.32 - ETA: 22s - loss: 0.32 - ETA: 21s - loss: 0.32 - ETA: 20s - loss: 0.32 - ETA: 20s - loss: 0.32 - ETA: 19s - loss: 0.32 - ETA: 19s - loss: 0.32 - ETA: 18s - loss: 0.32 - ETA: 17s - loss: 0.32 - ETA: 17s - loss: 0.32 - ETA: 16s - loss: 0.32 - ETA: 16s - loss: 0.32 - ETA: 15s - loss: 0.32 - ETA: 14s - loss: 0.32 - ETA: 14s - loss: 0.32 - ETA: 13s - loss: 0.33 - ETA: 13s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 9s - loss: 0.3310 - ETA: 9s - loss: 0.331 - ETA: 8s - loss: 0.331 - ETA: 7s - loss: 0.331 - ETA: 7s - loss: 0.331 - ETA: 6s - loss: 0.332 - ETA: 6s - loss: 0.332 - ETA: 5s - loss: 0.332 - ETA: 5s - loss: 0.333 - ETA: 4s - loss: 0.333 - ETA: 3s - loss: 0.333 - ETA: 3s - loss: 0.334 - ETA: 2s - loss: 0.334 - ETA: 2s - loss: 0.334 - ETA: 1s - loss: 0.334 - ETA: 1s - loss: 0.335 - ETA: 0s - loss: 0.335 - ETA: 0s - loss: 0.335 - 69s 625ms/step - loss: 0.3359 - val_loss: 0.1193\n",
      "\n",
      "Epoch 00014: val_loss improved from 0.12397 to 0.11932, saving model to clean_notebooks\\cnn_injection_transfer_weights.h5\n",
      "Epoch 15/30\n",
      "111/111 [==============================] - ETA: 58s - loss: 0.20 - ETA: 1:07 - loss: 0.266 - ETA: 1:06 - loss: 0.258 - ETA: 1:05 - loss: 0.260 - ETA: 1:06 - loss: 0.259 - ETA: 1:06 - loss: 0.263 - ETA: 1:05 - loss: 0.266 - ETA: 1:07 - loss: 0.269 - ETA: 1:07 - loss: 0.269 - ETA: 1:08 - loss: 0.272 - ETA: 1:08 - loss: 0.274 - ETA: 1:08 - loss: 0.275 - ETA: 1:08 - loss: 0.275 - ETA: 1:07 - loss: 0.275 - ETA: 1:07 - loss: 0.276 - ETA: 1:05 - loss: 0.276 - ETA: 1:04 - loss: 0.277 - ETA: 1:03 - loss: 0.278 - ETA: 1:02 - loss: 0.280 - ETA: 1:01 - loss: 0.281 - ETA: 1:00 - loss: 0.281 - ETA: 59s - loss: 0.281 - ETA: 58s - loss: 0.28 - ETA: 58s - loss: 0.28 - ETA: 57s - loss: 0.28 - ETA: 56s - loss: 0.28 - ETA: 55s - loss: 0.28 - ETA: 54s - loss: 0.28 - ETA: 53s - loss: 0.28 - ETA: 52s - loss: 0.28 - ETA: 51s - loss: 0.28 - ETA: 50s - loss: 0.28 - ETA: 50s - loss: 0.28 - ETA: 49s - loss: 0.28 - ETA: 48s - loss: 0.28 - ETA: 47s - loss: 0.28 - ETA: 47s - loss: 0.28 - ETA: 46s - loss: 0.28 - ETA: 45s - loss: 0.28 - ETA: 44s - loss: 0.28 - ETA: 43s - loss: 0.28 - ETA: 43s - loss: 0.28 - ETA: 42s - loss: 0.28 - ETA: 41s - loss: 0.28 - ETA: 40s - loss: 0.28 - ETA: 40s - loss: 0.28 - ETA: 39s - loss: 0.28 - ETA: 38s - loss: 0.28 - ETA: 37s - loss: 0.28 - ETA: 37s - loss: 0.28 - ETA: 36s - loss: 0.28 - ETA: 35s - loss: 0.28 - ETA: 35s - loss: 0.28 - ETA: 34s - loss: 0.28 - ETA: 33s - loss: 0.28 - ETA: 33s - loss: 0.28 - ETA: 32s - loss: 0.28 - ETA: 31s - loss: 0.28 - ETA: 31s - loss: 0.28 - ETA: 30s - loss: 0.28 - ETA: 30s - loss: 0.28 - ETA: 29s - loss: 0.28 - ETA: 28s - loss: 0.29 - ETA: 28s - loss: 0.29 - ETA: 27s - loss: 0.29 - ETA: 26s - loss: 0.29 - ETA: 26s - loss: 0.29 - ETA: 25s - loss: 0.29 - ETA: 25s - loss: 0.29 - ETA: 24s - loss: 0.29 - ETA: 23s - loss: 0.29 - ETA: 23s - loss: 0.29 - ETA: 22s - loss: 0.29 - ETA: 22s - loss: 0.29 - ETA: 21s - loss: 0.29 - ETA: 20s - loss: 0.29 - ETA: 20s - loss: 0.29 - ETA: 19s - loss: 0.29 - ETA: 19s - loss: 0.29 - ETA: 18s - loss: 0.29 - ETA: 18s - loss: 0.29 - ETA: 17s - loss: 0.30 - ETA: 17s - loss: 0.30 - ETA: 16s - loss: 0.30 - ETA: 16s - loss: 0.30 - ETA: 15s - loss: 0.30 - ETA: 14s - loss: 0.30 - ETA: 14s - loss: 0.30 - ETA: 13s - loss: 0.30 - ETA: 12s - loss: 0.30 - ETA: 12s - loss: 0.30 - ETA: 11s - loss: 0.30 - ETA: 11s - loss: 0.30 - ETA: 10s - loss: 0.30 - ETA: 9s - loss: 0.3052 - ETA: 9s - loss: 0.305 - ETA: 8s - loss: 0.306 - ETA: 8s - loss: 0.306 - ETA: 7s - loss: 0.306 - ETA: 6s - loss: 0.307 - ETA: 6s - loss: 0.307 - ETA: 5s - loss: 0.307 - ETA: 4s - loss: 0.308 - ETA: 4s - loss: 0.308 - ETA: 3s - loss: 0.308 - ETA: 3s - loss: 0.309 - ETA: 2s - loss: 0.309 - ETA: 1s - loss: 0.309 - ETA: 1s - loss: 0.310 - ETA: 0s - loss: 0.310 - ETA: 0s - loss: 0.310 - 76s 689ms/step - loss: 0.3111 - val_loss: 0.1152\n",
      "\n",
      "Epoch 00015: val_loss improved from 0.11932 to 0.11523, saving model to clean_notebooks\\cnn_injection_transfer_weights.h5\n",
      "Epoch 16/30\n",
      "111/111 [==============================] - ETA: 1:11 - loss: 0.370 - ETA: 1:11 - loss: 0.306 - ETA: 1:11 - loss: 0.279 - ETA: 1:13 - loss: 0.267 - ETA: 1:12 - loss: 0.257 - ETA: 1:14 - loss: 0.248 - ETA: 1:14 - loss: 0.254 - ETA: 1:14 - loss: 0.258 - ETA: 1:12 - loss: 0.268 - ETA: 1:11 - loss: 0.278 - ETA: 1:09 - loss: 0.286 - ETA: 1:08 - loss: 0.293 - ETA: 1:07 - loss: 0.298 - ETA: 1:05 - loss: 0.305 - ETA: 1:04 - loss: 0.310 - ETA: 1:03 - loss: 0.314 - ETA: 1:02 - loss: 0.317 - ETA: 1:01 - loss: 0.320 - ETA: 1:00 - loss: 0.322 - ETA: 59s - loss: 0.323 - ETA: 58s - loss: 0.32 - ETA: 58s - loss: 0.32 - ETA: 57s - loss: 0.32 - ETA: 56s - loss: 0.32 - ETA: 55s - loss: 0.33 - ETA: 55s - loss: 0.33 - ETA: 54s - loss: 0.33 - ETA: 53s - loss: 0.33 - ETA: 52s - loss: 0.33 - ETA: 51s - loss: 0.33 - ETA: 51s - loss: 0.33 - ETA: 50s - loss: 0.33 - ETA: 49s - loss: 0.33 - ETA: 48s - loss: 0.33 - ETA: 48s - loss: 0.33 - ETA: 47s - loss: 0.33 - ETA: 46s - loss: 0.33 - ETA: 45s - loss: 0.33 - ETA: 45s - loss: 0.34 - ETA: 44s - loss: 0.34 - ETA: 43s - loss: 0.34 - ETA: 43s - loss: 0.34 - ETA: 42s - loss: 0.34 - ETA: 41s - loss: 0.34 - ETA: 41s - loss: 0.35 - ETA: 40s - loss: 0.35 - ETA: 39s - loss: 0.35 - ETA: 39s - loss: 0.35 - ETA: 38s - loss: 0.35 - ETA: 37s - loss: 0.35 - ETA: 37s - loss: 0.35 - ETA: 36s - loss: 0.35 - ETA: 35s - loss: 0.35 - ETA: 35s - loss: 0.35 - ETA: 34s - loss: 0.36 - ETA: 33s - loss: 0.36 - ETA: 33s - loss: 0.36 - ETA: 32s - loss: 0.36 - ETA: 31s - loss: 0.36 - ETA: 31s - loss: 0.36 - ETA: 30s - loss: 0.36 - ETA: 29s - loss: 0.36 - ETA: 29s - loss: 0.36 - ETA: 28s - loss: 0.36 - ETA: 28s - loss: 0.36 - ETA: 27s - loss: 0.37 - ETA: 26s - loss: 0.37 - ETA: 26s - loss: 0.37 - ETA: 25s - loss: 0.37 - ETA: 24s - loss: 0.37 - ETA: 24s - loss: 0.37 - ETA: 23s - loss: 0.37 - ETA: 23s - loss: 0.37 - ETA: 22s - loss: 0.37 - ETA: 21s - loss: 0.37 - ETA: 21s - loss: 0.37 - ETA: 20s - loss: 0.37 - ETA: 19s - loss: 0.37 - ETA: 19s - loss: 0.37 - ETA: 18s - loss: 0.37 - ETA: 18s - loss: 0.37 - ETA: 17s - loss: 0.38 - ETA: 16s - loss: 0.38 - ETA: 16s - loss: 0.38 - ETA: 15s - loss: 0.38 - ETA: 15s - loss: 0.38 - ETA: 14s - loss: 0.38 - ETA: 13s - loss: 0.38 - ETA: 13s - loss: 0.38 - ETA: 12s - loss: 0.38 - ETA: 12s - loss: 0.38 - ETA: 11s - loss: 0.38 - ETA: 10s - loss: 0.38 - ETA: 10s - loss: 0.38 - ETA: 9s - loss: 0.3836 - ETA: 9s - loss: 0.383 - ETA: 8s - loss: 0.383 - ETA: 7s - loss: 0.384 - ETA: 7s - loss: 0.384 - ETA: 6s - loss: 0.384 - ETA: 6s - loss: 0.384 - ETA: 5s - loss: 0.384 - ETA: 4s - loss: 0.384 - ETA: 4s - loss: 0.384 - ETA: 3s - loss: 0.384 - ETA: 3s - loss: 0.384 - ETA: 2s - loss: 0.384 - ETA: 1s - loss: 0.384 - ETA: 1s - loss: 0.384 - ETA: 0s - loss: 0.384 - ETA: 0s - loss: 0.383 - 78s 707ms/step - loss: 0.3838 - val_loss: 0.1265\n",
      "\n",
      "Epoch 00016: val_loss did not improve from 0.11523\n",
      "Epoch 17/30\n",
      "111/111 [==============================] - ETA: 1:17 - loss: 0.263 - ETA: 1:11 - loss: 0.324 - ETA: 1:07 - loss: 0.310 - ETA: 1:05 - loss: 0.310 - ETA: 1:05 - loss: 0.317 - ETA: 1:06 - loss: 0.320 - ETA: 1:05 - loss: 0.316 - ETA: 1:04 - loss: 0.315 - ETA: 1:03 - loss: 0.312 - ETA: 1:02 - loss: 0.309 - ETA: 1:01 - loss: 0.305 - ETA: 1:00 - loss: 0.306 - ETA: 1:00 - loss: 0.308 - ETA: 1:00 - loss: 0.309 - ETA: 59s - loss: 0.310 - ETA: 58s - loss: 0.31 - ETA: 57s - loss: 0.31 - ETA: 56s - loss: 0.32 - ETA: 55s - loss: 0.32 - ETA: 54s - loss: 0.32 - ETA: 54s - loss: 0.32 - ETA: 53s - loss: 0.32 - ETA: 52s - loss: 0.33 - ETA: 51s - loss: 0.33 - ETA: 51s - loss: 0.33 - ETA: 50s - loss: 0.33 - ETA: 50s - loss: 0.33 - ETA: 49s - loss: 0.33 - ETA: 49s - loss: 0.33 - ETA: 48s - loss: 0.33 - ETA: 47s - loss: 0.33 - ETA: 47s - loss: 0.33 - ETA: 46s - loss: 0.33 - ETA: 45s - loss: 0.33 - ETA: 45s - loss: 0.33 - ETA: 44s - loss: 0.33 - ETA: 44s - loss: 0.33 - ETA: 43s - loss: 0.33 - ETA: 42s - loss: 0.33 - ETA: 42s - loss: 0.33 - ETA: 41s - loss: 0.33 - ETA: 41s - loss: 0.33 - ETA: 40s - loss: 0.33 - ETA: 39s - loss: 0.33 - ETA: 39s - loss: 0.34 - ETA: 38s - loss: 0.34 - ETA: 38s - loss: 0.34 - ETA: 37s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 35s - loss: 0.34 - ETA: 34s - loss: 0.34 - ETA: 34s - loss: 0.34 - ETA: 33s - loss: 0.34 - ETA: 33s - loss: 0.34 - ETA: 32s - loss: 0.34 - ETA: 31s - loss: 0.34 - ETA: 31s - loss: 0.34 - ETA: 30s - loss: 0.34 - ETA: 30s - loss: 0.35 - ETA: 29s - loss: 0.35 - ETA: 28s - loss: 0.35 - ETA: 28s - loss: 0.35 - ETA: 27s - loss: 0.35 - ETA: 27s - loss: 0.35 - ETA: 26s - loss: 0.35 - ETA: 25s - loss: 0.35 - ETA: 25s - loss: 0.35 - ETA: 24s - loss: 0.35 - ETA: 24s - loss: 0.35 - ETA: 23s - loss: 0.35 - ETA: 22s - loss: 0.35 - ETA: 22s - loss: 0.36 - ETA: 21s - loss: 0.36 - ETA: 21s - loss: 0.36 - ETA: 20s - loss: 0.36 - ETA: 20s - loss: 0.36 - ETA: 19s - loss: 0.36 - ETA: 18s - loss: 0.36 - ETA: 18s - loss: 0.36 - ETA: 17s - loss: 0.36 - ETA: 17s - loss: 0.36 - ETA: 16s - loss: 0.36 - ETA: 15s - loss: 0.36 - ETA: 15s - loss: 0.36 - ETA: 14s - loss: 0.36 - ETA: 14s - loss: 0.36 - ETA: 13s - loss: 0.36 - ETA: 12s - loss: 0.36 - ETA: 12s - loss: 0.36 - ETA: 11s - loss: 0.36 - ETA: 11s - loss: 0.36 - ETA: 10s - loss: 0.36 - ETA: 9s - loss: 0.3690 - ETA: 9s - loss: 0.369 - ETA: 8s - loss: 0.369 - ETA: 8s - loss: 0.370 - ETA: 7s - loss: 0.370 - ETA: 7s - loss: 0.371 - ETA: 6s - loss: 0.371 - ETA: 5s - loss: 0.371 - ETA: 5s - loss: 0.372 - ETA: 4s - loss: 0.372 - ETA: 4s - loss: 0.372 - ETA: 3s - loss: 0.373 - ETA: 2s - loss: 0.373 - ETA: 2s - loss: 0.373 - ETA: 1s - loss: 0.373 - ETA: 1s - loss: 0.374 - ETA: 0s - loss: 0.374 - ETA: 0s - loss: 0.374 - 73s 654ms/step - loss: 0.3750 - val_loss: 0.1263\n",
      "\n",
      "Epoch 00017: val_loss did not improve from 0.11523\n",
      "Epoch 18/30\n",
      "111/111 [==============================] - ETA: 1:02 - loss: 0.340 - ETA: 1:01 - loss: 0.411 - ETA: 1:01 - loss: 0.402 - ETA: 1:00 - loss: 0.506 - ETA: 59s - loss: 0.547 - ETA: 58s - loss: 0.56 - ETA: 57s - loss: 0.56 - ETA: 57s - loss: 0.56 - ETA: 56s - loss: 0.55 - ETA: 55s - loss: 0.54 - ETA: 55s - loss: 0.54 - ETA: 54s - loss: 0.53 - ETA: 53s - loss: 0.52 - ETA: 53s - loss: 0.51 - ETA: 53s - loss: 0.50 - ETA: 52s - loss: 0.50 - ETA: 52s - loss: 0.49 - ETA: 51s - loss: 0.48 - ETA: 51s - loss: 0.48 - ETA: 50s - loss: 0.47 - ETA: 50s - loss: 0.47 - ETA: 49s - loss: 0.46 - ETA: 49s - loss: 0.46 - ETA: 48s - loss: 0.45 - ETA: 48s - loss: 0.45 - ETA: 48s - loss: 0.45 - ETA: 47s - loss: 0.44 - ETA: 47s - loss: 0.44 - ETA: 46s - loss: 0.44 - ETA: 46s - loss: 0.44 - ETA: 45s - loss: 0.43 - ETA: 45s - loss: 0.43 - ETA: 44s - loss: 0.43 - ETA: 44s - loss: 0.43 - ETA: 44s - loss: 0.43 - ETA: 44s - loss: 0.43 - ETA: 44s - loss: 0.43 - ETA: 43s - loss: 0.42 - ETA: 43s - loss: 0.42 - ETA: 42s - loss: 0.42 - ETA: 42s - loss: 0.42 - ETA: 42s - loss: 0.42 - ETA: 41s - loss: 0.42 - ETA: 41s - loss: 0.42 - ETA: 40s - loss: 0.42 - ETA: 40s - loss: 0.41 - ETA: 39s - loss: 0.41 - ETA: 38s - loss: 0.41 - ETA: 38s - loss: 0.41 - ETA: 37s - loss: 0.41 - ETA: 37s - loss: 0.41 - ETA: 36s - loss: 0.41 - ETA: 36s - loss: 0.41 - ETA: 35s - loss: 0.41 - ETA: 34s - loss: 0.41 - ETA: 34s - loss: 0.41 - ETA: 33s - loss: 0.41 - ETA: 33s - loss: 0.40 - ETA: 32s - loss: 0.40 - ETA: 31s - loss: 0.40 - ETA: 31s - loss: 0.40 - ETA: 30s - loss: 0.40 - ETA: 30s - loss: 0.40 - ETA: 29s - loss: 0.40 - ETA: 28s - loss: 0.40 - ETA: 28s - loss: 0.40 - ETA: 27s - loss: 0.40 - ETA: 26s - loss: 0.40 - ETA: 26s - loss: 0.40 - ETA: 25s - loss: 0.40 - ETA: 25s - loss: 0.40 - ETA: 24s - loss: 0.40 - ETA: 23s - loss: 0.39 - ETA: 23s - loss: 0.39 - ETA: 22s - loss: 0.39 - ETA: 21s - loss: 0.39 - ETA: 21s - loss: 0.39 - ETA: 20s - loss: 0.39 - ETA: 20s - loss: 0.39 - ETA: 19s - loss: 0.39 - ETA: 18s - loss: 0.39 - ETA: 18s - loss: 0.39 - ETA: 17s - loss: 0.39 - ETA: 17s - loss: 0.39 - ETA: 16s - loss: 0.39 - ETA: 15s - loss: 0.38 - ETA: 15s - loss: 0.38 - ETA: 14s - loss: 0.38 - ETA: 13s - loss: 0.38 - ETA: 13s - loss: 0.38 - ETA: 12s - loss: 0.38 - ETA: 12s - loss: 0.38 - ETA: 11s - loss: 0.38 - ETA: 10s - loss: 0.38 - ETA: 10s - loss: 0.38 - ETA: 9s - loss: 0.3843 - ETA: 8s - loss: 0.383 - ETA: 8s - loss: 0.383 - ETA: 7s - loss: 0.383 - ETA: 6s - loss: 0.382 - ETA: 6s - loss: 0.382 - ETA: 5s - loss: 0.382 - ETA: 5s - loss: 0.381 - ETA: 4s - loss: 0.381 - ETA: 3s - loss: 0.380 - ETA: 3s - loss: 0.380 - ETA: 2s - loss: 0.380 - ETA: 1s - loss: 0.379 - ETA: 1s - loss: 0.379 - ETA: 0s - loss: 0.379 - ETA: 0s - loss: 0.378 - 82s 738ms/step - loss: 0.3784 - val_loss: 0.1297\n",
      "\n",
      "Epoch 00018: val_loss did not improve from 0.11523\n",
      "Epoch 19/30\n",
      "111/111 [==============================] - ETA: 1:18 - loss: 0.066 - ETA: 1:12 - loss: 0.096 - ETA: 1:19 - loss: 0.178 - ETA: 1:19 - loss: 0.200 - ETA: 1:19 - loss: 0.211 - ETA: 1:18 - loss: 0.228 - ETA: 1:18 - loss: 0.245 - ETA: 1:17 - loss: 0.258 - ETA: 1:16 - loss: 0.265 - ETA: 1:16 - loss: 0.272 - ETA: 1:16 - loss: 0.276 - ETA: 1:15 - loss: 0.279 - ETA: 1:15 - loss: 0.281 - ETA: 1:15 - loss: 0.287 - ETA: 1:14 - loss: 0.291 - ETA: 1:12 - loss: 0.295 - ETA: 1:11 - loss: 0.298 - ETA: 1:10 - loss: 0.301 - ETA: 1:09 - loss: 0.303 - ETA: 1:08 - loss: 0.305 - ETA: 1:07 - loss: 0.307 - ETA: 1:06 - loss: 0.310 - ETA: 1:05 - loss: 0.312 - ETA: 1:04 - loss: 0.314 - ETA: 1:03 - loss: 0.315 - ETA: 1:03 - loss: 0.316 - ETA: 1:02 - loss: 0.317 - ETA: 1:01 - loss: 0.319 - ETA: 1:00 - loss: 0.320 - ETA: 59s - loss: 0.322 - ETA: 58s - loss: 0.32 - ETA: 57s - loss: 0.32 - ETA: 56s - loss: 0.32 - ETA: 55s - loss: 0.32 - ETA: 54s - loss: 0.32 - ETA: 53s - loss: 0.32 - ETA: 52s - loss: 0.32 - ETA: 51s - loss: 0.32 - ETA: 50s - loss: 0.32 - ETA: 49s - loss: 0.32 - ETA: 48s - loss: 0.32 - ETA: 48s - loss: 0.33 - ETA: 47s - loss: 0.33 - ETA: 46s - loss: 0.33 - ETA: 45s - loss: 0.33 - ETA: 44s - loss: 0.33 - ETA: 43s - loss: 0.33 - ETA: 42s - loss: 0.33 - ETA: 41s - loss: 0.33 - ETA: 41s - loss: 0.33 - ETA: 40s - loss: 0.33 - ETA: 39s - loss: 0.33 - ETA: 38s - loss: 0.33 - ETA: 38s - loss: 0.33 - ETA: 37s - loss: 0.33 - ETA: 36s - loss: 0.33 - ETA: 35s - loss: 0.33 - ETA: 35s - loss: 0.33 - ETA: 34s - loss: 0.33 - ETA: 34s - loss: 0.34 - ETA: 33s - loss: 0.34 - ETA: 32s - loss: 0.34 - ETA: 32s - loss: 0.34 - ETA: 31s - loss: 0.34 - ETA: 30s - loss: 0.34 - ETA: 29s - loss: 0.34 - ETA: 29s - loss: 0.34 - ETA: 28s - loss: 0.34 - ETA: 27s - loss: 0.34 - ETA: 27s - loss: 0.34 - ETA: 26s - loss: 0.34 - ETA: 25s - loss: 0.34 - ETA: 24s - loss: 0.34 - ETA: 24s - loss: 0.34 - ETA: 23s - loss: 0.34 - ETA: 22s - loss: 0.34 - ETA: 22s - loss: 0.34 - ETA: 21s - loss: 0.34 - ETA: 20s - loss: 0.34 - ETA: 20s - loss: 0.34 - ETA: 19s - loss: 0.34 - ETA: 18s - loss: 0.34 - ETA: 18s - loss: 0.34 - ETA: 17s - loss: 0.34 - ETA: 16s - loss: 0.34 - ETA: 16s - loss: 0.34 - ETA: 15s - loss: 0.34 - ETA: 14s - loss: 0.34 - ETA: 14s - loss: 0.35 - ETA: 13s - loss: 0.35 - ETA: 12s - loss: 0.35 - ETA: 12s - loss: 0.35 - ETA: 11s - loss: 0.35 - ETA: 10s - loss: 0.35 - ETA: 10s - loss: 0.35 - ETA: 9s - loss: 0.3511 - ETA: 8s - loss: 0.351 - ETA: 8s - loss: 0.351 - ETA: 7s - loss: 0.351 - ETA: 7s - loss: 0.351 - ETA: 6s - loss: 0.351 - ETA: 5s - loss: 0.351 - ETA: 5s - loss: 0.351 - ETA: 4s - loss: 0.352 - ETA: 3s - loss: 0.352 - ETA: 3s - loss: 0.352 - ETA: 2s - loss: 0.352 - ETA: 1s - loss: 0.352 - ETA: 1s - loss: 0.352 - ETA: 0s - loss: 0.352 - ETA: 0s - loss: 0.352 - 77s 697ms/step - loss: 0.3529 - val_loss: 0.1192\n",
      "\n",
      "Epoch 00019: val_loss did not improve from 0.11523\n",
      "Epoch 20/30\n",
      "111/111 [==============================] - ETA: 1:05 - loss: 0.098 - ETA: 1:16 - loss: 0.196 - ETA: 1:08 - loss: 0.232 - ETA: 1:04 - loss: 0.246 - ETA: 1:02 - loss: 0.265 - ETA: 1:02 - loss: 0.280 - ETA: 1:02 - loss: 0.295 - ETA: 1:01 - loss: 0.302 - ETA: 1:01 - loss: 0.307 - ETA: 1:01 - loss: 0.310 - ETA: 1:00 - loss: 0.314 - ETA: 1:00 - loss: 0.320 - ETA: 59s - loss: 0.325 - ETA: 58s - loss: 0.32 - ETA: 58s - loss: 0.32 - ETA: 57s - loss: 0.32 - ETA: 57s - loss: 0.32 - ETA: 56s - loss: 0.33 - ETA: 55s - loss: 0.33 - ETA: 54s - loss: 0.33 - ETA: 54s - loss: 0.34 - ETA: 53s - loss: 0.34 - ETA: 53s - loss: 0.34 - ETA: 52s - loss: 0.35 - ETA: 51s - loss: 0.35 - ETA: 51s - loss: 0.35 - ETA: 51s - loss: 0.36 - ETA: 50s - loss: 0.36 - ETA: 50s - loss: 0.36 - ETA: 50s - loss: 0.36 - ETA: 49s - loss: 0.36 - ETA: 48s - loss: 0.36 - ETA: 48s - loss: 0.36 - ETA: 47s - loss: 0.37 - ETA: 46s - loss: 0.37 - ETA: 46s - loss: 0.37 - ETA: 46s - loss: 0.37 - ETA: 45s - loss: 0.37 - ETA: 45s - loss: 0.37 - ETA: 44s - loss: 0.37 - ETA: 43s - loss: 0.37 - ETA: 43s - loss: 0.37 - ETA: 42s - loss: 0.37 - ETA: 41s - loss: 0.37 - ETA: 41s - loss: 0.37 - ETA: 40s - loss: 0.37 - ETA: 39s - loss: 0.37 - ETA: 39s - loss: 0.37 - ETA: 38s - loss: 0.37 - ETA: 37s - loss: 0.37 - ETA: 37s - loss: 0.37 - ETA: 36s - loss: 0.37 - ETA: 36s - loss: 0.37 - ETA: 35s - loss: 0.37 - ETA: 34s - loss: 0.37 - ETA: 34s - loss: 0.37 - ETA: 33s - loss: 0.37 - ETA: 32s - loss: 0.37 - ETA: 32s - loss: 0.37 - ETA: 31s - loss: 0.37 - ETA: 30s - loss: 0.37 - ETA: 30s - loss: 0.37 - ETA: 29s - loss: 0.37 - ETA: 28s - loss: 0.37 - ETA: 28s - loss: 0.38 - ETA: 27s - loss: 0.38 - ETA: 27s - loss: 0.38 - ETA: 26s - loss: 0.38 - ETA: 25s - loss: 0.38 - ETA: 25s - loss: 0.38 - ETA: 24s - loss: 0.38 - ETA: 23s - loss: 0.38 - ETA: 23s - loss: 0.38 - ETA: 22s - loss: 0.38 - ETA: 21s - loss: 0.38 - ETA: 21s - loss: 0.38 - ETA: 20s - loss: 0.38 - ETA: 20s - loss: 0.38 - ETA: 19s - loss: 0.38 - ETA: 18s - loss: 0.38 - ETA: 18s - loss: 0.38 - ETA: 17s - loss: 0.38 - ETA: 17s - loss: 0.38 - ETA: 16s - loss: 0.38 - ETA: 16s - loss: 0.38 - ETA: 15s - loss: 0.38 - ETA: 14s - loss: 0.38 - ETA: 14s - loss: 0.38 - ETA: 13s - loss: 0.38 - ETA: 12s - loss: 0.38 - ETA: 12s - loss: 0.38 - ETA: 11s - loss: 0.38 - ETA: 11s - loss: 0.37 - ETA: 10s - loss: 0.37 - ETA: 9s - loss: 0.3795 - ETA: 9s - loss: 0.379 - ETA: 8s - loss: 0.379 - ETA: 8s - loss: 0.378 - ETA: 7s - loss: 0.378 - ETA: 6s - loss: 0.378 - ETA: 6s - loss: 0.378 - ETA: 5s - loss: 0.378 - ETA: 4s - loss: 0.377 - ETA: 4s - loss: 0.377 - ETA: 3s - loss: 0.377 - ETA: 3s - loss: 0.377 - ETA: 2s - loss: 0.377 - ETA: 1s - loss: 0.376 - ETA: 1s - loss: 0.376 - ETA: 0s - loss: 0.376 - ETA: 0s - loss: 0.376 - 77s 693ms/step - loss: 0.3764 - val_loss: 0.1136\n",
      "\n",
      "Epoch 00020: val_loss improved from 0.11523 to 0.11363, saving model to clean_notebooks\\cnn_injection_transfer_weights.h5\n",
      "Epoch 21/30\n",
      "111/111 [==============================] - ETA: 49s - loss: 0.06 - ETA: 54s - loss: 0.19 - ETA: 55s - loss: 0.25 - ETA: 55s - loss: 0.26 - ETA: 55s - loss: 0.26 - ETA: 55s - loss: 0.27 - ETA: 56s - loss: 0.27 - ETA: 55s - loss: 0.27 - ETA: 55s - loss: 0.27 - ETA: 55s - loss: 0.27 - ETA: 54s - loss: 0.27 - ETA: 54s - loss: 0.27 - ETA: 53s - loss: 0.26 - ETA: 53s - loss: 0.26 - ETA: 53s - loss: 0.26 - ETA: 52s - loss: 0.26 - ETA: 51s - loss: 0.26 - ETA: 51s - loss: 0.26 - ETA: 50s - loss: 0.26 - ETA: 50s - loss: 0.26 - ETA: 50s - loss: 0.26 - ETA: 50s - loss: 0.26 - ETA: 50s - loss: 0.26 - ETA: 49s - loss: 0.26 - ETA: 49s - loss: 0.26 - ETA: 48s - loss: 0.26 - ETA: 47s - loss: 0.26 - ETA: 47s - loss: 0.26 - ETA: 46s - loss: 0.26 - ETA: 46s - loss: 0.26 - ETA: 45s - loss: 0.26 - ETA: 45s - loss: 0.27 - ETA: 44s - loss: 0.27 - ETA: 44s - loss: 0.27 - ETA: 43s - loss: 0.27 - ETA: 42s - loss: 0.27 - ETA: 42s - loss: 0.27 - ETA: 41s - loss: 0.27 - ETA: 41s - loss: 0.27 - ETA: 40s - loss: 0.27 - ETA: 40s - loss: 0.27 - ETA: 39s - loss: 0.27 - ETA: 39s - loss: 0.27 - ETA: 38s - loss: 0.27 - ETA: 37s - loss: 0.27 - ETA: 37s - loss: 0.27 - ETA: 36s - loss: 0.27 - ETA: 36s - loss: 0.27 - ETA: 35s - loss: 0.27 - ETA: 34s - loss: 0.28 - ETA: 34s - loss: 0.28 - ETA: 33s - loss: 0.28 - ETA: 33s - loss: 0.28 - ETA: 32s - loss: 0.28 - ETA: 32s - loss: 0.28 - ETA: 31s - loss: 0.28 - ETA: 30s - loss: 0.28 - ETA: 30s - loss: 0.28 - ETA: 29s - loss: 0.28 - ETA: 29s - loss: 0.28 - ETA: 28s - loss: 0.28 - ETA: 28s - loss: 0.28 - ETA: 27s - loss: 0.28 - ETA: 26s - loss: 0.28 - ETA: 26s - loss: 0.28 - ETA: 25s - loss: 0.28 - ETA: 25s - loss: 0.28 - ETA: 24s - loss: 0.28 - ETA: 23s - loss: 0.28 - ETA: 23s - loss: 0.28 - ETA: 22s - loss: 0.28 - ETA: 22s - loss: 0.28 - ETA: 21s - loss: 0.28 - ETA: 20s - loss: 0.28 - ETA: 20s - loss: 0.28 - ETA: 19s - loss: 0.28 - ETA: 19s - loss: 0.28 - ETA: 18s - loss: 0.28 - ETA: 18s - loss: 0.28 - ETA: 17s - loss: 0.28 - ETA: 16s - loss: 0.28 - ETA: 16s - loss: 0.28 - ETA: 15s - loss: 0.29 - ETA: 15s - loss: 0.29 - ETA: 14s - loss: 0.29 - ETA: 14s - loss: 0.29 - ETA: 13s - loss: 0.29 - ETA: 12s - loss: 0.29 - ETA: 12s - loss: 0.29 - ETA: 11s - loss: 0.29 - ETA: 11s - loss: 0.29 - ETA: 10s - loss: 0.29 - ETA: 10s - loss: 0.29 - ETA: 9s - loss: 0.2943 - ETA: 9s - loss: 0.294 - ETA: 8s - loss: 0.295 - ETA: 7s - loss: 0.295 - ETA: 7s - loss: 0.295 - ETA: 6s - loss: 0.295 - ETA: 6s - loss: 0.296 - ETA: 5s - loss: 0.296 - ETA: 5s - loss: 0.296 - ETA: 4s - loss: 0.297 - ETA: 3s - loss: 0.297 - ETA: 3s - loss: 0.297 - ETA: 2s - loss: 0.297 - ETA: 2s - loss: 0.298 - ETA: 1s - loss: 0.298 - ETA: 1s - loss: 0.298 - ETA: 0s - loss: 0.298 - ETA: 0s - loss: 0.299 - 72s 653ms/step - loss: 0.2992 - val_loss: 0.1242\n",
      "\n",
      "Epoch 00021: val_loss did not improve from 0.11363\n",
      "Epoch 22/30\n",
      "111/111 [==============================] - ETA: 1:16 - loss: 0.464 - ETA: 1:12 - loss: 0.484 - ETA: 1:13 - loss: 0.463 - ETA: 1:13 - loss: 0.436 - ETA: 1:13 - loss: 0.409 - ETA: 1:11 - loss: 0.397 - ETA: 1:13 - loss: 0.385 - ETA: 1:13 - loss: 0.380 - ETA: 1:13 - loss: 0.373 - ETA: 1:13 - loss: 0.372 - ETA: 1:11 - loss: 0.370 - ETA: 1:10 - loss: 0.366 - ETA: 1:09 - loss: 0.363 - ETA: 1:08 - loss: 0.361 - ETA: 1:07 - loss: 0.359 - ETA: 1:06 - loss: 0.356 - ETA: 1:05 - loss: 0.353 - ETA: 1:04 - loss: 0.351 - ETA: 1:03 - loss: 0.349 - ETA: 1:02 - loss: 0.347 - ETA: 1:01 - loss: 0.345 - ETA: 1:00 - loss: 0.343 - ETA: 59s - loss: 0.342 - ETA: 57s - loss: 0.34 - ETA: 56s - loss: 0.33 - ETA: 55s - loss: 0.33 - ETA: 54s - loss: 0.33 - ETA: 53s - loss: 0.33 - ETA: 53s - loss: 0.33 - ETA: 52s - loss: 0.33 - ETA: 51s - loss: 0.33 - ETA: 50s - loss: 0.33 - ETA: 50s - loss: 0.33 - ETA: 49s - loss: 0.33 - ETA: 48s - loss: 0.33 - ETA: 48s - loss: 0.33 - ETA: 47s - loss: 0.33 - ETA: 47s - loss: 0.33 - ETA: 46s - loss: 0.33 - ETA: 45s - loss: 0.34 - ETA: 45s - loss: 0.34 - ETA: 44s - loss: 0.34 - ETA: 43s - loss: 0.34 - ETA: 43s - loss: 0.34 - ETA: 42s - loss: 0.34 - ETA: 41s - loss: 0.34 - ETA: 41s - loss: 0.34 - ETA: 40s - loss: 0.34 - ETA: 40s - loss: 0.34 - ETA: 39s - loss: 0.34 - ETA: 38s - loss: 0.34 - ETA: 38s - loss: 0.34 - ETA: 37s - loss: 0.34 - ETA: 37s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 35s - loss: 0.34 - ETA: 35s - loss: 0.34 - ETA: 34s - loss: 0.34 - ETA: 33s - loss: 0.34 - ETA: 32s - loss: 0.34 - ETA: 32s - loss: 0.34 - ETA: 31s - loss: 0.33 - ETA: 30s - loss: 0.33 - ETA: 30s - loss: 0.33 - ETA: 29s - loss: 0.33 - ETA: 28s - loss: 0.33 - ETA: 28s - loss: 0.33 - ETA: 27s - loss: 0.33 - ETA: 26s - loss: 0.33 - ETA: 26s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 24s - loss: 0.33 - ETA: 24s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 20s - loss: 0.33 - ETA: 20s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 17s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 13s - loss: 0.33 - ETA: 13s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 9s - loss: 0.3358 - ETA: 8s - loss: 0.335 - ETA: 8s - loss: 0.335 - ETA: 7s - loss: 0.335 - ETA: 6s - loss: 0.335 - ETA: 6s - loss: 0.335 - ETA: 5s - loss: 0.335 - ETA: 5s - loss: 0.335 - ETA: 4s - loss: 0.335 - ETA: 3s - loss: 0.335 - ETA: 3s - loss: 0.335 - ETA: 2s - loss: 0.335 - ETA: 1s - loss: 0.335 - ETA: 1s - loss: 0.335 - ETA: 0s - loss: 0.335 - ETA: 0s - loss: 0.335 - 77s 697ms/step - loss: 0.3351 - val_loss: 0.1197\n",
      "\n",
      "Epoch 00022: val_loss did not improve from 0.11363\n",
      "Epoch 23/30\n",
      "111/111 [==============================] - ETA: 1:08 - loss: 0.282 - ETA: 1:21 - loss: 0.254 - ETA: 1:16 - loss: 0.257 - ETA: 1:14 - loss: 0.260 - ETA: 1:14 - loss: 0.267 - ETA: 1:11 - loss: 0.278 - ETA: 1:09 - loss: 0.285 - ETA: 1:07 - loss: 0.287 - ETA: 1:05 - loss: 0.287 - ETA: 1:04 - loss: 0.286 - ETA: 1:02 - loss: 0.283 - ETA: 1:01 - loss: 0.281 - ETA: 1:00 - loss: 0.278 - ETA: 59s - loss: 0.284 - ETA: 58s - loss: 0.28 - ETA: 57s - loss: 0.29 - ETA: 56s - loss: 0.29 - ETA: 56s - loss: 0.29 - ETA: 56s - loss: 0.29 - ETA: 56s - loss: 0.30 - ETA: 55s - loss: 0.30 - ETA: 54s - loss: 0.30 - ETA: 53s - loss: 0.30 - ETA: 53s - loss: 0.30 - ETA: 52s - loss: 0.30 - ETA: 52s - loss: 0.30 - ETA: 51s - loss: 0.30 - ETA: 50s - loss: 0.31 - ETA: 50s - loss: 0.31 - ETA: 49s - loss: 0.31 - ETA: 49s - loss: 0.31 - ETA: 48s - loss: 0.31 - ETA: 48s - loss: 0.31 - ETA: 48s - loss: 0.31 - ETA: 47s - loss: 0.31 - ETA: 46s - loss: 0.31 - ETA: 45s - loss: 0.31 - ETA: 45s - loss: 0.31 - ETA: 44s - loss: 0.31 - ETA: 43s - loss: 0.31 - ETA: 43s - loss: 0.31 - ETA: 42s - loss: 0.31 - ETA: 41s - loss: 0.31 - ETA: 41s - loss: 0.31 - ETA: 40s - loss: 0.31 - ETA: 40s - loss: 0.31 - ETA: 39s - loss: 0.31 - ETA: 38s - loss: 0.31 - ETA: 38s - loss: 0.31 - ETA: 37s - loss: 0.31 - ETA: 36s - loss: 0.31 - ETA: 36s - loss: 0.31 - ETA: 35s - loss: 0.31 - ETA: 35s - loss: 0.31 - ETA: 34s - loss: 0.31 - ETA: 33s - loss: 0.31 - ETA: 33s - loss: 0.31 - ETA: 32s - loss: 0.31 - ETA: 32s - loss: 0.31 - ETA: 31s - loss: 0.31 - ETA: 31s - loss: 0.31 - ETA: 30s - loss: 0.31 - ETA: 29s - loss: 0.31 - ETA: 29s - loss: 0.31 - ETA: 28s - loss: 0.31 - ETA: 27s - loss: 0.31 - ETA: 27s - loss: 0.31 - ETA: 26s - loss: 0.31 - ETA: 26s - loss: 0.31 - ETA: 25s - loss: 0.31 - ETA: 24s - loss: 0.31 - ETA: 24s - loss: 0.32 - ETA: 23s - loss: 0.32 - ETA: 22s - loss: 0.32 - ETA: 22s - loss: 0.32 - ETA: 21s - loss: 0.32 - ETA: 20s - loss: 0.32 - ETA: 20s - loss: 0.32 - ETA: 19s - loss: 0.32 - ETA: 19s - loss: 0.32 - ETA: 18s - loss: 0.32 - ETA: 17s - loss: 0.32 - ETA: 17s - loss: 0.32 - ETA: 16s - loss: 0.32 - ETA: 15s - loss: 0.32 - ETA: 15s - loss: 0.32 - ETA: 14s - loss: 0.32 - ETA: 14s - loss: 0.32 - ETA: 13s - loss: 0.32 - ETA: 12s - loss: 0.32 - ETA: 12s - loss: 0.32 - ETA: 11s - loss: 0.32 - ETA: 10s - loss: 0.32 - ETA: 10s - loss: 0.32 - ETA: 9s - loss: 0.3280 - ETA: 9s - loss: 0.328 - ETA: 8s - loss: 0.328 - ETA: 7s - loss: 0.328 - ETA: 7s - loss: 0.329 - ETA: 6s - loss: 0.329 - ETA: 6s - loss: 0.329 - ETA: 5s - loss: 0.329 - ETA: 4s - loss: 0.330 - ETA: 4s - loss: 0.330 - ETA: 3s - loss: 0.330 - ETA: 3s - loss: 0.330 - ETA: 2s - loss: 0.330 - ETA: 1s - loss: 0.331 - ETA: 1s - loss: 0.331 - ETA: 0s - loss: 0.331 - ETA: 0s - loss: 0.331 - 75s 672ms/step - loss: 0.3318 - val_loss: 0.1179\n",
      "\n",
      "Epoch 00023: val_loss did not improve from 0.11363\n",
      "Epoch 24/30\n",
      "111/111 [==============================] - ETA: 1:04 - loss: 0.410 - ETA: 1:00 - loss: 0.373 - ETA: 1:01 - loss: 0.383 - ETA: 1:00 - loss: 0.387 - ETA: 59s - loss: 0.378 - ETA: 59s - loss: 0.36 - ETA: 59s - loss: 0.35 - ETA: 1:00 - loss: 0.351 - ETA: 1:00 - loss: 0.341 - ETA: 1:00 - loss: 0.333 - ETA: 59s - loss: 0.325 - ETA: 58s - loss: 0.32 - ETA: 57s - loss: 0.31 - ETA: 57s - loss: 0.31 - ETA: 58s - loss: 0.31 - ETA: 57s - loss: 0.31 - ETA: 57s - loss: 0.30 - ETA: 57s - loss: 0.30 - ETA: 56s - loss: 0.30 - ETA: 55s - loss: 0.30 - ETA: 55s - loss: 0.31 - ETA: 54s - loss: 0.31 - ETA: 53s - loss: 0.31 - ETA: 52s - loss: 0.31 - ETA: 52s - loss: 0.31 - ETA: 51s - loss: 0.32 - ETA: 50s - loss: 0.32 - ETA: 50s - loss: 0.32 - ETA: 49s - loss: 0.32 - ETA: 48s - loss: 0.33 - ETA: 48s - loss: 0.33 - ETA: 47s - loss: 0.33 - ETA: 47s - loss: 0.33 - ETA: 46s - loss: 0.34 - ETA: 45s - loss: 0.34 - ETA: 45s - loss: 0.34 - ETA: 45s - loss: 0.34 - ETA: 44s - loss: 0.34 - ETA: 44s - loss: 0.34 - ETA: 43s - loss: 0.34 - ETA: 43s - loss: 0.35 - ETA: 42s - loss: 0.35 - ETA: 42s - loss: 0.35 - ETA: 41s - loss: 0.35 - ETA: 41s - loss: 0.35 - ETA: 40s - loss: 0.35 - ETA: 40s - loss: 0.35 - ETA: 39s - loss: 0.35 - ETA: 38s - loss: 0.35 - ETA: 38s - loss: 0.35 - ETA: 37s - loss: 0.35 - ETA: 36s - loss: 0.35 - ETA: 36s - loss: 0.35 - ETA: 35s - loss: 0.35 - ETA: 34s - loss: 0.36 - ETA: 34s - loss: 0.36 - ETA: 33s - loss: 0.36 - ETA: 32s - loss: 0.36 - ETA: 32s - loss: 0.36 - ETA: 31s - loss: 0.36 - ETA: 31s - loss: 0.36 - ETA: 30s - loss: 0.36 - ETA: 29s - loss: 0.36 - ETA: 29s - loss: 0.36 - ETA: 28s - loss: 0.36 - ETA: 28s - loss: 0.36 - ETA: 27s - loss: 0.36 - ETA: 26s - loss: 0.36 - ETA: 26s - loss: 0.36 - ETA: 25s - loss: 0.36 - ETA: 25s - loss: 0.36 - ETA: 24s - loss: 0.36 - ETA: 23s - loss: 0.36 - ETA: 23s - loss: 0.36 - ETA: 22s - loss: 0.36 - ETA: 21s - loss: 0.36 - ETA: 21s - loss: 0.36 - ETA: 20s - loss: 0.36 - ETA: 19s - loss: 0.36 - ETA: 19s - loss: 0.36 - ETA: 18s - loss: 0.37 - ETA: 18s - loss: 0.37 - ETA: 17s - loss: 0.37 - ETA: 16s - loss: 0.37 - ETA: 16s - loss: 0.37 - ETA: 15s - loss: 0.37 - ETA: 14s - loss: 0.37 - ETA: 14s - loss: 0.37 - ETA: 13s - loss: 0.37 - ETA: 12s - loss: 0.37 - ETA: 12s - loss: 0.37 - ETA: 11s - loss: 0.37 - ETA: 11s - loss: 0.37 - ETA: 10s - loss: 0.37 - ETA: 9s - loss: 0.3718 - ETA: 9s - loss: 0.371 - ETA: 8s - loss: 0.371 - ETA: 7s - loss: 0.371 - ETA: 7s - loss: 0.371 - ETA: 6s - loss: 0.372 - ETA: 6s - loss: 0.372 - ETA: 5s - loss: 0.372 - ETA: 4s - loss: 0.372 - ETA: 4s - loss: 0.372 - ETA: 3s - loss: 0.371 - ETA: 3s - loss: 0.372 - ETA: 2s - loss: 0.372 - ETA: 1s - loss: 0.372 - ETA: 1s - loss: 0.372 - ETA: 0s - loss: 0.372 - ETA: 0s - loss: 0.371 - 76s 683ms/step - loss: 0.3719 - val_loss: 0.1307\n",
      "\n",
      "Epoch 00024: val_loss did not improve from 0.11363\n",
      "Epoch 25/30\n",
      "111/111 [==============================] - ETA: 1:13 - loss: 0.207 - ETA: 1:23 - loss: 0.228 - ETA: 1:20 - loss: 0.218 - ETA: 1:18 - loss: 0.211 - ETA: 1:16 - loss: 0.203 - ETA: 1:16 - loss: 0.199 - ETA: 1:14 - loss: 0.196 - ETA: 1:11 - loss: 0.194 - ETA: 1:09 - loss: 0.193 - ETA: 1:08 - loss: 0.194 - ETA: 1:06 - loss: 0.194 - ETA: 1:05 - loss: 0.196 - ETA: 1:04 - loss: 0.198 - ETA: 1:03 - loss: 0.201 - ETA: 1:02 - loss: 0.203 - ETA: 1:01 - loss: 0.207 - ETA: 1:00 - loss: 0.211 - ETA: 59s - loss: 0.213 - ETA: 58s - loss: 0.21 - ETA: 57s - loss: 0.22 - ETA: 56s - loss: 0.22 - ETA: 55s - loss: 0.22 - ETA: 54s - loss: 0.23 - ETA: 54s - loss: 0.23 - ETA: 53s - loss: 0.23 - ETA: 52s - loss: 0.23 - ETA: 51s - loss: 0.24 - ETA: 50s - loss: 0.24 - ETA: 50s - loss: 0.24 - ETA: 49s - loss: 0.24 - ETA: 48s - loss: 0.25 - ETA: 47s - loss: 0.25 - ETA: 47s - loss: 0.25 - ETA: 46s - loss: 0.25 - ETA: 46s - loss: 0.25 - ETA: 45s - loss: 0.26 - ETA: 45s - loss: 0.26 - ETA: 44s - loss: 0.26 - ETA: 43s - loss: 0.26 - ETA: 43s - loss: 0.27 - ETA: 42s - loss: 0.27 - ETA: 41s - loss: 0.27 - ETA: 41s - loss: 0.27 - ETA: 40s - loss: 0.27 - ETA: 39s - loss: 0.27 - ETA: 39s - loss: 0.28 - ETA: 38s - loss: 0.28 - ETA: 37s - loss: 0.28 - ETA: 37s - loss: 0.28 - ETA: 36s - loss: 0.28 - ETA: 35s - loss: 0.28 - ETA: 35s - loss: 0.28 - ETA: 34s - loss: 0.29 - ETA: 34s - loss: 0.29 - ETA: 33s - loss: 0.29 - ETA: 32s - loss: 0.29 - ETA: 32s - loss: 0.29 - ETA: 31s - loss: 0.29 - ETA: 30s - loss: 0.29 - ETA: 30s - loss: 0.29 - ETA: 29s - loss: 0.29 - ETA: 29s - loss: 0.30 - ETA: 28s - loss: 0.30 - ETA: 27s - loss: 0.30 - ETA: 27s - loss: 0.30 - ETA: 26s - loss: 0.30 - ETA: 26s - loss: 0.30 - ETA: 25s - loss: 0.30 - ETA: 24s - loss: 0.30 - ETA: 24s - loss: 0.30 - ETA: 23s - loss: 0.30 - ETA: 23s - loss: 0.30 - ETA: 22s - loss: 0.30 - ETA: 21s - loss: 0.30 - ETA: 21s - loss: 0.31 - ETA: 20s - loss: 0.31 - ETA: 20s - loss: 0.31 - ETA: 19s - loss: 0.31 - ETA: 18s - loss: 0.31 - ETA: 18s - loss: 0.31 - ETA: 17s - loss: 0.31 - ETA: 17s - loss: 0.31 - ETA: 16s - loss: 0.31 - ETA: 15s - loss: 0.31 - ETA: 15s - loss: 0.31 - ETA: 14s - loss: 0.31 - ETA: 14s - loss: 0.31 - ETA: 13s - loss: 0.31 - ETA: 12s - loss: 0.31 - ETA: 12s - loss: 0.31 - ETA: 11s - loss: 0.31 - ETA: 11s - loss: 0.31 - ETA: 10s - loss: 0.32 - ETA: 9s - loss: 0.3207 - ETA: 9s - loss: 0.321 - ETA: 8s - loss: 0.321 - ETA: 8s - loss: 0.321 - ETA: 7s - loss: 0.322 - ETA: 7s - loss: 0.322 - ETA: 6s - loss: 0.323 - ETA: 5s - loss: 0.323 - ETA: 5s - loss: 0.323 - ETA: 4s - loss: 0.324 - ETA: 4s - loss: 0.324 - ETA: 3s - loss: 0.324 - ETA: 2s - loss: 0.325 - ETA: 2s - loss: 0.325 - ETA: 1s - loss: 0.325 - ETA: 1s - loss: 0.326 - ETA: 0s - loss: 0.326 - ETA: 0s - loss: 0.326 - 73s 659ms/step - loss: 0.3269 - val_loss: 0.1345\n",
      "\n",
      "Epoch 00025: val_loss did not improve from 0.11363\n",
      "Epoch 00025: early stopping\n",
      "4/4 [==============================] - ETA: 6s - loss: 0.227 - ETA: 4s - loss: 0.181 - ETA: 2s - loss: 0.166 - ETA: 0s - loss: 0.155 - 8s 2s/step - loss: 0.1554\n",
      "Val Score:  0.15544773638248444\n",
      "====================================================================================\n",
      "\n",
      "\n",
      "Computation time :  68.472 min\n"
     ]
    }
   ],
   "source": [
    "from time import time\n",
    "from processing.models import fit_and_evaluate\n",
    "t0 = time()\n",
    "n_folds = 2\n",
    "epochs = 30\n",
    "batch_size = 8\n",
    "\n",
    "\n",
    "#save the model history in a list after fitting so that we can plot later\n",
    "model_history = [] \n",
    "\n",
    "for i in range(n_folds):\n",
    "    print(\"Training on Fold: \",i+1)\n",
    "    new_model = efn.EfficientNetB1(weights='imagenet',include_top=False)\n",
    "    input_channel = 4\n",
    "    config = new_model.get_config()\n",
    "    config[\"layers\"][0][\"config\"][\"batch_input_shape\"] = (None, 240, 240, input_channel)\n",
    "    modify_name = config[\"layers\"][1][\"config\"][\"name\"]\n",
    "    custom_model = Model.from_config(config)\n",
    "    model = None\n",
    "    model = create_hybrid_transfer(trainAttrX.shape[1],new_model,custom_model,modify_name, input_channel, weight = True)\n",
    "    opt = Adam(lr=1e-3, decay=1e-3 / 200)\n",
    "    model.compile(loss=\"mean_squared_error\", optimizer=opt)\n",
    "    t_x, val_x, t_y, val_y = custom_shuffle_split(trainAttrX,train_dataset,trainY,test_size = 0.1)    \n",
    "    model_history.append(fit_and_evaluate(t_x, val_x, t_y, val_y, epochs, batch_size,model,es,cp))\n",
    "    print(\"=======\"*12, end=\"\\n\\n\\n\")\n",
    "\n",
    "print(\"Computation time : \", round((time() - t0)/60,3), \"min\")"
   ]
  },
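  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The loop above patches the pretrained EfficientNetB1 config to take a 4-channel input and delegates the actual weight injection to `create_hybrid_transfer`. As a minimal, illustrative sketch of that injection idea (not the project's helper itself): copy the ImageNet `stem_conv` kernels for the three RGB channels and initialise the extra channel with their mean, so the pretrained filters are preserved."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch only: inject ImageNet stem weights into a 4-channel stem_conv.\n",
    "# Mirrors the config patch used in the loop above; names are illustrative.\n",
    "import numpy as np\n",
    "import efficientnet.tfkeras as efn\n",
    "from tensorflow.keras.models import Model\n",
    "\n",
    "base = efn.EfficientNetB1(weights='imagenet', include_top=False)\n",
    "cfg = base.get_config()\n",
    "cfg['layers'][0]['config']['batch_input_shape'] = (None, 240, 240, 4)\n",
    "four_channel = Model.from_config(cfg)\n",
    "\n",
    "# stem_conv has no bias, so get_weights() returns a single kernel of\n",
    "# shape (3, 3, 3, 32); extend it to (3, 3, 4, 32) with the channel mean.\n",
    "kernel = base.get_layer('stem_conv').get_weights()[0]\n",
    "extra = kernel.mean(axis=2, keepdims=True)\n",
    "four_channel.get_layer('stem_conv').set_weights(\n",
    "    [np.concatenate([kernel, extra], axis=2)])"
   ]
  },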
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "TensorShape([None, 240, 240, 4])"
      ]
     },
     "execution_count": 22,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import tensorflow as tf\n",
    "tf.TensorShape([None, 240, 240, input_channel])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(1093, 6) (140, 240, 240, 4)\n"
     ]
    }
   ],
   "source": [
    "print(trainAttrX.shape,trainImagesX.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(1093, 6) (1093, 240, 240, 4)\n"
     ]
    }
   ],
   "source": [
    "print(trainAttrX.shape,train_dataset.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>First_Percent</th>\n",
       "      <th>Delta_week</th>\n",
       "      <th>First_FVC_scaled</th>\n",
       "      <th>Age_scaled</th>\n",
       "      <th>Sex_le</th>\n",
       "      <th>SmokingStatus_le</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>0.582536</td>\n",
       "      <td>0.067669</td>\n",
       "      <td>-0.631784</td>\n",
       "      <td>1.684379</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.582536</td>\n",
       "      <td>0.082707</td>\n",
       "      <td>-0.631784</td>\n",
       "      <td>1.684379</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.582536</td>\n",
       "      <td>0.097744</td>\n",
       "      <td>-0.631784</td>\n",
       "      <td>1.684379</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>0.582536</td>\n",
       "      <td>0.112782</td>\n",
       "      <td>-0.631784</td>\n",
       "      <td>1.684379</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>0.582536</td>\n",
       "      <td>0.157895</td>\n",
       "      <td>-0.631784</td>\n",
       "      <td>1.684379</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "   First_Percent  Delta_week  First_FVC_scaled  Age_scaled  Sex_le  \\\n",
       "0       0.582536    0.067669         -0.631784    1.684379       1   \n",
       "1       0.582536    0.082707         -0.631784    1.684379       1   \n",
       "2       0.582536    0.097744         -0.631784    1.684379       1   \n",
       "3       0.582536    0.112782         -0.631784    1.684379       1   \n",
       "4       0.582536    0.157895         -0.631784    1.684379       1   \n",
       "\n",
       "   SmokingStatus_le  \n",
       "0                 1  \n",
       "1                 1  \n",
       "2                 1  \n",
       "3                 1  \n",
       "4                 1  "
      ]
     },
     "execution_count": 25,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "trainAttrX.head(5)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAEICAYAAABPgw/pAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAA63ElEQVR4nO3deViVZfrA8e/NLoKAiguC4q6YioprZmqW2mZ7mk3Zavsy7ctM/Zpppm1qqmm1fVEzm8rKsrHcSlMxTcUVFRURRZBN2Xl+f7wHRGQ5wIHDOef+XBcXnPe85z3366mbh2e5HzHGoJRSyvV5OTsApZRSjqEJXSml3IQmdKWUchOa0JVSyk1oQldKKTehCV0ppdyEJnSlmhkRmSEivzg7DuV6NKGrRiciSSIywdlx1IeIjBWRUhHJrfQ10tmxKVWZj7MDUMoFpBhjIp0dhFK10Ra6choR8ReRf4tIiu3r3yLib3uurYh8KyKZIpIhIitExMv23EMickBEckRku4icVcW1h4tIqoh4Vzh2sYhstP08TETiRSRbRA6JyIv1vIelIvJPEVlju9bXItK6wvMXikiC7T6WikjfCs9Fich/RSRNRNJF5D+Vrv2CiBwVkT0iMrnC8Rkistt2/3tEZHp9YlfuRxO6cqbHgBFALDAQGAY8bnvuPiAZCAfaA48CRkR6A3cAQ40xwcBEIKnyhY0xq4FjwPgKh68CZtt+fhl42RjTCugOzGvAfVwDXA90BIqBVwBEpBcwB7jHdh8LgW9ExM/2i+ZbYC8QDXQC5la45nBgO9AWeA54VywtbdefbLv/UcCGBsSu3IgmdOVM04GnjDGHjTFpwP8Bf7I9V4SVILsYY4qMMSuMVXioBPAHYkTE1xiTZIzZVc315wDTAEQkGDjXdqzs+j1EpK0xJtcY81sNcUbYWtgVv1pWeP5jY8xmY8wx4C/AFbaEfSXwnTHmf8aYIuAFoAVWEh4GRAAPGGOOGWPyjTEVB0L3GmNmGWNKgA9t/xbtbc+VAqeJSAtjzEFjTEINsSsPogldOVMEVgu1zF7bMYDngUTgR1v3wsMAxphErBbvk8BhEZkrIhFUbTZwia0b5xLgd2NM2fvdAPQCtonIWhE5v4Y4U4wxoZW+jlV4fn+le/DFalmfdH/GmFLbuZ2AKKykXVzNe6ZWeN1x249Btve9ErgFOCgi34lInxpiVx5EE7pyphSgS4XHnW3HMMbkGGPuM8Z0Ay4E/lzWV26MmW2MGW17rQGererixpgtWAl1Mid3t2CM2WmMmQa0s71+fqVWd11EVbqHIuBI5fsTEbGdewArsXcWkTpPTDDGLDLGnI3Vat8GzKpn3MrNaEJXTcVXRAIqfPlgdX88LiLhItIW+CvwCYCInC8iPWxJMAurq6VURHqLyHhbqzsfyMPqgqjObOBuYAzwedlBEblaRMJtreZM2+GarlOTq0UkRkQCgaeA+bauknnAeSJyloj4Yo0LFAArgTXAQeAZEWlp+zc5vbY3EpH2IjLF9sunAMhtQNzKzWhCV01lIVbyLft6Evg7EA9sBDYBv9uOAfQEFmMlrFXA68aYJVj9589gtYBTsVrYj9TwvnOAM4GfjTFHKhyfBCSISC7WAOlUY0xeNdeIqGIe+qUVnv8Y+MAWTwBwF4AxZjtwNfCqLd4LgAuMMYW2hH8B0APYhzUAfGUN91HGC/gzVus/w3Zvt9rxOuUBRDe4UKr+RGQp8Ikx5h1nx6KUttCVUspNaEJXSik3oV0uSinlJrSFrpRSbsJpxbnatm1roqOjnfX2SinlktatW3fEGBNe1XNOS+jR0dHEx8c76+2VUsolicje6p7TLhellHITmtCVUspNaEJXSik3oTsWKeUhioqKSE5OJj8/39mhKDsEBAQQGRmJr6+v3a/RhK6Uh0hOTiY4OJjo6GismmequTLGkJ6eTnJyMl27drX7ddrlopSHyM/Pp02bNprMXYCI0KZNmzr/NaUJXSkPosncddTns3K5hB6flMEz329DSxYopdTJXC6hJ6Rk8+ayXRzOKXB2KEqpOkhPTyc2NpbY2Fg6dOhAp06dyh8XFhbW+Nr4+HjuuuuuWt9j1KhRDol16dKlhISElMc3YcKEGs+Pjo7myJEjpxx/8skneeGFF045vnz5cgYPHoyPjw/z5893SMzggoOiMRGtANiSkk37VgFOjkYpZa82bdqwYcMGwEp0QUFB3H///eXPFxcX4+NTdUqKi4sjLi6u1vdYuXKlQ2IFOOOMM/j2228ddr2KOnfuzAcffFBlsm8Il2uh9+kQDMCWg9lOjkQp1VAzZszglltuYfjw4Tz44IOsWbOGkSNHMmjQIEaNGsX27dsBq8V8/vnWPt5PPvkk119/PWPHjqVbt2688sor5dcLCgoqP3/s2LFcdtll9OnTh+nTp5d30y5cuJA+ffowZMgQ7rrrrvLr2mPOnDn079+f0047jYceeqjKc55++ml69erF6NGjy+OvLDo6mgEDBuDl5dgUbFcLXUQmYW3T5Q28Y4x5ptLzXYD3gHCsbbGuNsYkOzRSm+AAX7q0CWRLiiZ0perr/75JcPj/QzERrXjign51fl1ycjIrV67E29ub7OxsVqxYgY+PD4sXL+bRRx/liy++OOU127ZtY8mSJeTk5NC7d29uvfXWU+Zrr1+/noSEBCIiIjj99NP59ddfiYuLY+bMmSxfvpyuXbsybdq0auNasWIFsbGxAFx++eVcd911PPTQQ6xbt46wsDDOOeccvvrqKy666KLy16xbt465c+eyYcMGiouLGTx4MEOGDKnzv0l91ZrQRcQbeA04G2vfw7UissC2o3qZF4CPjDEfish44J/AnxojYFLWc0/Ad7yScl6jXF4p1bQuv/xyvL29AcjKyuLaa69l586diAhFRUVVvua8887D398ff39/2rVrx6FDh4iMjDzpnGHDhpUfi42NJSkpiaCgILp161Y+t3vatGm8/fbbVb5H5S6Xr7/+mrFjxxIebhU6nD59OsuXLz8poa9YsYKLL76YwMBAAC688MJ6/IvUnz0t9GFAojFmN4CIzAWmABUTegzWxrUAS4CvHBjjyfau5OL0WTxVEEduQTFB/i43DKCU09WnJd1YWrZsWf7zX/7yF8aNG8eXX35JUlISY8eOrfI1/v7+5T97e3tTXFxcr3PcjT0dOJ2A/RUeJ9uOVfQHcInt54uBYBFpU/lCInKziMSLSHxaWlp94oWwaACiOMz2VO12UcqdZGVl0amTlV4++OADh1+/d+/e7N69m6SkJAA+++wzu187bNgwli1bxpEjRygpKWHOnDmceeaZJ50zZswYvvrqK/Ly8sjJyeGbb75xZPi1clSP/P3AmSKyHjgTOACUVD7JGPO2MSbOGBNX9mdLnYV2ASBK0rQfXSk38+CDD/LII48waNCgRmlRt2jRgtdff51JkyYxZMgQgoODCQkJseu1HTt25JlnnmHcuHEMHDiQIUOGMGXKlJPOGTx4MFdeeSUDBw5k8uTJDB06tMprrV27lsj
ISD7//HNmzpxJv36O+Yup1j1FRWQk8KQxZqLt8SMAxph/VnN+ELDNGBNZ1fNl4uLiTL02uCjIgX9G8rJMJ7X/LfzzkgF1v4ZSHmjr1q307dvX2WE4XW5uLkFBQRhjuP322+nZsyf33nuvs8OqUlWfmYisM8ZUOYfTnhb6WqCniHQVET9gKrCg0hu0FZGyaz2CNeOlcfgHQ2AbTgs8qi10pVSdzZo1i9jYWPr160dWVhYzZ850dkgOU+uIojGmWETuABZhTVt8zxiTICJPAfHGmAXAWOCfImKA5cDtjRgzhHaha+4RtqXmUFxSio+3y02nV0o5yb333ttsW+QNZdcUEWPMQmBhpWN/rfDzfMBx61drE9aFdlnrKCguZc+RY/RsH9xkb62UUs2VazZtw6JpmXcQL0p1xahSStm4ZkIP7YKUFhHlk6n96EopZeOaCd02F31U6xxtoSullI2LJnRrLvrg4Cy2pGRrbXSlXICWzz3hxRdfJCYmhgEDBnDWWWexd+9eh8TtmuvmQ6JAvOjln0H6sUIO5xRoKV2lmjktn3vCoEGDiI+PJzAwkDfeeIMHH3ywTqtWq+OaLXRvX2gVSSSHAEhIyXJyQEqp+vDU8rnjxo0rL+A1YsQIkpMdU5zWNVvoAGFdCC04CFibXYzv097JASnlQr5/GFI3OfaaHfrD5GdqP68STy+f++677zJ58mT7/8Fq4LoJPbQL3omLrdroOjCqlMvy5PK5n3zyCfHx8SxbtqzG8+zlugk9LBpyUxnYzZ+NOnVRqbqpR0u6sXhq+dzFixfz9NNPs2zZspNibQjX7EOH8pkuw8JySUo/Tm5B8/qwlFJ15ynlc9evX8/MmTNZsGAB7dq1q/f9VObCCT0agNMCMwDYpt0uSrk8Tymf+8ADD5Cbm8vll19ObGysw3Y2qrV8bmOpd/ncMjmH4F+9yBr3NAO/78pTU/pxzchoh8WnlLvR8rkWTy+f2zwFtQOfFrTKSyEs0FdLACil7OLR5XObLREI64Jk7iUmopXOdFFK2cWdy+e6bgsdrO3oju6lX0RIeW10pVT1tEyG66jPZ+XaCT2sC2TuJaZDMIXFpew+cszZESnVbAUEBJCenq5J3QUYY0hPTycgoG4lTVy3ywWsmS4F2ZzWxmqZb0nJppdudqFUlSIjI0lOTiYtLc3ZoSg7BAQEnLJYqjaundBDrbno0V5p+Pl4seVgNhcN6uTkoJRqnnx9fctXSCr35OJdLtEA+GTvo0+HYJ3popTyaC6e0K0WOkeTiOlozXTR/kGllKdy7YTuHwwtWsNRa+pixrFCDmUXODsqpZRyCtdO6GB1u9ha6ABbDmptdKWUZ3KDhG5NXexTltC1H10p5aFcP6GHdoHM/QT5CtFaG10p5cHsSugiMklEtotIoog8XMXznUVkiYisF5GNInKu40OtRlg0lBZBdopVAkBb6EopD1VrQhcRb+A1YDIQA0wTkZhKpz0OzDPGDAKmAq87OtBqlc10ydxLTMdWWhtdKeWx7GmhDwMSjTG7jTGFwFxgSqVzDNDK9nMIkOK4EGsRWjZ10ZrpAlobXSnlmexJ6J2A/RUeJ9uOVfQkcLWIJAMLgTurupCI3Cwi8SIS77DlxyFRIF62mS5WoXrtR1dKeSJHDYpOAz4wxkQC5wIfi8gp1zbGvG2MiTPGxJVttNpgPn7QqhNk7qV9K39at/TTfnSllEeyJ6EfAKIqPI60HavoBmAegDFmFRAAtHVEgHaxzUUXkfIVo0op5WnsSehrgZ4i0lVE/LAGPRdUOmcfcBaAiPTFSuhNV9LNVhcdICaildZGV0p5pFoTujGmGLgDWARsxZrNkiAiT4lI2c6m9wE3icgfwBxghmnKoiphXSA3FYryiOnYSmujK6U8kl3lc40xC7EGOyse+2uFn7cApzs2tDqwVV0kcx8xERGA1kZXSnke118pCidNXezWtmV5bXSllPIk7pHQKywu8vH2ok+HYBJStEiXUsqzuEdCD2oPPgFwNAnAmumSorXRlVKexT0SuohtpksSYM10OXq8iNTsfOfGpZRSTcg9EjrY5qLbpi5qKV2llAdyo4Ru1UXHGK2NrpTySO6T0EO7QEE25B0lyN9Ha6MrpTyO+yT0srnoFfrRNaErpTyJGyX0E1MXwepH35t+nJz8IicGpZRSTcd9EnqFxUXAidroqTnOikgppZqU+yT0gFbQonV5l0u/CFttdB0YVUp5CPdJ6HBipgvQLtifNlobXSnlQdwsoUeXd7mIiA6MKqU8insl9NAukLkPSksAa2B0+6EcirQ2ulLKA7hXQg/rAqVFkHMQsAZGC4tL2Z2mtdGVUu7PzRJ6tPW9QpEugC0HtfKiUsr9uVdCrzR1sWvblvj7eOnAqFLKI7hXQg+JAqR8pktZbfTNBzShK6Xcn3sldB8/CIks73IBGBAZyqYDWZSUam10pZR7c6+EDra66HvLH8ZGhZJbUMyutFwnBqWUUo3P/RJ6WHR5lwtAbOdQADbsy3RKOEop1VTcMKF3saYtFuUB0LVNS1oF+LB+f6Zz41JKqUbmfgm9bKZL5n4AvLyEgVGhbNCErpRyc+6X0CvNRQcYFBXK9tRsjhUUOyUkpZRqCnYldBGZJCLbRSRRRB6u4vmXRGSD7WuHiGQ6PFJ7VaqLDjCocxilBjYd0AVGSin3VWtCFxFv4DVgMhADTBORmIrnGGPuNcbEGmNigVeB/zZCrPYJag8+ASe10AdGhQJot4tSyq3Z00IfBiQaY3YbYwqBucCUGs6fBsxxRHD1ImKbuphUfqh1Sz+6tAnUmS5KKbdmT0LvBOyv8DjZduwUItIF6Ar8XM3zN4tIvIjEp6Wl1TVW+1Woi14mVgdGlVJuztGDolOB+caYkqqeNMa8bYyJM8bEhYeHO/itKyiri25OrA6NjQolNTufg1l5jfe+SinlRPYk9ANAVIXHkbZjVZmKM7tbyoR2gYJsyDtafii2rB9du12UUm7KnoS+FugpIl1FxA8raS+ofJKI9AHCgFWODbEeqpjpEhPRCj9vL+12UUq5rVoTujGmGLgDWARsBeYZYxJE5CkRubDCqVOBucYY51fBqmIuur+PNzERrXTFqFLKbfnYc5IxZiGwsNKxv1Z6/KTjwmqgSnXRy8RGhfLZ2v0Ul5Ti4+1+a6qUUp7NPbNaQCtoEXbKTJdBnUPJKyphxyGtvKiUcj/umdDBNtMl6aRDZQOj6/cfPeV0pZRyde6b0CvVRQfo3DqQ1i39dKaLUsotuW9CD+sCWfuh9MSUeBHRBUZKKbflxgk9GkoKrdroFcRGhZKYlktOfpFz4lJKqUbivgm9hpkuxsDGZK28qJRyL+6b0Mvmolea6VJWeXH9Ph0YVUq5F/dN6CFRgJwy0yWkhS/dwltqP7pSyu24b0L38YNWnU7pcoETlRebw6JWpZRyFPdN6FDlXHSwdjA6kltI8lGtvKiUch9untBPrYsO1h6joDsYKaXci3sn9NAu1rTFovyTDvfuEIy/jxfrdYGRUsqNuHdCL5/psu+kw77eXvTvFMIGLQ
GglHIjbp7QT62LXiY2KpTNKdkUFpc2cVBKKdU43DyhR1vfqxkYLSwuZVtqdpOGpJRSjcW9E3pQe/AJqDKhx3YOBXRgVCnlPtw7oYtAaOcqE3pESADhwf5aeVEp5TbcO6EDtOkJadtPOVxWeVG3pFNKuQv3T+gRsZCeCPmn9pXHRoWy58gxMo8XNn1cSinlYO6f0DvGAgZSN57ylC4wUkq5E/dP6BGx1veUDac8NSAqFBFN6Eop9+D+CT2onVWk6+CGU5/y96FXu2BN6Eopt+D+CR2sbpcqWuiglReVUu7DMxJ6TQOjnUPJPF5EUvrxpo9LKaUcyK6ELiKTRGS7iCSKyMPVnHOFiGwRkQQRme3YMBuohoHR2PKBUa3ropRybbUmdBHxBl4DJgMxwDQRial0Tk/gEeB0Y0w/4B7Hh9oANQyM9mofTKCfty4wUkq5PHta6MOARGPMbmNMITAXmFLpnJuA14wxRwGMMYcdG2YD1TAw6u0lDIgM0YFRpZTLsyehdwL2V3icbDtWUS+gl4j8KiK/icikqi4kIjeLSLyIxKelpdUv4vqqcWA0jC0Hs8kvKmnSkJRSypEcNSjqA/QExgLTgFkiElr5JGPM28aYOGNMXHh4uIPe2k61rBgtKjEkpGjlRaWU67InoR8Aoio8jrQdqygZWGCMKTLG7AF2YCX45qOmFaNaeVEp5QbsSehrgZ4i0lVE/ICpwIJK53yF1TpHRNpidcHsdlyYDlDDwGj7VgF0DAnQhK6Ucmm1JnRjTDFwB7AI2ArMM8YkiMhTInKh7bRFQLqIbAGWAA8YY9IbK+h6qWFgFMoWGOnURaWU6/Kx5yRjzEJgYaVjf63wswH+bPtqvmoYGB3UOZTvN6dyJLeAtkH+TRqWUko5gmesFC0TEQvpO6sZGA0D0PnoSimX5VkJvWOs9b2KgdH+nULw9hLtR1dKuSzPSug1DIy28POmd3utvKiUcl2eldBrGxjtHMof+zMpLdXKi0op1+NZCR1qHhiNCiWnoJjdR3KbNCSllHIEz0voNQyMli0wWrdXpy8qpVyP5yX0GgZGu7UNIrpNIJ+u3qcbXiilXI7nJfQaBka9vISZZ3ZnY3IWvyY2r3VRSilVG89L6LUMjF4yuBPtgv15Y1li08allFIN5HkJHWocGPX38ebGM7rya2I6f+gURqWUC/HMhF7DwCjAVcO70CrAhzeW7mrauJRSqgE8M6HXMDAKEOTvw7Wjolm0JZXEwzqFUSnlGjwzodcwMFpmxqho/H28eGuZttKVUq7BMxN6LQOjAG2C/Jk6tDNfrj9ASmZe08WmlFL15JkJHWwDo+trPOXGM7oC8M6KPU0QkFJKNYznJvQa9hgtExkWyIWxEcxZs4+MY4VNF5tSStWDByf0Qdb3agZGy9xyZnfyikr4cGVS48eklFIN4LkJvWymSw0DowC92gdzdkx7PliZxLGC4kYPSyml6stzE3pQeK0Do2VuHdudrLwi5qzZ1/hxKaVUPXluQge7BkYBBncOY0S31ryzYg8FxSWNH5dSStWDZyd0OwZGy9w6tgep2fl8vT6l8eNSSql68PCEbt/AKMCYnm3pF9GKN5ftokR3NFJKNUOendDtHBgFEBFuHdud3UeO8WNCaqOGpZRS9WFXQheRSSKyXUQSReThKp6fISJpIrLB9nWj40NtBHUYGAWYfFpHotsE8vrSXboBhlKq2ak1oYuIN/AaMBmIAaaJSEwVp35mjIm1fb3j4Dgbj50DowDetg0wNh3QDTCUUs2PPS30YUCiMWa3MaYQmAtMadywmlAdBkbhxAYYry/VDTCUUs2LPQm9E7C/wuNk27HKLhWRjSIyX0SiHBJdU6jDwCic2ABj5a50NugGGEqpZsRRg6LfANHGmAHA/4APqzpJRG4WkXgRiU9LS3PQWzdQHQZGy5zYAENb6Uqp5sOehH4AqNjijrQdK2eMSTfGFNgevgMMqepCxpi3jTFxxpi48PDw+sTreGUDo3b2o0OFDTASDpF4OKcRg1NKKfvZk9DXAj1FpKuI+AFTgQUVTxCRjhUeXghsdVyITaBjrN0zXcrMGBVNgK8Xby7b3SghKaVUXdWa0I0xxcAdwCKsRD3PGJMgIk+JyIW20+4SkQQR+QO4C5jRWAE3iohBdRoYhRMbYHy1/oD2pSulmgW7+tCNMQuNMb2MMd2NMU/bjv3VGLPA9vMjxph+xpiBxphxxphtjRm0w5VtSWfnwGiZ28f1oGNoANe8u5rNB7IcH5dSStWBZ68ULVOPgVGA8GB/Zt84guAAX65+dzXbUu1v4SullKNpQod6DYwCUFxIVOEuZt80HH8fL6bPWq2DpEopp9GEXqauA6OHtsCs8fDmaLpk/87sm0YgIlw1azV7jhxrrCiVUqpamtDL2DswWloKq16Dt8dCbiq0CINV/6F7eBCzbxpOcanhqlm/sS/9eJOErZRSZTShl7FnYDQrGT6eAosehR5nwa2rYNhM2PEDHEmkV/tgPrlhOMcLS5g26zeSj2pSV0o1HU3oZWobGN34Obw+CpLXwQWvwNTZVt/70BvA2w9+ex2AmIhWfHLDcLLzi7hq1mpSs/KbJHyllNKEXqa6gdG8ozD/evjvjRDeG279BYZcCyK217WDAVfAhtlwPAOA/pEhfHT9MDKOFXLVrN84nKNJXSnV+DShV1R5YHTXEqtVvuVrGP84XPc9tO526utG3A7FebDu/fJDgzqH8f51Q0nNzmf6rNWk5xac+jqllHIgTegVlQ2M5qbB9w/DxxeBfxDcuBjGPADePlW/rn0MdBsHq9+G4sLyw0OjW/POtXHsyzjO9HdWc/RYYdWvV0opB9CEXlHZwOgbI2H1G9aA583LTpTYrcnIO6xZLwn/PenwqO5tmXVNHLuPHONP760mK6/I8XErpRSa0E8WMQjEG7x84Or/wrnPgV+gfa/tcRaE94FV/4FK29ON6RXOW1cPYXtqDle+tYr4pIxGCF4p5ek0oVfUsi3c9DPctspK0HUhAiNug9RNkPTLKU+P69OOWdfEkXm8iMveXMU9c9c7fgZMziHHXk8p5VI0oVcWEWstFqqPAVdAYBtr4VEVxvZux0/3nckd43qwcHMq4/+1lNeWJJJfVFL/eAGOZ1A6/wb4Vy9Kf3mlYddSSrksTeiO5NsCht4IO76HI1XvZtTS34f7J/Zm8b1nMrpHW55ftJ1zXlrOooRUTKWumtoYY9j16xfkvDSU0s1fsrk0Ghb/lczfv3TAzSilXI0mdEcbeqO10Gj1GzWe1rlNIG9fE8cnN1iFvWZ+vI5r3lvDzkO1F/falZbLfxauY+HTl9L9f9eTUtiC56Ne4+dRH7PJdMN/wUxW/7rEUXeklHIRUtdWoaPExcWZ+Ph4p7x3o/vqdtj8Bfx5CwS2rvX0opJSPl61l5cW7+B4YQnXjOzCPRN6EdLCt/ycw9n5LPgjha83pBB88Fee932bjpLB9u7X0enip2gVFATAnj2JBH00kaLSUj467T3unjKGFn7ejXarSqmmJSLrjDFxVT6nCb0RHEqAN0bBW
X+FM+6z+2XpuQW88OMO5q7dR1igH/ed0ws/by++3pDCyl1H8Df5PB/yX84v+JbisO74XPIWRA095TqFyRvgvUlsKe7IYyHP8vy0EcREtHLgDSqlnEUTujN8dBGkbYO7N4KPX51euvlAFv/3TQJrk44C0Ll1ILd1S+OSfU/jl50Ew2+1flnUNKVy20LM3Kv4SUZwe+GdPDCpL9ef3hUvL6n/PbmiwuPw3kQYfA0Mu8nZ0SjVYDUldO1Dbywjb4ecg5BQ9wHK0zqFMG/mSD6+YRhf3jyYZQN/Yurmm/HzKoVrv4XJz9Q+P77PucjZTzHBrOKF8IX8/butXPv+Gg5ne1hdmfh3rQqaP//NqsujlBvThN5Yup8FbXtXudDIHiLCGYH7GbRwCrLqVRgyA25dCV3PsP8io+6EQVdzQeYnfDJsL2uTMpj08goWb/GQ+eoFufDLS9Cun1Xn/peXnB2RUo1KE3pj8fKCkbdZrcO9v9bttaUlsPx5ePdsKMiB6V/ABf8G/+C6XUcEznsJuoxmdMITLL4sgA6tArjxo3ge/2oTeYUNnP/e3K1+E46nw4WvwIArYfVbkHXA2VEp1Wg0oTemAVfWuNCoSpn74IPz4ee/Q98L4baV0HNC/WPw8YMrP4aQSCIX3ciX0yO56YyufPLbPi5+/Vf3rQKZlwkrX4FekyEyDsY9CqYUlj3j7MiUajSa0BuTbwuIuwG2fw/pu2o/f9N8eGO0VT7g4rfgsvfqv2q1osDWcNU8KC3C/7NpPHZWJB9cN5Q9R45x3QdryS0obvh7NDerXoP8LCuRA4R1sT6L9Z9A2g7nxqZUI9GE3tiG3gjevvBbDQuN8rPgi5vgixugXR9rE42BU09souEIbXvCFR/BkR0w/3rG9mjN69MHk5CSzcyP4ykodlD3S3EhbJwHn10N8e9b3UdN7Vi6tYNUzBToOODE8TH3g2+gNUDqKkrc8JetajR2JXQRmSQi20UkUUQeruG8S0XEiEiVU2o8UnB76H85bPi0fEejk+xdZbXKN38BYx+FGQshLLpxYuk2Fs57ARL/Bz8+zll92/PcpQP4NTGde+ZuoKS0AVNYs1Pg56fhpX7w35tgzwr49h6YNR72r3HUHdjn139D4THr37Oilm2tgeKtC6ytBJu7zV/AM1GwZYGzI1EuotaELiLewGvAZCAGmCYiMVWcFwzcDax2dJAub8RtUHQc1n1w4lhJkdVP/sG51gDq9Ytg7EPVb6LhKHHXW/GsfgNWvsqlp4Xyl/Nj+H5zKo9/talu9WSMgb0r4fMZ8O/+1kBupyFW6eEH98Cl70LuIWtw98tbmqYaZM4hWDPLKpTWrs+pz4+8HQLbwuIn6jX7qMms/xS+uNH67+an/9OWurKLPS30YUCiMWa3MaYQmAtMqeK8vwHPAh420dkOHU6zWsdrbDsape+yFrssfx4GToNbfqlyxWejOefv0HMi/Pg4PBPFDRuv4puo2Xite5+P/vu19cumJoW2X05vjob3J1tb9Y24Fe5aD1fNtUoPe3lB/8vgjngYfa81PvDqEFj56km7OjncLy9CSSGc+VDVz/sHw5kPQtIK2PVz48XREGvfga9vg65nwiWzrF20Nn3u7KiUC6h1paiIXAZMMsbcaHv8J2C4MeaOCucMBh4zxlwqIkuB+40xNS4DdfuVopXt+BFmX27NfNn6rdUSv+Bl6Hexc+IpKYLdS+HAOjiwDnNgHXI8HYBiL398IgZare1OQ6DTYGsv1aNJVrJZ/7HV79/+NBh2s9WlVGGh05o9GXy4KomBkSFMG9aZ4ABf65fYDw/Dzh+hbS+Y/Cx0H+/Ye8pKhlcGWeMPF75a/XnFBfCfOAgItXak8mpGQ0mrXoNFj0KvSXD5h+DjD2+NgYJs65ejt2/t11BuraaVog3++15EvIAXgRl2nHszcDNA586dG/rWrqXHBCuRbfwMos+Ai9+EkEjnxePtCz3Ptr4AMYaSjCQ++PwLSpPXcfHxVNr+/uGJqpEBIdbiHPGCmAutRN555EkDt3vTj/HM99v4fnMqwf4+fLfxIK/+lMhVIzpz/eldaT/9c9j+g5XYP74Y+pwPE/9hzUBxhOXPW90oYx6o+Twffxj3OHx5s7VlYP/LHPP+DbX8easbLuYiq2VeVjJi3KMwZyr8MccqYaBUNexpoY8EnjTGTLQ9fgTAGPNP2+MQYBeQa3tJByADuLCmVrrHtdDBag0f2gKxV4FX86yAWFBcwg0fxLNqdzpvXzWQs9oeteJO+R2COsCQa6FVxEmvycor4rUliXzwaxLeXsKtY7tz0xnd2Hk4h7eW7+b7TQfx9hIuHtSJm8d0o0eYr7WCdsW/rLnhp98Do++xpnnWV8Zu+M9QGHKdNfBbm9JSeOsMa/D09jV1rrfjUMZYM29W/AsGTIUpr508lmKMNbh87Ajcuc65sSqna1BxLhHxAXYAZwEHgLXAVcaYhGrOX4p2ubi03IJips/6jW2pOXx0/TCGd2tT5XnFJaXMWbOPlxbv5OjxQi4bHMn9E3vTvlXASeftSz/OO7/sZl78fvKLSpnQtz23nNmNuLDj8ONfrFZyaGe48hPoOLB+QX95i1U3564N0Kqjfa8p6wY79wXnFe4yxupi+e11q7zDeS9V3QW0czF8eimc/5I1sK08VoOrLYrIucC/AW/gPWPM0yLyFBBvjFlQ6dylaEJ3eRnHCrn8zZUczi5g7swR9IsIKX/OGMPSHWk8/d1WEg/nMqJbax4/L4bTOoXUcEXrmh+tSuLDlUkcPV7E4M6hzDyzO2e32IHXV7day/QvfqPu4wppO+D14dbsnYlP2/86Y+D9c61Bx7s3gF/Lur1vQ5WWwnd/hnXvw/BbYNIz1a89MAbePQeyD1iDzz7+TRuraja0fK6ql5TMPC57YyWFJaXMv2UU0W1bsj01h79/t4UVO48Q3SaQR8/ty9kx7ZE6LILKKyzh83X7mbViN/sz8ugW3pK7h4dw4baHkOTV1gyVMx+2f7Dy8+tgxyK4Z6M117wu9q+xplWOf7z2vndHKimGBXdY/eKj74Wznqh9IdmuJfDxRc79i0I5nSZ0VW+Jh3O5/M2VtPT34Yyebfls7X6C/H24e0Iv/jSiC34+9Z8hUlxSyvebU3l7+W42Hchict8wXg76GL9Ns6HvBXDRm+AfVPNFUjfDm6dbG4mc9df6BTLnKmsa491/2LXDVIOVFFmLrxK+hHGPWb9I7PmFWPYXRcZu6y+Khow5KJel9dBVvfVoF8QH1w0j41ghn8cnc83IaJY9MI4bRndtUDIH8PH24oKBESy443SeuCCG/23PZOLuKzg88gnY9p01Vz9zX80XWfIP8A+xVoDW11l/gcJca1CysRUXwLxrrGR+9t+sOfH2/nUjYs14yU21yip4KGMMX65PZntq7fvvehptoSu77E7LxdtL6NKm8fqZ1yZlcNunv5ObX8y7o7MY9fsD1vTKKz+BLiNPfcGBddbsj3GPWYmxIb66zVr8dOc6CI1q2LWqU5ADn/0Jdi9pWLfJhxfA4a3WXxRN3e/fDLy+
NJHnfthOdJtAFt07Bn+f5jljrLFoC101WLfwoEZN5gBDo1vz3Z2j6d8phKuWBPFq9zcxAaFWAlv34akv+PlpaNHaGlBsqLGPWN+XNlJ53eyD1qraPcutaYkN6QMf+ygcS4O17zouPhcxZ80+nvthO4M7h5KUfpx3VuxxdkjNiiZ01ay0axXApzcN54bRXfnX74YZ3v+gIOp0+OYu+P6hEzVN9q6CXT9Z89cDHLABdmiUlWT/mA2HtzX8ehUd2gLvTICMPTB9Hgy6umHX6zLSWmX767+tXZnqo/A4LLgL3jkbvr3X6sI5sA6Kqq7cUVxSyvx1ySQeruf7OcDCTQd57MtNjO0dzmczRzKxX3v+83MiKZl51b+oKB9+exPeOhO+vNUqeObGWxFql4tqtr75I4WHvthIsK/wVa8f6Lj1PasmzmXvW10XR3bYuh1q2V/VXscz4OWB1kreabMdc83dS61YfQNh+ucnl/OtQXFJKfPik/l09V56tw/m3P4dGd2zLQG+tu6F5Hh45yxrIPiM++oWU3YKzJkGB/+AyKGQth0KsqznxBvCe0OHAdChP3QcQLJ/D+76ag+/78vEx0u4ZmQ0d0/oSUiLpitD8MvOI1z/wVoGRIbw8Q3DaeHnzf6M40x4cRkTYtrz2lWDT35BcSFs+ASWv2BN9ewwwBqPyc+0VjtHDrOtlD7Huk9HlqpuZDrLRbmsHYdyuOXjdezNOM77A7dxxvZ/IIFtrIHBSc/CCAd0t1S07HlY8ndrkc/YR63yx/W1YY41NbFNTyuZ29E3b4zhp62HeeaHbSQezqVPh2BSMvPIzi8myN+HCX3bcW7/jozpFU7AvGmwfzXcs8n+v1JS1lvJvCAHLn0Hek+2Zs8cTbK2S0zdBAc3Wj/nHCx/WTLhmA4D+TJ4Oi9t9ics0I/7zunF1KGd8fZq3GS4YX8mV836jc6tA/ns5pGEBJ74RfLy4p28tHgHs28czqgeba2/4DbNs7rOMvdaiXv849DtTKs2/4F1Vj2hnT9av9DAWgHdcwL0OBu6j7PKXDRjmtCVS8vJL+L+z/9gUcIh7uxxhHsz/oaXb4BVrMo3oPYL1EVxASx+0irB6+0Hp98No+6o2+CjMVZdliVPQ9cxcMXH0CK01pdt2J/JPxZuZc2eDLq1bclDk/twTkx7ikoMK3cdYeGmg/y45RCZx4to6efNjK6ZPLB3JkVjHsF3fLXbFJyQ8JW1orZlW5g216oCWo3s/CKenb+C/Vt+Y2Lrw1wckUFg8q9gStl5/uc89ksRa/Zk0LdjK564IIYR1awmbqjEwzlc9uYqWgX4Mv+WkbSrtAo5v6iEs19aRgtvYeGEI/gsfxbSd1orjsf/xaqhVF3rO+cQJC629gdI/Nn6K8XLB6KGW633YTc3y0FnTejK5RljeGv5bp77YRsD2sLjE7sTEdmFtkH+DZ4+WaX0XVYd8i1fWy248Y9B7PTaa/CUFFkbe6z/xKrLcuGrtdZe2Zt+jOcWbee7jQdpG+TH3RN6MXVoFL7ep95XUUkpq3als3DTQRYlpPJs0TOM8NrK33p8xvjYnozt3Y4WfpViNMbqeljyd6vFOvVTCGpXbTzxSRncPXcDqdn53HNWT24b18Nqhafvgvcmgbcv5rrvWbjfj38s3MqBzDzO69+RR87tQ2SYg7q/gOSjx7nsjVWUGMMXt4yic5sqrm0MGxZ/iv+KZ+jrtR/axViznvqcV7dulJJiSF5rtdwT/2d1zzywu/H3J6gHTejKbaxMPMKdc9aTfuxETfXQQF/Cg/wJD7Z92X5uW+FY9/Cg+iX+fautuvHJa6xkcfbfrHrvVSWL/Gz4/FqrzvqYB6054zUklYxjhbz6804++W0vPl5e3DSmGzeP6UaQv31JpKiklM3xvzDo+wt4Wy7nH3kX08LXm/F92nHegI6M692OFlJkdfts+twq3XzBK9X+VVNcUsorPyfyn593EhkWyMtTYxnUudKetqmbrcVNLdvC9YvI92/N28t38/rSRIyBmWO6ccvY7gT6NSwRpucWcPmbq0jLLWDezJH07VipS8kYSPzJ+iWVsp6DPpG8WHQpD9z3MO1aOeCXSn5Ws+160YSu3Ep6bgHr92WSlltAWk6FrwqP84pO3su0pZ83o3q0ZWzvcMb2bken0DqssjTG2rbuf0/A0T3WwOzZfzt5gDM7BT69Ag5vgQv+XWOZ2/yiEt77dQ9vLNnFscJirhwaxb0Tep3SnWC3z/6E2bWEtVOW8PWOfBYlpHIkt5Ao3xw+avlvuuZvpXDs4/ideX+1v2D2pR/n7s/Ws35fJpcOjuT/pvSr/hfLvt/go4usfWpnfAsBIaRk5vHM99tY8EcKHUMCeHhyHy4cGFGnkhBlcvKLmDbrNxIP5/LxDcMZGl1h9a4xsPN/sOIFa/wgtDOMfYQ9Eecx8eWVnD+wIy9eEVvn93QlmtCVRzHGcKywhCO2JH8wK5/fdqezbHsaB2xT3Hq2CypP7nHRYfYtTikuhPj3YNkzkJdplUEe95g1c+LTy61W3RUfWv22VTiSW8D3m1N5fUkiB7PymdC3HQ9N6kPP9sENu+FDW+CNUVZNmAlPUFJq2LTuF6J/vB7/oizuKbyV5d4jGdcnnHP7d2R8n3blLWhr1eUB/vp1AiLwj4v7c8HAiFreECupzplq9Tdf/UV5GYK1SRn83zcJbD6QTWxUKJNO68Cwrq3p3ymkyi6kyvKLSpjx/hrik44y65o4xvWxdQ2Vllq/VFf8yxqwDYmy7nfQn8q7tJ77YRuvL93F/FtGEhfdBCUcnEQTulJYySvxcC7LdqSxdHsaa/ZkUFhSSqCfN6O6l7Xew2vvB87LtLa6++1Nawqcl49Vc+aqeadMSzyQmceizan8kJBKfFIGpQYGRobw8OS+jOzuwIHE+ddbm4fcs9FquX5xEwSEUDJ1DqvzI1m46SA/bLZa7gG+Xozr3Y7J/Tvyvy2H+OaPFIZ1bc1LV8bW7S+XTfOtfU97TYIrPy7fTamk1PDFumTeWr6LXWnHAGjh683gLqEMi27DsK6tGdQ59MQUTJviklJu+/R3ftxyiJenxjIltpM1JrFpvvXvfWQHtOlhJfIBV56ye9PxwmLO+tcywgL9+ObO0Y0++8ZZNKErVYVjBcWs2pXO0h2HWbo9jeSjVuu9W3hL+ncKoXeHYPp2aEXvDsF0DAk4tfsgc5+1w1DmPmsKoG0HqsTDuSxKSOWHzalsOmDN7+7dPpiJ/doz8bQOxHRsVa+uiBqVlRDuMMCajhcxCKbNgeAO5aeUlBrW7Mlg4aaDfL85lSO5BXh7CX8+uxe3nNm9fglwzSxYeL81AHzRG6dUyEzLKSA+KYPVezJYsyeDranZGAO+3sLAyFCGdW3N0K6tGdIljL99s4XP1yXz5AUxzBjWETZ8ai2eytxnbXd4xn0QM6XGgenvNh7k9tm/87eLTuNPIxy0E1YzowldqVoYY9iVdoyl2w+zclc62w5mk5J1YtVkqwAf+tiSe5+OwfTpEEyv9sEEB/hijGHzgWx+SDjIooRD5aspY6N
CmdivAxP7tadbeC1VIx3hvzdbWxz2uwQuer3GaowlpYbf9x0lLNCXHu0a2OWz7DlriubwW2HSP2scCM7KK2Ld3hMJflNyFsWlJ3LQfWMjubPVCmsz8dxUa+HTGfdDr4l2zVoxxjD9ndUkpGSz5P6xtG7pfrs7aUJXqh6yjhex/VAO21Oz2Zaaw7bUHLan5pBbUFx+TmRYC4yxula8vYThXVszsV8HzunXno4hTVzeNi8T9v4Kvc9t2pWPFXddqmOhtOOFxazfl8nG7YmMzPyGgcmzkbwMa7XumAesefx1vJedh3KY/PIKLo+L4p+X9K/r3TR7mtCVchBjDAcy89huS/DbUnMoKi5lfN92TOjb3i1bhHYpLYWvb7dq4Ux+HobfXPP5xYXWVNDEn6yaPGWrNntOhDH3Q9SwBoXz92+38O6ve/j69tMZEBnaoGs1N5rQlVKNr6QY5v0Jti+ES96BAZefeM4Ya2OOXT9bSTxphVWD3svHWuzUYzz0Pg/axzgklJz8Isa9sIxOYS348tZReLnRAGlNCb35LYNSSrkmbx+rcNonl8JXt1iPvXytFnjiT1ZtFYCwaGuWSvfxVpeKI6plVhIc4Muj5/bhz/P+YP66ZK4Y2kg17u1QWmo4nFNAUvox9qYfIyn9OBP7dSA2KtTh76UJXSnlOL4B1uyaD8+Hz2dYx/yCrD7xUXdaSbxN9yYJ5eJBnZi9eh/P/rCNif06nFTUy9FKSg0pmXnsTT9+UuLem36MvenHKSguLT/X11vo3DqwURK6drkopRzvWLpVvrbTEKtLpZZ6No0lISWLC179hWtGRvPkhf0Aa/FS5vEisvKq/srOKyK3oJjC4lKKSqyvwhJDYXEJRSXGelxcSqHtuaJiQ/qxAopKTuRSfx8vurQJpEublkSXf29JlzaBRIS2aNAcee1yUUo1rZZtrEqVTtYvIoTpw7vw0aokvtt0kKy8IgortJarEhzgQ7C/D34+Xvh6W19+Pl74eXsR4OtFcIAPft5e+NqO+XoLrVv6n0jcbQNpHxzglH57TehKKbd2/zm9KSwuxcsLWrXwJaSGr+AAX5deYaoJXSnl1kICfXn2Mvt2inJ1dtUTFZFJIrJdRBJF5JRK+iJyi4hsEpENIvKLiDhm7pFSSim71ZrQRcQbeA2YDMQA06pI2LONMf2NMbHAc8CLjg5UKaVUzexpoQ8DEo0xu40xhcBcYErFE4wx2RUetgScM3VGKaU8mD196J2A/RUeJwPDK58kIrcDfwb8gPFVXUhEbgZuBujcuXNdY1VKKVUDh23GaIx5zRjTHXgIeLyac942xsQZY+LCw8Md9dZKKaWwL6EfACqum420HavOXOCiBsSklFKqHuxJ6GuBniLSVUT8gKnAgooniEjPCg/PA3Y6LkSllFL2qLUP3RhTLCJ3AIsAb+A9Y0yCiDwFxBtjFgB3iMgEoAg4ClzbmEErpZQ6ldNquYhIGrC3ni9vCxxxYDjNmafcq6fcJ3jOvXrKfULT3msXY0yVg5BOS+gNISLx1RWncTeecq+ecp/gOffqKfcJzedeHTbLRSmllHNpQldKKTfhqgn9bWcH0IQ85V495T7Bc+7VU+4Tmsm9umQfulJKqVO5agtdKaVUJZrQlVLKTbhcQq+tNru7EJGkCjXm3WrzVRF5T0QOi8jmCsdai8j/RGSn7XuYM2N0lGru9UkROWD7bDeIyLnOjNERRCRKRJaIyBYRSRCRu23H3epzreE+m8Vn6lJ96Lba7DuAs7GqPq4Fphljtjg1sEYgIklAnDHG7RZmiMgYIBf4yBhzmu3Yc0CGMeYZ2y/qMGPMQ86M0xGqudcngVxjzAvOjM2RRKQj0NEY87uIBAPrsGo6zcCNPtca7vMKmsFn6mot9Fprs6vmzxizHMiodHgK8KHt5w9xkwJv1dyr2zHGHDTG/G77OQfYilV6260+1xrus1lwtYReVW32ZvOP6WAG+FFE1tnqyLu79saYg7afU4H2zgymCdwhIhttXTIu3Q1RmYhEA4OA1bjx51rpPqEZfKaultA9yWhjzGCsrf9ut/3p7hGM1Q/oOn2BdfcG0B2IBQ4C/3JqNA4kIkHAF8A9lXYyc6vPtYr7bBafqasl9LrWZndZxpgDtu+HgS+xupvc2SFb/2RZP+VhJ8fTaIwxh4wxJcaYUmAWbvLZiogvVpL71BjzX9tht/tcq7rP5vKZulpCr7U2uzsQkZa2ARdEpCVwDrC55le5vAWcKLt8LfC1E2NpVGUJzuZi3OCzFREB3gW2GmMqbhLvVp9rdffZXD5Tl5rlAmCbDvRvTtRmf9q5ETmeiHTDapWDVbN+tjvdp4jMAcZilRw9BDwBfAXMAzpjlVW+whjj8oOJ1dzrWKw/zQ2QBMys0M/skkRkNLAC2ASU2g4/itW/7Dafaw33OY1m8Jm6XEJXSilVNVfrclFKKVUNTehKKeUmNKErpZSb0ISulFJuQhO6Ukq5CU3oSinlJjShK6WUm/h/9Jb46AaOx+EAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "import matplotlib.pyplot as plt \n",
    "\n",
    "plt.title('Loss vs Epochs')\n",
    "plt.plot(model_history[0].history['loss'], label='Training Fold 1')\n",
    "plt.plot(model_history[1].history['loss'], label='Training Fold 2')\n",
    "plt.legend()\n",
    "plt.show()"
   ]
  },
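  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The same history objects also record `val_loss` (it appears in the fit logs above), so training and validation curves can be compared per fold. A quick sketch, assuming `fit_and_evaluate` returns standard Keras `History` objects:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch: overlay training and validation loss for each fold.\n",
    "for i, h in enumerate(model_history):\n",
    "    plt.plot(h.history['loss'], label=f'Train fold {i+1}')\n",
    "    plt.plot(h.history['val_loss'], linestyle='--', label=f'Val fold {i+1}')\n",
    "plt.title('Training vs validation loss')\n",
    "plt.xlabel('Epoch')\n",
    "plt.ylabel('Loss')\n",
    "plt.legend()\n",
    "plt.show()"
   ]
  },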
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [],
   "source": [
    "from keras.models import load_model\n",
    "from efficientnet.tfkeras import EfficientNetB1"
   ]
  },
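  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Importing `efficientnet.tfkeras` before calling `load_model` matters: on import the package registers its custom layers and activations (e.g. the `FixedDropout` layer visible in the summary below) with Keras, so the saved model can be deserialized without passing `custom_objects` by hand. This reflects how the efficientnet package typically registers its objects; worth re-checking against the installed version."
   ]
  },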
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [],
   "source": [
    "#Load model\n",
    "model = None\n",
    "model = load_model('clean_notebooks/cnn_injection_transfer_weights.h5')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model: \"model_3\"\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "input_3 (InputLayer)            [(None, 240, 240, 4) 0                                            \n",
      "__________________________________________________________________________________________________\n",
      "stem_conv (Conv2D)              (None, 120, 120, 32) 1152        input_3[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "stem_bn (BatchNormalization)    (None, 120, 120, 32) 128         stem_conv[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "stem_activation (Activation)    (None, 120, 120, 32) 0           stem_bn[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "block1a_dwconv (DepthwiseConv2D (None, 120, 120, 32) 288         stem_activation[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "block1a_bn (BatchNormalization) (None, 120, 120, 32) 128         block1a_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block1a_activation (Activation) (None, 120, 120, 32) 0           block1a_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block1a_se_squeeze (GlobalAvera (None, 32)           0           block1a_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block1a_se_reshape (Reshape)    (None, 1, 1, 32)     0           block1a_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block1a_se_reduce (Conv2D)      (None, 1, 1, 8)      264         block1a_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block1a_se_expand (Conv2D)      (None, 1, 1, 32)     288         block1a_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block1a_se_excite (Multiply)    (None, 120, 120, 32) 0           block1a_activation[0][0]         \n",
      "                                                                 block1a_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block1a_project_conv (Conv2D)   (None, 120, 120, 16) 512         block1a_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block1a_project_bn (BatchNormal (None, 120, 120, 16) 64          block1a_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block1b_dwconv (DepthwiseConv2D (None, 120, 120, 16) 144         block1a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block1b_bn (BatchNormalization) (None, 120, 120, 16) 64          block1b_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block1b_activation (Activation) (None, 120, 120, 16) 0           block1b_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block1b_se_squeeze (GlobalAvera (None, 16)           0           block1b_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block1b_se_reshape (Reshape)    (None, 1, 1, 16)     0           block1b_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block1b_se_reduce (Conv2D)      (None, 1, 1, 4)      68          block1b_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block1b_se_expand (Conv2D)      (None, 1, 1, 16)     80          block1b_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block1b_se_excite (Multiply)    (None, 120, 120, 16) 0           block1b_activation[0][0]         \n",
      "                                                                 block1b_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block1b_project_conv (Conv2D)   (None, 120, 120, 16) 256         block1b_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block1b_project_bn (BatchNormal (None, 120, 120, 16) 64          block1b_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block1b_drop (FixedDropout)     (None, 120, 120, 16) 0           block1b_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block1b_add (Add)               (None, 120, 120, 16) 0           block1b_drop[0][0]               \n",
      "                                                                 block1a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2a_expand_conv (Conv2D)    (None, 120, 120, 96) 1536        block1b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block2a_expand_bn (BatchNormali (None, 120, 120, 96) 384         block2a_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block2a_expand_activation (Acti (None, 120, 120, 96) 0           block2a_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2a_dwconv (DepthwiseConv2D (None, 60, 60, 96)   864         block2a_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block2a_bn (BatchNormalization) (None, 60, 60, 96)   384         block2a_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block2a_activation (Activation) (None, 60, 60, 96)   0           block2a_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block2a_se_squeeze (GlobalAvera (None, 96)           0           block2a_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2a_se_reshape (Reshape)    (None, 1, 1, 96)     0           block2a_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2a_se_reduce (Conv2D)      (None, 1, 1, 4)      388         block2a_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2a_se_expand (Conv2D)      (None, 1, 1, 96)     480         block2a_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2a_se_excite (Multiply)    (None, 60, 60, 96)   0           block2a_activation[0][0]         \n",
      "                                                                 block2a_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2a_project_conv (Conv2D)   (None, 60, 60, 24)   2304        block2a_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2a_project_bn (BatchNormal (None, 60, 60, 24)   96          block2a_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block2b_expand_conv (Conv2D)    (None, 60, 60, 144)  3456        block2a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2b_expand_bn (BatchNormali (None, 60, 60, 144)  576         block2b_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block2b_expand_activation (Acti (None, 60, 60, 144)  0           block2b_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2b_dwconv (DepthwiseConv2D (None, 60, 60, 144)  1296        block2b_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block2b_bn (BatchNormalization) (None, 60, 60, 144)  576         block2b_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block2b_activation (Activation) (None, 60, 60, 144)  0           block2b_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block2b_se_squeeze (GlobalAvera (None, 144)          0           block2b_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2b_se_reshape (Reshape)    (None, 1, 1, 144)    0           block2b_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2b_se_reduce (Conv2D)      (None, 1, 1, 6)      870         block2b_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2b_se_expand (Conv2D)      (None, 1, 1, 144)    1008        block2b_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2b_se_excite (Multiply)    (None, 60, 60, 144)  0           block2b_activation[0][0]         \n",
      "                                                                 block2b_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2b_project_conv (Conv2D)   (None, 60, 60, 24)   3456        block2b_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2b_project_bn (BatchNormal (None, 60, 60, 24)   96          block2b_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block2b_drop (FixedDropout)     (None, 60, 60, 24)   0           block2b_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2b_add (Add)               (None, 60, 60, 24)   0           block2b_drop[0][0]               \n",
      "                                                                 block2a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2c_expand_conv (Conv2D)    (None, 60, 60, 144)  3456        block2b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block2c_expand_bn (BatchNormali (None, 60, 60, 144)  576         block2c_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block2c_expand_activation (Acti (None, 60, 60, 144)  0           block2c_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2c_dwconv (DepthwiseConv2D (None, 60, 60, 144)  1296        block2c_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block2c_bn (BatchNormalization) (None, 60, 60, 144)  576         block2c_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block2c_activation (Activation) (None, 60, 60, 144)  0           block2c_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block2c_se_squeeze (GlobalAvera (None, 144)          0           block2c_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2c_se_reshape (Reshape)    (None, 1, 1, 144)    0           block2c_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2c_se_reduce (Conv2D)      (None, 1, 1, 6)      870         block2c_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2c_se_expand (Conv2D)      (None, 1, 1, 144)    1008        block2c_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2c_se_excite (Multiply)    (None, 60, 60, 144)  0           block2c_activation[0][0]         \n",
      "                                                                 block2c_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2c_project_conv (Conv2D)   (None, 60, 60, 24)   3456        block2c_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2c_project_bn (BatchNormal (None, 60, 60, 24)   96          block2c_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block2c_drop (FixedDropout)     (None, 60, 60, 24)   0           block2c_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2c_add (Add)               (None, 60, 60, 24)   0           block2c_drop[0][0]               \n",
      "                                                                 block2b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block3a_expand_conv (Conv2D)    (None, 60, 60, 144)  3456        block2c_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block3a_expand_bn (BatchNormali (None, 60, 60, 144)  576         block3a_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block3a_expand_activation (Acti (None, 60, 60, 144)  0           block3a_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3a_dwconv (DepthwiseConv2D (None, 30, 30, 144)  3600        block3a_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block3a_bn (BatchNormalization) (None, 30, 30, 144)  576         block3a_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block3a_activation (Activation) (None, 30, 30, 144)  0           block3a_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block3a_se_squeeze (GlobalAvera (None, 144)          0           block3a_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3a_se_reshape (Reshape)    (None, 1, 1, 144)    0           block3a_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3a_se_reduce (Conv2D)      (None, 1, 1, 6)      870         block3a_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3a_se_expand (Conv2D)      (None, 1, 1, 144)    1008        block3a_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3a_se_excite (Multiply)    (None, 30, 30, 144)  0           block3a_activation[0][0]         \n",
      "                                                                 block3a_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3a_project_conv (Conv2D)   (None, 30, 30, 40)   5760        block3a_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3a_project_bn (BatchNormal (None, 30, 30, 40)   160         block3a_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block3b_expand_conv (Conv2D)    (None, 30, 30, 240)  9600        block3a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3b_expand_bn (BatchNormali (None, 30, 30, 240)  960         block3b_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block3b_expand_activation (Acti (None, 30, 30, 240)  0           block3b_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3b_dwconv (DepthwiseConv2D (None, 30, 30, 240)  6000        block3b_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block3b_bn (BatchNormalization) (None, 30, 30, 240)  960         block3b_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block3b_activation (Activation) (None, 30, 30, 240)  0           block3b_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block3b_se_squeeze (GlobalAvera (None, 240)          0           block3b_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3b_se_reshape (Reshape)    (None, 1, 1, 240)    0           block3b_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3b_se_reduce (Conv2D)      (None, 1, 1, 10)     2410        block3b_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3b_se_expand (Conv2D)      (None, 1, 1, 240)    2640        block3b_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3b_se_excite (Multiply)    (None, 30, 30, 240)  0           block3b_activation[0][0]         \n",
      "                                                                 block3b_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3b_project_conv (Conv2D)   (None, 30, 30, 40)   9600        block3b_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3b_project_bn (BatchNormal (None, 30, 30, 40)   160         block3b_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block3b_drop (FixedDropout)     (None, 30, 30, 40)   0           block3b_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3b_add (Add)               (None, 30, 30, 40)   0           block3b_drop[0][0]               \n",
      "                                                                 block3a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3c_expand_conv (Conv2D)    (None, 30, 30, 240)  9600        block3b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block3c_expand_bn (BatchNormali (None, 30, 30, 240)  960         block3c_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block3c_expand_activation (Acti (None, 30, 30, 240)  0           block3c_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3c_dwconv (DepthwiseConv2D (None, 30, 30, 240)  6000        block3c_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block3c_bn (BatchNormalization) (None, 30, 30, 240)  960         block3c_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block3c_activation (Activation) (None, 30, 30, 240)  0           block3c_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block3c_se_squeeze (GlobalAvera (None, 240)          0           block3c_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3c_se_reshape (Reshape)    (None, 1, 1, 240)    0           block3c_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3c_se_reduce (Conv2D)      (None, 1, 1, 10)     2410        block3c_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3c_se_expand (Conv2D)      (None, 1, 1, 240)    2640        block3c_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3c_se_excite (Multiply)    (None, 30, 30, 240)  0           block3c_activation[0][0]         \n",
      "                                                                 block3c_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3c_project_conv (Conv2D)   (None, 30, 30, 40)   9600        block3c_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3c_project_bn (BatchNormal (None, 30, 30, 40)   160         block3c_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block3c_drop (FixedDropout)     (None, 30, 30, 40)   0           block3c_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3c_add (Add)               (None, 30, 30, 40)   0           block3c_drop[0][0]               \n",
      "                                                                 block3b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block4a_expand_conv (Conv2D)    (None, 30, 30, 240)  9600        block3c_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block4a_expand_bn (BatchNormali (None, 30, 30, 240)  960         block4a_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block4a_expand_activation (Acti (None, 30, 30, 240)  0           block4a_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4a_dwconv (DepthwiseConv2D (None, 15, 15, 240)  2160        block4a_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block4a_bn (BatchNormalization) (None, 15, 15, 240)  960         block4a_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block4a_activation (Activation) (None, 15, 15, 240)  0           block4a_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block4a_se_squeeze (GlobalAvera (None, 240)          0           block4a_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4a_se_reshape (Reshape)    (None, 1, 1, 240)    0           block4a_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4a_se_reduce (Conv2D)      (None, 1, 1, 10)     2410        block4a_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4a_se_expand (Conv2D)      (None, 1, 1, 240)    2640        block4a_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4a_se_excite (Multiply)    (None, 15, 15, 240)  0           block4a_activation[0][0]         \n",
      "                                                                 block4a_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4a_project_conv (Conv2D)   (None, 15, 15, 80)   19200       block4a_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4a_project_bn (BatchNormal (None, 15, 15, 80)   320         block4a_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block4b_expand_conv (Conv2D)    (None, 15, 15, 480)  38400       block4a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4b_expand_bn (BatchNormali (None, 15, 15, 480)  1920        block4b_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block4b_expand_activation (Acti (None, 15, 15, 480)  0           block4b_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4b_dwconv (DepthwiseConv2D (None, 15, 15, 480)  4320        block4b_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block4b_bn (BatchNormalization) (None, 15, 15, 480)  1920        block4b_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block4b_activation (Activation) (None, 15, 15, 480)  0           block4b_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block4b_se_squeeze (GlobalAvera (None, 480)          0           block4b_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4b_se_reshape (Reshape)    (None, 1, 1, 480)    0           block4b_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4b_se_reduce (Conv2D)      (None, 1, 1, 20)     9620        block4b_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4b_se_expand (Conv2D)      (None, 1, 1, 480)    10080       block4b_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4b_se_excite (Multiply)    (None, 15, 15, 480)  0           block4b_activation[0][0]         \n",
      "                                                                 block4b_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4b_project_conv (Conv2D)   (None, 15, 15, 80)   38400       block4b_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4b_project_bn (BatchNormal (None, 15, 15, 80)   320         block4b_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block4b_drop (FixedDropout)     (None, 15, 15, 80)   0           block4b_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4b_add (Add)               (None, 15, 15, 80)   0           block4b_drop[0][0]               \n",
      "                                                                 block4a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4c_expand_conv (Conv2D)    (None, 15, 15, 480)  38400       block4b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block4c_expand_bn (BatchNormali (None, 15, 15, 480)  1920        block4c_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block4c_expand_activation (Acti (None, 15, 15, 480)  0           block4c_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4c_dwconv (DepthwiseConv2D (None, 15, 15, 480)  4320        block4c_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block4c_bn (BatchNormalization) (None, 15, 15, 480)  1920        block4c_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block4c_activation (Activation) (None, 15, 15, 480)  0           block4c_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block4c_se_squeeze (GlobalAvera (None, 480)          0           block4c_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4c_se_reshape (Reshape)    (None, 1, 1, 480)    0           block4c_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4c_se_reduce (Conv2D)      (None, 1, 1, 20)     9620        block4c_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4c_se_expand (Conv2D)      (None, 1, 1, 480)    10080       block4c_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4c_se_excite (Multiply)    (None, 15, 15, 480)  0           block4c_activation[0][0]         \n",
      "                                                                 block4c_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4c_project_conv (Conv2D)   (None, 15, 15, 80)   38400       block4c_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4c_project_bn (BatchNormal (None, 15, 15, 80)   320         block4c_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block4c_drop (FixedDropout)     (None, 15, 15, 80)   0           block4c_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4c_add (Add)               (None, 15, 15, 80)   0           block4c_drop[0][0]               \n",
      "                                                                 block4b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block4d_expand_conv (Conv2D)    (None, 15, 15, 480)  38400       block4c_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block4d_expand_bn (BatchNormali (None, 15, 15, 480)  1920        block4d_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block4d_expand_activation (Acti (None, 15, 15, 480)  0           block4d_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4d_dwconv (DepthwiseConv2D (None, 15, 15, 480)  4320        block4d_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block4d_bn (BatchNormalization) (None, 15, 15, 480)  1920        block4d_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block4d_activation (Activation) (None, 15, 15, 480)  0           block4d_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block4d_se_squeeze (GlobalAvera (None, 480)          0           block4d_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4d_se_reshape (Reshape)    (None, 1, 1, 480)    0           block4d_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4d_se_reduce (Conv2D)      (None, 1, 1, 20)     9620        block4d_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4d_se_expand (Conv2D)      (None, 1, 1, 480)    10080       block4d_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4d_se_excite (Multiply)    (None, 15, 15, 480)  0           block4d_activation[0][0]         \n",
      "                                                                 block4d_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4d_project_conv (Conv2D)   (None, 15, 15, 80)   38400       block4d_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4d_project_bn (BatchNormal (None, 15, 15, 80)   320         block4d_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block4d_drop (FixedDropout)     (None, 15, 15, 80)   0           block4d_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4d_add (Add)               (None, 15, 15, 80)   0           block4d_drop[0][0]               \n",
      "                                                                 block4c_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block5a_expand_conv (Conv2D)    (None, 15, 15, 480)  38400       block4d_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block5a_expand_bn (BatchNormali (None, 15, 15, 480)  1920        block5a_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block5a_expand_activation (Acti (None, 15, 15, 480)  0           block5a_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5a_dwconv (DepthwiseConv2D (None, 15, 15, 480)  12000       block5a_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block5a_bn (BatchNormalization) (None, 15, 15, 480)  1920        block5a_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block5a_activation (Activation) (None, 15, 15, 480)  0           block5a_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block5a_se_squeeze (GlobalAvera (None, 480)          0           block5a_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5a_se_reshape (Reshape)    (None, 1, 1, 480)    0           block5a_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5a_se_reduce (Conv2D)      (None, 1, 1, 20)     9620        block5a_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5a_se_expand (Conv2D)      (None, 1, 1, 480)    10080       block5a_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5a_se_excite (Multiply)    (None, 15, 15, 480)  0           block5a_activation[0][0]         \n",
      "                                                                 block5a_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5a_project_conv (Conv2D)   (None, 15, 15, 112)  53760       block5a_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5a_project_bn (BatchNormal (None, 15, 15, 112)  448         block5a_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block5b_expand_conv (Conv2D)    (None, 15, 15, 672)  75264       block5a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5b_expand_bn (BatchNormali (None, 15, 15, 672)  2688        block5b_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block5b_expand_activation (Acti (None, 15, 15, 672)  0           block5b_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5b_dwconv (DepthwiseConv2D (None, 15, 15, 672)  16800       block5b_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block5b_bn (BatchNormalization) (None, 15, 15, 672)  2688        block5b_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block5b_activation (Activation) (None, 15, 15, 672)  0           block5b_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block5b_se_squeeze (GlobalAvera (None, 672)          0           block5b_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5b_se_reshape (Reshape)    (None, 1, 1, 672)    0           block5b_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5b_se_reduce (Conv2D)      (None, 1, 1, 28)     18844       block5b_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5b_se_expand (Conv2D)      (None, 1, 1, 672)    19488       block5b_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5b_se_excite (Multiply)    (None, 15, 15, 672)  0           block5b_activation[0][0]         \n",
      "                                                                 block5b_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5b_project_conv (Conv2D)   (None, 15, 15, 112)  75264       block5b_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5b_project_bn (BatchNormal (None, 15, 15, 112)  448         block5b_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block5b_drop (FixedDropout)     (None, 15, 15, 112)  0           block5b_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5b_add (Add)               (None, 15, 15, 112)  0           block5b_drop[0][0]               \n",
      "                                                                 block5a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5c_expand_conv (Conv2D)    (None, 15, 15, 672)  75264       block5b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block5c_expand_bn (BatchNormali (None, 15, 15, 672)  2688        block5c_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block5c_expand_activation (Acti (None, 15, 15, 672)  0           block5c_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5c_dwconv (DepthwiseConv2D (None, 15, 15, 672)  16800       block5c_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block5c_bn (BatchNormalization) (None, 15, 15, 672)  2688        block5c_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block5c_activation (Activation) (None, 15, 15, 672)  0           block5c_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block5c_se_squeeze (GlobalAvera (None, 672)          0           block5c_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5c_se_reshape (Reshape)    (None, 1, 1, 672)    0           block5c_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5c_se_reduce (Conv2D)      (None, 1, 1, 28)     18844       block5c_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5c_se_expand (Conv2D)      (None, 1, 1, 672)    19488       block5c_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5c_se_excite (Multiply)    (None, 15, 15, 672)  0           block5c_activation[0][0]         \n",
      "                                                                 block5c_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5c_project_conv (Conv2D)   (None, 15, 15, 112)  75264       block5c_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5c_project_bn (BatchNormal (None, 15, 15, 112)  448         block5c_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block5c_drop (FixedDropout)     (None, 15, 15, 112)  0           block5c_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5c_add (Add)               (None, 15, 15, 112)  0           block5c_drop[0][0]               \n",
      "                                                                 block5b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block5d_expand_conv (Conv2D)    (None, 15, 15, 672)  75264       block5c_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block5d_expand_bn (BatchNormali (None, 15, 15, 672)  2688        block5d_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block5d_expand_activation (Acti (None, 15, 15, 672)  0           block5d_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5d_dwconv (DepthwiseConv2D (None, 15, 15, 672)  16800       block5d_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block5d_bn (BatchNormalization) (None, 15, 15, 672)  2688        block5d_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block5d_activation (Activation) (None, 15, 15, 672)  0           block5d_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block5d_se_squeeze (GlobalAvera (None, 672)          0           block5d_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5d_se_reshape (Reshape)    (None, 1, 1, 672)    0           block5d_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5d_se_reduce (Conv2D)      (None, 1, 1, 28)     18844       block5d_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5d_se_expand (Conv2D)      (None, 1, 1, 672)    19488       block5d_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5d_se_excite (Multiply)    (None, 15, 15, 672)  0           block5d_activation[0][0]         \n",
      "                                                                 block5d_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5d_project_conv (Conv2D)   (None, 15, 15, 112)  75264       block5d_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5d_project_bn (BatchNormal (None, 15, 15, 112)  448         block5d_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block5d_drop (FixedDropout)     (None, 15, 15, 112)  0           block5d_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5d_add (Add)               (None, 15, 15, 112)  0           block5d_drop[0][0]               \n",
      "                                                                 block5c_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block6a_expand_conv (Conv2D)    (None, 15, 15, 672)  75264       block5d_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block6a_expand_bn (BatchNormali (None, 15, 15, 672)  2688        block6a_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block6a_expand_activation (Acti (None, 15, 15, 672)  0           block6a_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6a_dwconv (DepthwiseConv2D (None, 8, 8, 672)    16800       block6a_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block6a_bn (BatchNormalization) (None, 8, 8, 672)    2688        block6a_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block6a_activation (Activation) (None, 8, 8, 672)    0           block6a_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block6a_se_squeeze (GlobalAvera (None, 672)          0           block6a_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6a_se_reshape (Reshape)    (None, 1, 1, 672)    0           block6a_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6a_se_reduce (Conv2D)      (None, 1, 1, 28)     18844       block6a_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6a_se_expand (Conv2D)      (None, 1, 1, 672)    19488       block6a_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6a_se_excite (Multiply)    (None, 8, 8, 672)    0           block6a_activation[0][0]         \n",
      "                                                                 block6a_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6a_project_conv (Conv2D)   (None, 8, 8, 192)    129024      block6a_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6a_project_bn (BatchNormal (None, 8, 8, 192)    768         block6a_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block6b_expand_conv (Conv2D)    (None, 8, 8, 1152)   221184      block6a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6b_expand_bn (BatchNormali (None, 8, 8, 1152)   4608        block6b_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block6b_expand_activation (Acti (None, 8, 8, 1152)   0           block6b_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6b_dwconv (DepthwiseConv2D (None, 8, 8, 1152)   28800       block6b_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block6b_bn (BatchNormalization) (None, 8, 8, 1152)   4608        block6b_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block6b_activation (Activation) (None, 8, 8, 1152)   0           block6b_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block6b_se_squeeze (GlobalAvera (None, 1152)         0           block6b_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6b_se_reshape (Reshape)    (None, 1, 1, 1152)   0           block6b_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6b_se_reduce (Conv2D)      (None, 1, 1, 48)     55344       block6b_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6b_se_expand (Conv2D)      (None, 1, 1, 1152)   56448       block6b_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6b_se_excite (Multiply)    (None, 8, 8, 1152)   0           block6b_activation[0][0]         \n",
      "                                                                 block6b_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6b_project_conv (Conv2D)   (None, 8, 8, 192)    221184      block6b_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6b_project_bn (BatchNormal (None, 8, 8, 192)    768         block6b_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block6b_drop (FixedDropout)     (None, 8, 8, 192)    0           block6b_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6b_add (Add)               (None, 8, 8, 192)    0           block6b_drop[0][0]               \n",
      "                                                                 block6a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6c_expand_conv (Conv2D)    (None, 8, 8, 1152)   221184      block6b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block6c_expand_bn (BatchNormali (None, 8, 8, 1152)   4608        block6c_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block6c_expand_activation (Acti (None, 8, 8, 1152)   0           block6c_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6c_dwconv (DepthwiseConv2D (None, 8, 8, 1152)   28800       block6c_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block6c_bn (BatchNormalization) (None, 8, 8, 1152)   4608        block6c_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block6c_activation (Activation) (None, 8, 8, 1152)   0           block6c_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block6c_se_squeeze (GlobalAvera (None, 1152)         0           block6c_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6c_se_reshape (Reshape)    (None, 1, 1, 1152)   0           block6c_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6c_se_reduce (Conv2D)      (None, 1, 1, 48)     55344       block6c_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6c_se_expand (Conv2D)      (None, 1, 1, 1152)   56448       block6c_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6c_se_excite (Multiply)    (None, 8, 8, 1152)   0           block6c_activation[0][0]         \n",
      "                                                                 block6c_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6c_project_conv (Conv2D)   (None, 8, 8, 192)    221184      block6c_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6c_project_bn (BatchNormal (None, 8, 8, 192)    768         block6c_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block6c_drop (FixedDropout)     (None, 8, 8, 192)    0           block6c_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6c_add (Add)               (None, 8, 8, 192)    0           block6c_drop[0][0]               \n",
      "                                                                 block6b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block6d_expand_conv (Conv2D)    (None, 8, 8, 1152)   221184      block6c_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block6d_expand_bn (BatchNormali (None, 8, 8, 1152)   4608        block6d_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block6d_expand_activation (Acti (None, 8, 8, 1152)   0           block6d_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6d_dwconv (DepthwiseConv2D (None, 8, 8, 1152)   28800       block6d_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block6d_bn (BatchNormalization) (None, 8, 8, 1152)   4608        block6d_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block6d_activation (Activation) (None, 8, 8, 1152)   0           block6d_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block6d_se_squeeze (GlobalAvera (None, 1152)         0           block6d_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6d_se_reshape (Reshape)    (None, 1, 1, 1152)   0           block6d_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6d_se_reduce (Conv2D)      (None, 1, 1, 48)     55344       block6d_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6d_se_expand (Conv2D)      (None, 1, 1, 1152)   56448       block6d_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6d_se_excite (Multiply)    (None, 8, 8, 1152)   0           block6d_activation[0][0]         \n",
      "                                                                 block6d_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6d_project_conv (Conv2D)   (None, 8, 8, 192)    221184      block6d_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6d_project_bn (BatchNormal (None, 8, 8, 192)    768         block6d_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block6d_drop (FixedDropout)     (None, 8, 8, 192)    0           block6d_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6d_add (Add)               (None, 8, 8, 192)    0           block6d_drop[0][0]               \n",
      "                                                                 block6c_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block6e_expand_conv (Conv2D)    (None, 8, 8, 1152)   221184      block6d_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block6e_expand_bn (BatchNormali (None, 8, 8, 1152)   4608        block6e_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block6e_expand_activation (Acti (None, 8, 8, 1152)   0           block6e_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6e_dwconv (DepthwiseConv2D (None, 8, 8, 1152)   28800       block6e_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block6e_bn (BatchNormalization) (None, 8, 8, 1152)   4608        block6e_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block6e_activation (Activation) (None, 8, 8, 1152)   0           block6e_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block6e_se_squeeze (GlobalAvera (None, 1152)         0           block6e_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6e_se_reshape (Reshape)    (None, 1, 1, 1152)   0           block6e_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6e_se_reduce (Conv2D)      (None, 1, 1, 48)     55344       block6e_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6e_se_expand (Conv2D)      (None, 1, 1, 1152)   56448       block6e_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6e_se_excite (Multiply)    (None, 8, 8, 1152)   0           block6e_activation[0][0]         \n",
      "                                                                 block6e_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6e_project_conv (Conv2D)   (None, 8, 8, 192)    221184      block6e_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6e_project_bn (BatchNormal (None, 8, 8, 192)    768         block6e_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block6e_drop (FixedDropout)     (None, 8, 8, 192)    0           block6e_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6e_add (Add)               (None, 8, 8, 192)    0           block6e_drop[0][0]               \n",
      "                                                                 block6d_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block7a_expand_conv (Conv2D)    (None, 8, 8, 1152)   221184      block6e_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block7a_expand_bn (BatchNormali (None, 8, 8, 1152)   4608        block7a_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block7a_expand_activation (Acti (None, 8, 8, 1152)   0           block7a_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block7a_dwconv (DepthwiseConv2D (None, 8, 8, 1152)   10368       block7a_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block7a_bn (BatchNormalization) (None, 8, 8, 1152)   4608        block7a_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block7a_activation (Activation) (None, 8, 8, 1152)   0           block7a_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block7a_se_squeeze (GlobalAvera (None, 1152)         0           block7a_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block7a_se_reshape (Reshape)    (None, 1, 1, 1152)   0           block7a_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block7a_se_reduce (Conv2D)      (None, 1, 1, 48)     55344       block7a_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block7a_se_expand (Conv2D)      (None, 1, 1, 1152)   56448       block7a_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block7a_se_excite (Multiply)    (None, 8, 8, 1152)   0           block7a_activation[0][0]         \n",
      "                                                                 block7a_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block7a_project_conv (Conv2D)   (None, 8, 8, 320)    368640      block7a_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block7a_project_bn (BatchNormal (None, 8, 8, 320)    1280        block7a_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block7b_expand_conv (Conv2D)    (None, 8, 8, 1920)   614400      block7a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block7b_expand_bn (BatchNormali (None, 8, 8, 1920)   7680        block7b_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block7b_expand_activation (Acti (None, 8, 8, 1920)   0           block7b_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block7b_dwconv (DepthwiseConv2D (None, 8, 8, 1920)   17280       block7b_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block7b_bn (BatchNormalization) (None, 8, 8, 1920)   7680        block7b_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block7b_activation (Activation) (None, 8, 8, 1920)   0           block7b_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block7b_se_squeeze (GlobalAvera (None, 1920)         0           block7b_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block7b_se_reshape (Reshape)    (None, 1, 1, 1920)   0           block7b_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block7b_se_reduce (Conv2D)      (None, 1, 1, 80)     153680      block7b_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block7b_se_expand (Conv2D)      (None, 1, 1, 1920)   155520      block7b_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block7b_se_excite (Multiply)    (None, 8, 8, 1920)   0           block7b_activation[0][0]         \n",
      "                                                                 block7b_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block7b_project_conv (Conv2D)   (None, 8, 8, 320)    614400      block7b_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block7b_project_bn (BatchNormal (None, 8, 8, 320)    1280        block7b_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block7b_drop (FixedDropout)     (None, 8, 8, 320)    0           block7b_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block7b_add (Add)               (None, 8, 8, 320)    0           block7b_drop[0][0]               \n",
      "                                                                 block7a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "top_conv (Conv2D)               (None, 8, 8, 1280)   409600      block7b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "dense_4_input (InputLayer)      [(None, 6)]          0                                            \n",
      "__________________________________________________________________________________________________\n",
      "top_bn (BatchNormalization)     (None, 8, 8, 1280)   5120        top_conv[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "dense_4 (Dense)                 (None, 8)            56          dense_4_input[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "top_activation (Activation)     (None, 8, 8, 1280)   0           top_bn[0][0]                     \n",
      "__________________________________________________________________________________________________\n",
      "dropout_3 (Dropout)             (None, 8)            0           dense_4[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "global_average_pooling2d_1 (Glo (None, 1280)         0           top_activation[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_5 (Dense)                 (None, 4)            36          dropout_3[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "dropout_4 (Dropout)             (None, 1280)         0           global_average_pooling2d_1[0][0] \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_1 (Concatenate)     (None, 1284)         0           dense_5[0][0]                    \n",
      "                                                                 dropout_4[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "dense_6 (Dense)                 (None, 4)            5140        concatenate_1[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_5 (Dropout)             (None, 4)            0           dense_6[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "dense_7 (Dense)                 (None, 1)            5           dropout_5[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 6,580,757\n",
      "Trainable params: 5,237\n",
      "Non-trainable params: 6,575,520\n",
      "__________________________________________________________________________________________________\n"
     ]
    }
   ],
   "source": [
    "model.summary()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# F - Postprocessing : Evaluation"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {},
   "outputs": [],
   "source": [
    "from postprocessing.evaluate import evaluate_hybrid, compute_score\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:predicting ...\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "avg. FVC: 2690.479018721756, std FVC 832.7709592986739\n",
      "mean difference : 151.66%, std: 981.78%\n"
     ]
    }
   ],
   "source": [
    "preds = evaluate_hybrid(model, df, trainAttrX, train_dataset, trainY, sc)"
   ]
  },
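  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`evaluate_hybrid` is a project-local helper from `postprocessing.evaluate`. A minimal sketch of the kind of report it prints, assuming it predicts on the hybrid (tabular + image) inputs and maps the scaled targets back to FVC units with the fitted scaler `sc` (the exact implementation may differ):\n",
    "\n",
    "```python\n",
    "import numpy as np\n",
    "\n",
    "preds = model.predict([trainAttrX, train_dataset])\n",
    "fvc_pred = sc.inverse_transform(preds)\n",
    "fvc_true = sc.inverse_transform(trainY)\n",
    "diff_pct = 100 * (fvc_pred.ravel() - fvc_true.ravel()) / fvc_true.ravel()\n",
    "print(f\"avg. FVC: {fvc_true.mean()}, std FVC {fvc_true.std()}\")\n",
    "print(f\"mean difference : {diff_pct.mean():.2f}%, std: {diff_pct.std():.2f}%\")\n",
    "```"
   ]
  },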
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "35/35 [==============================] - ETA: 3:48 - loss: 0.114 - ETA: 1:14 - loss: 0.086 - ETA: 1:11 - loss: 0.098 - ETA: 1:07 - loss: 0.108 - ETA: 1:06 - loss: 0.111 - ETA: 1:04 - loss: 0.107 - ETA: 1:01 - loss: 0.130 - ETA: 59s - loss: 0.124 - ETA: 56s - loss: 0.11 - ETA: 54s - loss: 0.11 - ETA: 53s - loss: 0.10 - ETA: 51s - loss: 0.10 - ETA: 49s - loss: 0.10 - ETA: 47s - loss: 0.10 - ETA: 45s - loss: 0.10 - ETA: 42s - loss: 0.11 - ETA: 40s - loss: 0.11 - ETA: 37s - loss: 0.14 - ETA: 35s - loss: 0.17 - ETA: 33s - loss: 0.16 - ETA: 31s - loss: 0.16 - ETA: 29s - loss: 0.16 - ETA: 26s - loss: 0.16 - ETA: 24s - loss: 0.16 - ETA: 22s - loss: 0.15 - ETA: 19s - loss: 0.15 - ETA: 17s - loss: 0.15 - ETA: 15s - loss: 0.15 - ETA: 13s - loss: 0.15 - ETA: 11s - loss: 0.15 - ETA: 8s - loss: 0.1498 - ETA: 6s - loss: 0.148 - ETA: 4s - loss: 0.145 - ETA: 2s - loss: 0.144 - ETA: 0s - loss: 0.144 - 80s 2s/step - loss: 0.1441\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "0.1440872848033905"
      ]
     },
     "execution_count": 32,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model.evaluate([trainAttrX, train_dataset], trainY)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Test set"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:predicting ...\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "avg. FVC: 2690.479018721756, std FVC 832.7709592986739\n",
      "mean difference : 81.09%, std: 222.01%\n"
     ]
    }
   ],
   "source": [
    "preds = evaluate_hybrid(model, df, testAttrX, test_dataset, testY, sc)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "9/9 [==============================] - ETA: 22s - loss: 0.10 - ETA: 18s - loss: 0.10 - ETA: 14s - loss: 0.11 - ETA: 12s - loss: 0.15 - ETA: 9s - loss: 0.1351 - ETA: 6s - loss: 0.121 - ETA: 4s - loss: 0.117 - ETA: 2s - loss: 0.107 - ETA: 0s - loss: 0.107 - 20s 2s/step - loss: 0.1073\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "0.1072525605559349"
      ]
     },
     "execution_count": 36,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model.evaluate([testAttrX, test_dataset], testY)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# G - Postprocessing : Competition score"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [],
   "source": [
    "from postprocessing.dropout_predictions import create_dropout_predict_function\n",
    "import tensorflow as tf\n",
    "import matplotlib.pyplot as plt\n",
    "from postprocessing.evaluate import compute_score"
   ]
  },
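  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The next cell estimates predictive uncertainty with Monte Carlo dropout: dropout is kept active at prediction time and the model is sampled `num_iter` times, so the spread of the samples approximates the predictive standard deviation required by the competition metric. `create_dropout_predict_function` is a project-local helper; the sketch below shows the usual pattern behind this technique and is an assumption, not the actual implementation (for this model the efficientnet `FixedDropout` layer would also need to be handled via `custom_objects`):\n",
    "\n",
    "```python\n",
    "from tensorflow.keras import backend as K\n",
    "from tensorflow.keras.models import Model\n",
    "\n",
    "def create_dropout_predict_function_sketch(model, dropout):\n",
    "    config = model.get_config()\n",
    "    # force every dropout layer to the requested rate\n",
    "    for layer in config['layers']:\n",
    "        if 'Dropout' in layer['class_name']:\n",
    "            layer['config']['rate'] = dropout\n",
    "    new_model = Model.from_config(config)\n",
    "    new_model.set_weights(model.get_weights())\n",
    "    # learning_phase=1 keeps dropout active at predict time;\n",
    "    # this requires eager execution to be disabled (see below)\n",
    "    return K.function(new_model.inputs + [K.learning_phase()],\n",
    "                      new_model.outputs)\n",
    "```"
   ]
  },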
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[<tf.Tensor 'dense_4_input_2:0' shape=(None, 6) dtype=float32>, <tf.Tensor 'input_3_2:0' shape=(None, 240, 240, 4) dtype=float32>]\n"
     ]
    }
   ],
   "source": [
    "tf.compat.v1.disable_eager_execution()\n",
    "\n",
    "dropout = 0.5\n",
    "num_iter = 20\n",
    " \n",
    "input_data=[trainAttrX, train_dataset]\n",
    "input_data[1] = np.asarray(input_data[1]).reshape(train_dataset.shape[0],240,240,4)\n",
    "num_samples = input_data[0].shape[0]\n",
    "\n",
    "predict_with_dropout = create_dropout_predict_function(model, dropout)\n",
    "\n",
    "predictions = np.zeros((num_samples, num_iter))\n",
    "for i in range(num_iter):\n",
    "    predictions[:,i] = predict_with_dropout([input_data,1])[0].reshape(-1)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Problème de mémoire, ne fonctionne pas"
   ]
  },
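  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A possible workaround (not tried here) is to feed `predict_with_dropout` in mini-batches, so that only a slice of the `(N, 240, 240, 4)` image tensor is in memory at once, keeping the calling convention used above:\n",
    "\n",
    "```python\n",
    "batch_size = 32  # hypothetical value, tune to the available memory\n",
    "predictions = np.zeros((num_samples, num_iter))\n",
    "for i in range(num_iter):\n",
    "    for start in range(0, num_samples, batch_size):\n",
    "        end = min(start + batch_size, num_samples)\n",
    "        batch = [input_data[0][start:end], input_data[1][start:end]]\n",
    "        predictions[start:end, i] = predict_with_dropout([batch, 1])[0].reshape(-1)\n",
    "```"
   ]
  },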
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Test"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "**Dropout 0.4**"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[<tf.Tensor 'dense_4_input_4:0' shape=(None, 6) dtype=float32>, <tf.Tensor 'input_3_4:0' shape=(None, 240, 240, 4) dtype=float32>]\n"
     ]
    }
   ],
   "source": [
    "tf.compat.v1.disable_eager_execution()\n",
    "\n",
    "dropout = 0.4\n",
    "num_iter = 20\n",
    " \n",
    "input_data=[testAttrX, test_dataset]\n",
    "input_data[1] = np.asarray(input_data[1]).reshape(test_dataset.shape[0],240,240,4)\n",
    "num_samples = input_data[0].shape[0]\n",
    "\n",
    "predict_with_dropout = create_dropout_predict_function(model, dropout)\n",
    "\n",
    "predictions = np.zeros((num_samples, num_iter))\n",
    "for i in range(num_iter):\n",
    "    predictions[:,i] = predict_with_dropout([input_data,1])[0].reshape(-1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXAAAAD4CAYAAAD1jb0+AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAPnUlEQVR4nO3df6yeZX3H8fdHQHHqBOSsaShdUYiELKOYsw6CWbQOw8AoJmSRGNc/SI5LNMHMzFWXbJpsCSRTdIkhq6L2D+d0qIOgUxlgjMlS1kopLZVQsUaaQssElf3BUvjuj+cqnh3O4TznnOc5p9fp+5U8Ofd93fd57u9FHz69ej33j1QVkqT+vGylC5AkLY4BLkmdMsAlqVMGuCR1ygCXpE6dupwHO/vss2vDhg3LeUhJ6t6uXbuerKqJme3LGuAbNmxg586dy3lISepekp/N1u4UiiR1ygCXpE4Z4JLUKQNckjplgEtSpwxwSeqUAS5JnTLAJalTBrgkdWpZr8Rc7TZs/dZI3+/gjVeP9P0krS6OwCWpU0MHeJJTktyf5M62fl6SHUkOJPlqkpePr0xJ0kwLGYHfAOyftn4TcHNVnQ88BVw/ysIkSS9tqABPsg64Gvh8Ww+wGbit7bIduGYM9UmS5jDsCPzTwEeA59v664Cnq+pYW38MOGe2X0wylWRnkp1Hjx5dSq2SpGnmDfAk7wCOVNWuxRygqrZV1WRVTU5MvOh+5JKkRRrmNMLLgXcmuQo4Hfht4DPAGUlObaPwdcCh8ZUpSZpp3hF4VX20qtZV1QbgPcA9VfVe4F7g2rbbFuD2sVUpSXqRpZwH/lfAXyQ5wGBO/NbRlCRJGsaCrsSsqu8D32/LjwKbRl+SJGkYXokpSZ0ywCWpUwa4JHXKAJekTnk7WS2Jt9CVVo4jcEnqlAEuSZ0ywCWpUwa4JHXKAJekThngktQpA1ySOmWAS1KnDHBJ6pRXYp7AvMpR0ktxBC5JnRrmocanJ7kvyQNJ9iX5RGv/UpKfJtndXhvHXq0k6QXDTKE8C2yuqmeSnAb8MMm/t21/WVW3ja88SdJc5g3wqirgmbZ6WnvVOIuSJM1vqDnwJKck2Q0cAe6qqh1t098n2ZPk5iSvmON3p5LsTLLz6NGjo6lakjRcgFfVc1W1EVgHbErye8BHgQuBPwDOYvCU+tl+d1tVTVbV5MTExGiqliQt7CyUqnoauBe4sqoO18CzwBfxCfWStKyGOQtlIskZbfmVwBXAj5OsbW0BrgH2jq9MSdJMw5yFshbYnuQUBoH/taq6M8k9SSaAALuBPx9fmZKkmYY5C2UPcMks7ZvHUpEkaSheiSlJnTLAJalTBrgkdcoAl6ROGeCS1CkDXJI6ZYBLUqcMcEnqlAEuSZ0ywCWpUwa4JHXKAJekThngktQpA1ySOmWAS1KnDHBJ6tQwj1Q7Pcl9SR5Isi/JJ1r7eUl2JDmQ5KtJXj7+ciVJxw0zAn8W2FxVFwMbgSuTXArcBNxcVecDTwHXj61KSdKLzBvg7cnzz7TV09qrgM3Aba19O4MHG0uSlskwDzWmPdB4F3A+8FngJ8DTVXWs7fIYcM4cvzsFTAGsX79+qfVqCTZs/dZKlyBphIb6ErOqnquqjcA6YBNw4bAHqKptVTVZVZMTExOLq1KS9CILOgulqp4G7gUuA85IcnwEvw44NNrSJEkvZZizUCaSnNGWXwlcAexnEOTXtt22ALePqUZJ0iyGmQNfC2xv8+AvA75WVXcmeQj4lyR/B9wP3DrGOiVJM8wb4FW1B7hklvZHGcyHS5JWgFdiSlKnDHBJ6pQBLkmdMsAlqVMGuCR1ygCXpE4Z4JLUKQNckjplgEtSpwxwSeqUAS5JnTLAJalTBrgkdcoAl6RODfVMTKlXo34O6MEbrx7p+0lL4Qhckjo1zCPVzk1yb5KHkuxLckNr/3iSQ0l2t9dV4y9XknTcMFMox4APV9WPkrwG2JXkrrbt5qr6h/GVJ0mayzCPVDsMHG7Lv06yHzhn3IVJkl7agubAk2xg8HzMHa3pg0n2JPlCkjNHXZwkaW5DB3iSVwNfBz5UVb8CbgHeAGxkMEL/5By/N5VkZ5KdR48eXXrFkiRgyABPchqD8P5yVX0DoKqeqKrnqup54HPM8YT6qtpWVZNVNTkxMTGquiXppDfMWSgBbgX2V9WnprWvnbbbu4G9oy9PkjSXYc5CuRx4H/Bgkt2t7WPAdUk2AgUcBN4/hvokSXMY5iyUHwKZZdO3R1+OJGlYXokpSZ0ywCWpUwa4JHXKAJekThngktSpk/Z+4KO+T7QkLTdH4JLUKQNckjplgEtSpwxwSerUSfslpk5MfrksDc8RuCR1ygCXpE4Z4JLUKQNckjplgEtSpwxwSerUMM/EPDfJvUkeSrIvyQ2t/awkdyV5pP08c/zlSpKOG2YEfgz4cFVdBFwKfCDJRcBW4O6qugC4u61LkpbJvAFeVYer6kdt+dfAfuAc4F3A9rbbduCaMdUoSZrFgq7ETLIBuATYAaypqsNt0+PAmjl+ZwqYAli/fv2iC/UKPUn6/4b+EjPJq4GvAx+qql9N31ZVBdRsv1dV26pqsqomJyYmllSsJOk3hgrwJKcxCO8vV9U3WvMTSda27WuBI+MpUZI0m2HOQglwK7C/qj41bdMdwJa2vAW4ffTlSZLmMswc+OXA+4AHk+xubR8DbgS+luR64GfAn46lQknSrOYN8Kr6IZA5Nr9ttOVIkobllZiS1CkDXJI6ZYBLUqcMcEnqlAEuSZ0ywCWpUwa4JHXKAJekThngktQpA1ySOmWAS1KnDHBJ6pQBLkmdMsAlqVMGuCR1ygCXpE4N80i1LyQ5kmTvtLaPJzmUZHd7XTXeMiVJMw0zAv8ScOUs7TdX1cb2+vZoy5IkzWfeAK+qHwC/WIZaJEkLsJQ58A8m2dOmWM6ca6ckU0l2Jtl59OjRJRxOkjTdYgP8FuANwEbgMPDJuXasqm1VNVlVkxMTE4s8nCRppkUFeFU9UVXPVdXzwOeATaMtS5I0n0UFeJK101bfDeyda19J0nicOt8OSb4CvAU4O8ljwN8Cb0myESjgIPD+8ZUoSZrNvAFeVdfN0nzrGGqRTngbtn5r5O958MarR/6eOjl4JaYkdcoAl6ROGeCS1CkDXJI6ZYBLUqcMcEnqlAEuSZ0ywCWpUwa4JHXKAJekThngktQpA1ySOmWAS1KnDHBJ6tS8t5OV1JdR3/LW292euByBS1Kn5g3w9tT5I0n2Tms7K8ldSR5pP+d8Kr0kaTyGGYF/CbhyRttW4O6qugC4u61LkpbRvAFeVT8AfjGj+V3A9ra8HbhmtGVJkuaz2C8x11TV4bb8OLBmrh2TTAFTAOvXr1/k4aTVaxzP2dTJYclfYlZVMXg6/Vzbt1XVZFVNTkxMLPVwkqRmsQH+RJK1AO3nkdGVJEkaxmID/A5gS1veAtw+mnIkScMa5jTCrwD/CbwxyWNJrgduBK5I8gjwx21dkrSM5v0Ss6qum2PT20ZciyRpAbwSU5I6ZYB
LUqcMcEnqlAEuSZ0ywCWpUwa4JHXKAJekThngktQpA1ySOmWAS1KnDHBJ6pQBLkmdMsAlqVMGuCR1arHPxJSkE8aonyt68MarR/p+4+IIXJI6taQReJKDwK+B54BjVTU5iqIkSfMbxRTKW6vqyRG8jyRpAZxCkaROLTXAC/hekl1JpkZRkCRpOEudQnlzVR1K8jvAXUl+XFU/mL5DC/YpgPXr1y/xcJKk45Y0Aq+qQ+3nEeCbwKZZ9tlWVZNVNTkxMbGUw0mSpll0gCd5VZLXHF8G3g7sHVVhkqSXtpQplDXAN5Mcf59/rqrvjKQqSdK8Fh3gVfUocPEIa5EkLYCnEUpSpwxwSeqUAS5JnTLAJalTBrgkdcoAl6ROGeCS1CkDXJI6ZYBLUqcMcEnqlA81lvSSRv3AYOjnocEnOkfgktQpA1ySOmWAS1KnDHBJ6pRfYkrSDL18cesIXJI6taQAT3JlkoeTHEiydVRFSZLmt5SHGp8CfBb4E+Ai4LokF42qMEnSS1vKCHwTcKCqHq2q/wX+BXjXaMqSJM1nKV9ingP8fNr6Y8AfztwpyRQw1VafSfLwEo7Zg7OBJ1e6iGVysvT1ZOknLFNfc9O4jzCUZf1zXWKff3e2xrGfhVJV24Bt4z7OiSLJzqqaXOk6lsPJ0teTpZ9gX3uzlCmUQ8C509bXtTZJ0jJYSoD/F3BBkvOSvBx4D3DHaMqSJM1n0VMoVXUsyQeB7wKnAF+oqn0jq6xfJ810ESdPX0+WfoJ97UqqaqVrkCQtgldiSlKnDHBJ6pQBvgBJzk1yb5KHkuxLckNrPyvJXUkeaT/PbO1J8o/tVgN7krxpZXswvCSnJ7kvyQOtr59o7ecl2dH69NX2BTZJXtHWD7TtG1a0AwuU5JQk9ye5s62v1n4eTPJgkt1Jdra2Vff5BUhyRpLbkvw4yf4kl622vhrgC3MM+HBVXQRcCnyg3T5gK3B3VV0A3N3WYXCbgQvaawq4ZflLXrRngc1VdTGwEbgyyaXATcDNVXU+8BRwfdv/euCp1n5z268nNwD7p62v1n4CvLWqNk47B3o1fn4BPgN8p6ouBC5m8Oe7uvpaVb4W+QJuB64AHgbWtra1wMNt+Z+A66bt/8J+Pb2A3wJ+xOBK2yeBU1v7ZcB32/J3gcva8qltv6x07UP2bx2D/5k3A3cCWY39bDUfBM6e0bbqPr/Aa4GfzvyzWW19dQS+SO2fzpcAO4A1VXW4bXocWNOWZ7vdwDnLVeNStWmF3cAR4C7gJ8DTVXWs7TK9Py/0tW3/JfC6ZS148T4NfAR4vq2/jtXZT4ACvpdkV7vNBazOz+95wFHgi21q7PNJXsUq66sBvghJXg18HfhQVf1q+rYa/PW9Ks7NrKrnqmojgxHqJuDCla1o9JK8AzhSVbtWupZl8uaqehODKYMPJPmj6RtX0ef3VOBNwC1VdQnwP/xmugRYHX01wBcoyWkMwvvLVfWN1vxEkrVt+1oGI1ZYJbcbqKqngXsZTCWckeT4BWDT+/NCX9v21wL/vbyVLsrlwDuTHGRwR83NDOZOV1s/AaiqQ+3nEeCbDP5iXo2f38eAx6pqR1u/jUGgr6q+GuALkCTArcD+qvrUtE13AFva8hYGc+PH2/+sfcN9KfDLaf98O6ElmUhyRlt+JYO5/v0MgvzattvMvh7/b3AtcE8b4ZzQquqjVbWuqjYwuB3EPVX1XlZZPwGSvCrJa44vA28H9rIKP79V9Tjw8yRvbE1vAx5itfV1pSfhe3oBb2bwT649wO72uorBHOjdwCPAfwBntf3D4KEXPwEeBCZXug8L6OvvA/e3vu4F/qa1vx64DzgA/CvwitZ+els/0La/fqX7sIg+vwW4c7X2s/XpgfbaB/x1a191n99W/0ZgZ/sM/xtw5mrrq5fSS1KnnEKRpE4Z4JLUKQNckjplgEtSpwxwSeqUAS5JnTLAJalT/wfcE1fFNcsJ3QAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "FVC_pred = sc.inverse_transform(predictions)\n",
    "plt.hist(np.std(FVC_pred, axis = 1), bins=15)\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXAAAAD4CAYAAAD1jb0+AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAANQUlEQVR4nO3dX4il9X3H8fenG01LIriy0+3in45NpWVvsspgLYZg88cYvVChhHiRLKmwXigkkF5s04vY2os11AiFIqwo2UKilSbiEm2SjVgk0JrMJhtdteLGjtRl3R3RNIZCyppvL+YZOh3P7Dlnzjkz+5t5v+BwnvN7npnne37MfPjNb37Pc1JVSJLa8xvrXYAkaXUMcElqlAEuSY0ywCWpUQa4JDXqPWt5sm3bttX09PRanlKSmnf48OE3qmpqefuaBvj09DSzs7NreUpJal6SV3u1O4UiSY0ywCWpUQa4JDXKAJekRhngktQoA1ySGmWAS1KjDHBJapQBLkmNWtMrMbXxTO99fKDj5vbdMOFKpM3HEbgkNcoAl6RGGeCS1CgDXJIaZYBLUqMMcElqlAEuSY0ywCWpUX0DPMlvJvlhkp8meT7JX3XtlyZ5JsmxJP+Y5NzJlytJWjTICPxXwEeq6oPALuC6JFcBdwP3VtXvA28Bt06sSknSu/QN8Frwy+7lOd2jgI8A/9S1HwBumkSBkqTeBpoDT7IlyRHgFHAI+Bnw86o63R3yGnDhRCqUJPU0UIBX1TtVtQu4CLgS+MNBT5BkT5LZJLPz8/Orq1KS9C5DrUKpqp8DTwF/DJyfZPFuhhcBx1f4mv1VNVNVM1NTU6PUKklaYpBVKFNJzu+2fwv4OPAiC0H+p91hu4HHJlSjJKmHQe4HvgM4kGQLC4H/SFV9O8kLwMNJ/gb4CfDABOuUJC3TN8Cr6lng8h7tr7AwHy5JWgdeiSlJjTLAJalRBrgkNcoAl6RGGeCS1CgDXJIaZYBLUqMMcElqlAEuSY0ywCWpUQa4JDXKAJekRhngktQoA1ySGjXI/cClkU3vfXyg4+b23TDhSqSNwxG4JDXKAJekRhngktQoA1ySGmWAS1KjDHBJapQBLkmNMsAlqVEGuCQ1qm+AJ7k4yVNJXkjyfJLPd+13Jjme5Ej3uH7y5UqSFg1yKf1p4ItV9eMk5wGHkxzq9t1bVX87ufIkSSvpG+BVdQI40W2/neRF4MJJFyZJOrOhbmaVZBq4HHgGuBq4I8lngVkWRulv9fiaPcAegEsuuWTUeqWheBMtbWQD/xMzyfuBbwJfqKpfAPcBHwB2sTBCv6fX11XV/qqaqaqZqamp0SuWJAEDBniSc1gI769X1bcAqupkVb1TVb8G7geunFyZkqTlBlmFEuAB4MWq+uqS9h1LDrsZODr+8iRJKxlkDvxq4DPAc0mOdG1fAm5JsgsoYA64bQL1SZJWMMgqlB8A6bHrifGXI0kalB+pprOKq0akwXkpvSQ1ygCXpEYZ4JLUKANckhplgEtSowxwSWqUAS5JjTLAJalRBrgkNcoAl6RGGeCS1CgDXJIaZYBLUqMMcElqlAEuSY0ywCWpUQa4JDXKAJekRhngktQoPxNT7zLo51KupxZqlCbNEbgkNcoAl6RG9Q3wJBcneSrJC0meT/L5rv2CJIeSvNw9b518uZKkRYOMwE8DX6yqncBVwO1JdgJ7gSer6jLgye61JGmN9A3wqjpRVT/utt8GXgQuBG4EDnSHHQBumlCNkqQehpoDTzINXA48A2yvqhPdrteB7St8zZ4ks0lm5+fnR6lVkrTEwAGe5P3AN4EvVNUvlu6rqgKq19dV1f6qmqmqmampqZGKlST9n4ECPMk5LIT316vqW13zySQ7uv07gFOTKVGS1Msgq1ACPAC8WFVfXbLrILC7294NPDb+8iRJKxnkSsyrgc8AzyU50rV9CdgHPJLkVuBV4FMTqVCS1FPfAK+qHwBZYfdHx1uOJGlQXokpSY3yZlabiDeAkjYWR+CS1CgDXJIaZYBLUqMMcElqlAEuSY0ywCWpUQa4JDXKAJekRhngktQoA1ySGmWAS1KjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEYZ4JLUKANckhrlR6pJQxjmY+nm9t0wwUokR+CS1CwDXJIaZYBLUqP6BniSB5OcSnJ0SdudSY4nOdI9rp9smZKk5QYZgX8NuK5H+71Vtat7PDHesiRJ/fQN8Kp6GnhzDWqRJA1hlDnwO5I8202xbF3poCR7kswmmZ2fnx/hdJKkpVYb4PcBHwB2ASeAe1Y6sKr2V9VMVc1MTU2t8nSSpOVWFeBVdbKq3qmqXwP3A1eOtyxJUj+rCvAkO5a8vBk4utKxkqTJ6HspfZKHgGuAbUleA74MXJNkF1DAHHDb5EqUJPXSN8Cr6pYezQ9MoBZJ0hC8ElOSGmWAS1KjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEb5mZgbwDCf06je7EO1yBG4JDXKAJekRhngktQoA1ySGmWAS1KjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEYZ4JLUKANckhplgEtSowxwSWqUAS5Jjeob4EkeTHIqydElbRckOZTk5e5562TLlCQtN8gI/GvAdcva9gJPVtVlwJPda0nSGuob4FX1NPDmsuYbgQPd9gHgpvGWJUnqZ7Vz4Nur6kS3/TqwfaUDk+xJMptkdn5+fpWnkyQtN/I/MauqgDrD/v1VNVNVM1NTU6OeTpLUWW2An0yyA6B7PjW+kiRJg1htgB8Ednfbu4HHxlOOJGlQgywjfAj4V+APkryW5FZgH/DxJC8DH+teS5LW0Hv6HVBVt6yw66NjrkWSNASvxJSkRvUdgUuarOm9j4/1+83tu2Gs309nL0fgktQoA1ySGmWAS1KjDHBJapQBLkmNchXKOhj3qgNpqUF/vlyt0j5H4JLUKANckhplgEtSowxwSWqUAS5JjTLAJalRLiMcI5cHSlpLjsAlqVEGuCQ1ygCXpEYZ4JLUKANckhrlKhRpQlyVpElzBC5JjTLAJalRI02hJJkD3gbeAU5X1cw4ipIk9TeOOfA/qao3xvB9JElDcApFkho1aoAX8L0kh5Ps6XVAkj1JZpPMzs/Pj3g6SdKiUQP8Q1V1BfBJ4PYkH15+QFXtr6qZqpqZmpoa8XSSpEUjBXhVHe+eTwGPAleOoyhJUn+rDvAk70ty3uI2cC1wdFyFSZLObJRVKNuBR5Msfp9vVNV3xlKVJKmvVQd4Vb0CfHCMtUiShuAyQklqlAEuSY0ywCWpUQa4JDXKAJekRhngktQoA1ySGuVHqg3Aj8aSdDZyBC5JjTLAJalRBrgkNcoAl6RGGeCS1CgDXJIatWmXEbo0UBq/QX+v5vbdMOFKNgdH4JLUKANckhplgEtSowxwSWqUAS5JjWpmFYqrRqTxauF3atw1jnv1yzD1TWLljSNwSWqUAS5JjTLAJalRIwV4kuuSvJTkWJK94ypKktTfqgM8yRbg74FPAjuBW5LsHFdhkqQzG2UEfiVwrKpeqar
/AR4GbhxPWZKkfkZZRngh8J9LXr8G/NHyg5LsAfZ0L3+Z5KURztmqbcAb613EWcq+WdmG7ZvcPZZvM3T/jOm8qzLiuX+3V+PE14FX1X5g/6TPczZLMltVM+tdx9nIvlmZfXNm9s9oUyjHgYuXvL6oa5MkrYFRAvxHwGVJLk1yLvBp4OB4ypIk9bPqKZSqOp3kDuC7wBbgwap6fmyVbSybegqpD/tmZfbNmW36/klVrXcNkqRV8EpMSWqUAS5JjTLAxyjJnUmOJznSPa5fsu8vulsOvJTkE0vaN+XtCDbr+14uyVyS57qfl9mu7YIkh5K83D1v7dqT5O+6Pns2yRXrW/14JXkwyakkR5e0Dd0XSXZ3x7+cZPd6vJc1U1U+xvQA7gT+vEf7TuCnwHuBS4GfsfCP3y3d9u8B53bH7Fzv97EG/bQp3/cKfTEHbFvW9hVgb7e9F7i7274e+GcgwFXAM+td/5j74sPAFcDR1fYFcAHwSve8tdveut7vbVIPR+Br40bg4ar6VVX9B3CMhVsRbNbbEWzW9z2oG4ED3fYB4KYl7f9QC/4NOD/JjnWobyKq6mngzWXNw/bFJ4BDVfVmVb0FHAKum3jx68QAH787uj/pHlz8c4/etx248AztG91mfd+9FPC9JIe7204AbK+qE93268D2bnsz9tuwfbGp+qiZj1Q7WyT5PvA7PXb9JXAfcBcLv5R3AfcAf7Z21alBH6qq40l+GziU5N+X7qyqSuJaX+yLXgzwIVXVxwY5Lsn9wLe7l2e67cBmvB2Bt2HoVNXx7vlUkkdZmF46mWRHVZ3opgVOdYdvxn4bti+OA9csa/+XNahzXTiFMkbL5iNvBhb/m34Q+HSS9ya5FLgM+CGb93YEm/V9/z9J3pfkvMVt4FoWfmYOAourJ3YDj3XbB4HPdiswrgL+a8n0wkY1bF98F7g2ydZuCvParm1DcgQ+Xl9JsouFKZQ54DaAqno+ySPAC8Bp4PaqegdgM96OoLwNw6LtwKNJYOF38RtV9Z0kPwIeSXIr8Crwqe74J1hYfXEM+G/gc2tf8uQkeYiF0fO2JK8BXwb2MURfVNWbSe5iYZAA8NdVtfwfoxuGl9JLUqOcQpGkRhngktQoA1ySGmWAS1KjDHBJapQBLkmNMsAlqVH/C9CjBZFOXEk9AAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "diff_fvc = (np.mean(FVC_pred,axis=1) - sc.inverse_transform(testY))\n",
    "diff = (np.mean(predictions,axis=1) - testY)\n",
    "plt.hist(diff_fvc, bins=30)\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "MSE :  107606.7506415237\n",
      "RMSE :  328.03467902269676\n"
     ]
    }
   ],
   "source": [
    "MSE = np.mean(diff_fvc*diff_fvc)\n",
    "RMSE = np.sqrt(MSE)\n",
    "print(\"MSE : \",MSE)\n",
    "print(\"RMSE : \", RMSE)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "MSE :  0.15470168389000788\n",
      "RMSE :  0.39332134939513247\n"
     ]
    }
   ],
   "source": [
    "MSE = np.mean(diff*diff)\n",
    "RMSE = np.sqrt(MSE)\n",
    "print(\"MSE : \",MSE)\n",
    "print(\"RMSE : \", RMSE)"
   ]
  },
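  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The two RMSE values are the same error on two scales: roughly 328 in FVC units (ml) from `diff_fvc`, and roughly 0.39 in the scaler's normalized units from `diff`."
   ]
  },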
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "-11.283512383097307"
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "compute_score(testY,np.mean(FVC_pred,axis=1),np.std(FVC_pred,axis=1))"
   ]
  },
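  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`compute_score` is the project-local implementation of the competition metric, a Laplace log likelihood. A minimal sketch of the official definition, assuming predictions, ground truth and sigma are all in ml (note that `testY` above is still in scaled units while `FVC_pred` is in ml; if `compute_score` expects both on the same scale, `sc.inverse_transform(testY)` may be intended, which would also explain the very low score):\n",
    "\n",
    "```python\n",
    "import numpy as np\n",
    "\n",
    "def laplace_log_likelihood(fvc_true, fvc_pred, sigma):\n",
    "    # the official metric clips sigma at 70 ml and the error at 1000 ml\n",
    "    sigma_c = np.maximum(sigma, 70)\n",
    "    delta = np.minimum(np.abs(fvc_true - fvc_pred), 1000)\n",
    "    return np.mean(-np.sqrt(2) * delta / sigma_c - np.log(np.sqrt(2) * sigma_c))\n",
    "```"
   ]
  },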
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "**Dropout 0.5**"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[<tf.Tensor 'dense_4_input_5:0' shape=(None, 6) dtype=float32>, <tf.Tensor 'input_3_5:0' shape=(None, 240, 240, 4) dtype=float32>]\n"
     ]
    }
   ],
   "source": [