{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {
    "papermill": {
     "duration": 0.012261,
     "end_time": "2020-08-20T13:10:33.841122",
     "exception": false,
     "start_time": "2020-08-20T13:10:33.828861",
     "status": "completed"
    },
    "tags": []
   },
   "source": [
    "# CNN with transfer learning with no weights (efficientnet) + MLP\n",
    "\n",
    "This notebook contains the configurations required to train an efficientnet model for K-folds.\n",
    "\n",
    "It is possible to hit -0.6910 LB by tweaking parameters in this notebook!\n",
    "\n",
    "https://www.kaggle.com/khoongweihao/k-fold-tf-efficientnet-models-training"
   ]
  },
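  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Since the FVC target is per-patient, a K-fold split should keep all rows of a patient in the same fold. A minimal sketch of such a patient-grouped split with scikit-learn follows; `GroupKFold` and `n_splits=5` are assumptions for illustration, not necessarily what the linked kernel does."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch: 5-fold split grouped by patient, so that no patient appears in\n",
    "# both the training and validation folds of the same split (assumed setup).\n",
    "import pandas as pd\n",
    "from sklearn.model_selection import GroupKFold\n",
    "\n",
    "def iter_patient_folds(df, n_splits=5):\n",
    "    gkf = GroupKFold(n_splits=n_splits)\n",
    "    for fold, (tr_idx, va_idx) in enumerate(gkf.split(df, groups=df['Patient'])):\n",
    "        yield fold, df.iloc[tr_idx], df.iloc[va_idx]"
   ]
  },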
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import pandas as pd\n",
    "import os\n",
    "import logging\n",
    "logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## A - Preprocessing : Reading Data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "os.chdir('../')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>Patient</th>\n",
       "      <th>Weeks</th>\n",
       "      <th>FVC</th>\n",
       "      <th>Percent</th>\n",
       "      <th>Age</th>\n",
       "      <th>Sex</th>\n",
       "      <th>SmokingStatus</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>-4</td>\n",
       "      <td>2315</td>\n",
       "      <td>58.253649</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>5</td>\n",
       "      <td>2214</td>\n",
       "      <td>55.712129</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>7</td>\n",
       "      <td>2061</td>\n",
       "      <td>51.862104</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>9</td>\n",
       "      <td>2144</td>\n",
       "      <td>53.950679</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>11</td>\n",
       "      <td>2069</td>\n",
       "      <td>52.063412</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                     Patient  Weeks   FVC    Percent  Age   Sex SmokingStatus\n",
       "0  ID00007637202177411956430     -4  2315  58.253649   79  Male     Ex-smoker\n",
       "1  ID00007637202177411956430      5  2214  55.712129   79  Male     Ex-smoker\n",
       "2  ID00007637202177411956430      7  2061  51.862104   79  Male     Ex-smoker\n",
       "3  ID00007637202177411956430      9  2144  53.950679   79  Male     Ex-smoker\n",
       "4  ID00007637202177411956430     11  2069  52.063412   79  Male     Ex-smoker"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from preprocessing.read_load_data import read_data\n",
    "\n",
    "input_directory='../osic-pulmonary-fibrosis-progression'\n",
    "train_df, test_df, sample_df = read_data(input_directory)   \n",
    "train_df.head()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## B - Preprocessing : Loading Data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "patients_train_ids= train_df.Patient.unique()\n",
    "patient_test_list= test_df.Patient.unique()\n",
    "patients_train_ids = [pat for pat in patients_train_ids]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:loading  attributes...\n",
      "INFO:loading images...\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Array shape:  (176, 240, 240, 4)\n",
      "min value:  -0.1251496147096971\n",
      "max value:  0.16921848376184256\n"
     ]
    }
   ],
   "source": [
    "from preprocessing.read_load_data import load_images\n",
    "\n",
    "logging.info(\"loading  attributes...\")\n",
    "df = pd.read_csv(f'{input_directory}/train.csv')\n",
    "patients_train_ids= df.Patient.unique().tolist()\n",
    "\n",
    "logging.info(\"loading images...\")\n",
    "images = load_images(input_directory,\n",
    "                    'train',\n",
    "                     patients_train_ids,\n",
    "                     option='superposition',\n",
    "                     outputH = 240,\n",
    "                     outputW = 240)\n",
    "\n",
    "print(\"Array shape: \", images.shape)\n",
    "#check value between -1,1\n",
    "print('min value: ', np.amin(images))\n",
    "print('max value: ', np.amax(images))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>Patient</th>\n",
       "      <th>Weeks</th>\n",
       "      <th>FVC</th>\n",
       "      <th>Percent</th>\n",
       "      <th>Age</th>\n",
       "      <th>Sex</th>\n",
       "      <th>SmokingStatus</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>-4</td>\n",
       "      <td>2315</td>\n",
       "      <td>58.253649</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>5</td>\n",
       "      <td>2214</td>\n",
       "      <td>55.712129</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>7</td>\n",
       "      <td>2061</td>\n",
       "      <td>51.862104</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>9</td>\n",
       "      <td>2144</td>\n",
       "      <td>53.950679</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>11</td>\n",
       "      <td>2069</td>\n",
       "      <td>52.063412</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                     Patient  Weeks   FVC    Percent  Age   Sex SmokingStatus\n",
       "0  ID00007637202177411956430     -4  2315  58.253649   79  Male     Ex-smoker\n",
       "1  ID00007637202177411956430      5  2214  55.712129   79  Male     Ex-smoker\n",
       "2  ID00007637202177411956430      7  2061  51.862104   79  Male     Ex-smoker\n",
       "3  ID00007637202177411956430      9  2144  53.950679   79  Male     Ex-smoker\n",
       "4  ID00007637202177411956430     11  2069  52.063412   79  Male     Ex-smoker"
      ]
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>Patient</th>\n",
       "      <th>Weeks</th>\n",
       "      <th>FVC</th>\n",
       "      <th>Percent</th>\n",
       "      <th>Age</th>\n",
       "      <th>Sex</th>\n",
       "      <th>SmokingStatus</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>9</th>\n",
       "      <td>ID00009637202177434476278</td>\n",
       "      <td>8</td>\n",
       "      <td>3660</td>\n",
       "      <td>85.282878</td>\n",
       "      <td>69</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>10</th>\n",
       "      <td>ID00009637202177434476278</td>\n",
       "      <td>9</td>\n",
       "      <td>3610</td>\n",
       "      <td>84.117812</td>\n",
       "      <td>69</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11</th>\n",
       "      <td>ID00009637202177434476278</td>\n",
       "      <td>11</td>\n",
       "      <td>3895</td>\n",
       "      <td>90.758691</td>\n",
       "      <td>69</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>12</th>\n",
       "      <td>ID00009637202177434476278</td>\n",
       "      <td>13</td>\n",
       "      <td>3759</td>\n",
       "      <td>87.589710</td>\n",
       "      <td>69</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>13</th>\n",
       "      <td>ID00009637202177434476278</td>\n",
       "      <td>15</td>\n",
       "      <td>3639</td>\n",
       "      <td>84.793550</td>\n",
       "      <td>69</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>14</th>\n",
       "      <td>ID00009637202177434476278</td>\n",
       "      <td>22</td>\n",
       "      <td>3578</td>\n",
       "      <td>83.372169</td>\n",
       "      <td>69</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>15</th>\n",
       "      <td>ID00009637202177434476278</td>\n",
       "      <td>33</td>\n",
       "      <td>3625</td>\n",
       "      <td>84.467332</td>\n",
       "      <td>69</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>16</th>\n",
       "      <td>ID00009637202177434476278</td>\n",
       "      <td>45</td>\n",
       "      <td>3390</td>\n",
       "      <td>78.991518</td>\n",
       "      <td>69</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>17</th>\n",
       "      <td>ID00009637202177434476278</td>\n",
       "      <td>60</td>\n",
       "      <td>3214</td>\n",
       "      <td>74.890484</td>\n",
       "      <td>69</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                      Patient  Weeks   FVC    Percent  Age   Sex SmokingStatus\n",
       "9   ID00009637202177434476278      8  3660  85.282878   69  Male     Ex-smoker\n",
       "10  ID00009637202177434476278      9  3610  84.117812   69  Male     Ex-smoker\n",
       "11  ID00009637202177434476278     11  3895  90.758691   69  Male     Ex-smoker\n",
       "12  ID00009637202177434476278     13  3759  87.589710   69  Male     Ex-smoker\n",
       "13  ID00009637202177434476278     15  3639  84.793550   69  Male     Ex-smoker\n",
       "14  ID00009637202177434476278     22  3578  83.372169   69  Male     Ex-smoker\n",
       "15  ID00009637202177434476278     33  3625  84.467332   69  Male     Ex-smoker\n",
       "16  ID00009637202177434476278     45  3390  78.991518   69  Male     Ex-smoker\n",
       "17  ID00009637202177434476278     60  3214  74.890484   69  Male     Ex-smoker"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df.loc[np.where(df.Patient == 'ID00009637202177434476278')]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Patient          ID00009637202177434476278\n",
       "Weeks                                    9\n",
       "FVC                                   3610\n",
       "Percent                          84.117812\n",
       "Age                                     69\n",
       "Sex                                   Male\n",
       "SmokingStatus                    Ex-smoker\n",
       "Name: 10, dtype: object"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df.iloc[10]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## C - Preprocessing : shuffle"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    " from sklearn.model_selection import train_test_split\n",
    "\n",
    "split = train_test_split(patients_train_ids, images, test_size=0.2, random_state=42)\n",
    "(trainPatient, testPatient, trainImagesX, testImagesX) = split"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    " #split the dataframe like the images\n",
    "df_train = df[df.Patient.isin(trainPatient)].copy()\n",
    "df_test = df[df.Patient.isin(testPatient)].copy()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:NumExpr defaulting to 8 threads.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(1093, 8) (280, 8)\n"
     ]
    }
   ],
   "source": [
    "from preprocessing.read_load_data import create_dataframe\n",
    "\n",
    "trainAttrX = create_dataframe(df_train)\n",
    "testAttrX = create_dataframe(df_test)\n",
    "print(trainAttrX.shape, testAttrX.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1093 280\n"
     ]
    }
   ],
   "source": [
    "#set one image per training row\n",
    "\n",
    "indice = 0\n",
    "train_dataset = np.ndarray((len(trainAttrX),240,240,4))\n",
    "for i,patient in enumerate(trainPatient):\n",
    "    nb_data = len(trainAttrX[trainAttrX.PatientID ==patient])\n",
    "    for ii in range(nb_data):\n",
    "        train_dataset[indice]=(trainImagesX[i])\n",
    "        indice+=1\n",
    "        \n",
    "        \n",
    "indicet = 0        \n",
    "test_dataset = np.ndarray((len(testAttrX),240,240,4))\n",
    "for i,patient in enumerate(testPatient):\n",
    "    nb_data = len(testAttrX[testAttrX.PatientID ==patient])\n",
    "    for ii in range(nb_data):\n",
    "        test_dataset[indicet] = testImagesX[i]\n",
    "        indicet+=1\n",
    "        \n",
    "        \n",
    "print(len(train_dataset),len(test_dataset))"
   ]
  },
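  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The same duplication can be written without the explicit double loop: counting rows per patient and using `np.repeat` yields an identical array, under the same ordering assumption the loops rely on (rows of `trainAttrX` grouped by patient, patients in `trainPatient` order)."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Vectorized sketch, equivalent to the loops above under the same\n",
    "# ordering assumption.\n",
    "counts = [len(trainAttrX[trainAttrX.PatientID == p]) for p in trainPatient]\n",
    "train_dataset_v = np.repeat(trainImagesX, counts, axis=0)\n",
    "assert train_dataset_v.shape == train_dataset.shape"
   ]
  },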
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## D - Preprocessing : Scaling + Encoding"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "from preprocessing.scale_data import scale_variable\n",
    "\n",
    "sc, trainAttrX, testAttrX = scale_variable(trainAttrX, testAttrX,'Target_FVC')\n",
    "sc1, trainAttrX, testAttrX = scale_variable(trainAttrX, testAttrX,'First_FVC')\n",
    "sc2, trainAttrX, testAttrX = scale_variable(trainAttrX, testAttrX,'Age')\n",
    "\n",
    "trainY = trainAttrX.loc[:,'Target_FVC_scaled']\n",
    "testY = testAttrX.loc[:,'Target_FVC_scaled']"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [],
   "source": [
    "from preprocessing.scale_data import encode_variable\n",
    "\n",
    "trainAttrX, testAttrX = encode_variable(trainAttrX, testAttrX,'Sex')\n",
    "trainAttrX, testAttrX = encode_variable(trainAttrX, testAttrX,'SmokingStatus')\n",
    "\n",
    "for dft in [trainAttrX,testAttrX]:\n",
    "    dft.drop(columns = ['Sex','SmokingStatus','Target_FVC','Target_FVC_scaled',\n",
    "                          'PatientID','First_FVC','Age'], inplace = True)\n",
    "    dft.loc[:,'First_Percent'] = dft.loc[:,'First_Percent']/100\n",
    "    dft.loc[:,'Delta_week'] = dft.loc[:,'Delta_week']/133"
   ]
  },
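  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`scale_variable` and `encode_variable` come from the local `preprocessing.scale_data` module. Judging by the returned scaler objects and the `_scaled` column suffix, they plausibly follow the usual fit-on-train / transform-both pattern; a hedged sketch of that pattern (not the actual implementation) is shown below."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch of the presumed pattern behind scale_variable: fit a StandardScaler\n",
    "# on the training column only, transform both splits, and return the scaler\n",
    "# so predictions can be inverse-transformed later.\n",
    "from sklearn.preprocessing import StandardScaler\n",
    "\n",
    "def scale_variable_sketch(train, test, col):\n",
    "    sc = StandardScaler()\n",
    "    train[col + '_scaled'] = sc.fit_transform(train[[col]]).ravel()\n",
    "    test[col + '_scaled'] = sc.transform(test[[col]]).ravel()\n",
    "    return sc, train, test"
   ]
  },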
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## E - Processing : Create models"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [],
   "source": [
    "from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping\n",
    "\n",
    "#set early stopping criteria\n",
    "pat = 5 #this is the number of epochs with no improvment after which the training will stop\n",
    "es = EarlyStopping(monitor='val_loss', patience=pat, verbose=1)\n",
    "\n",
    "#define the model checkpoint callback -> this will keep on saving the model as a physical file\n",
    "cp = ModelCheckpoint('clean_notebooks/cnn_injection_transfer.h5', verbose=1, save_best_only=True)"
   ]
  },
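  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Both callbacks only take effect once they are passed to the training call. The actual fit happens later through `fit_and_evaluate`; a direct Keras call would wire them in roughly like this (argument values other than `epochs=30` are assumed):\n",
    "\n",
    "```python\n",
    "model.fit(train_x, train_y,\n",
    "          validation_data=(val_x, val_y),\n",
    "          epochs=30, callbacks=[es, cp])\n",
    "```"
   ]
  },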
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [],
   "source": [
    "def custom_shuffle_split(trainAttrX,train_dataset,trainY,test_size = 0.1 ):\n",
    "    cut = int(len(trainY)*test_size)\n",
    "    arr = list(np.arange(len(trainY)))\n",
    "    np.random.shuffle(arr)\n",
    "    trainidx = arr[cut:]\n",
    "    testidx = arr[:cut]\n",
    "    train_x, train_y = [trainAttrX.iloc[trainidx], train_dataset[trainidx]] , trainY[trainidx]\n",
    "    val_x, val_y = [trainAttrX.iloc[testidx], train_dataset[testidx]] , trainY[testidx]\n",
    "    return train_x, val_x, train_y, val_y"
   ]
  },
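  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "For example, a 90/10 shuffle split of the arrays built above would look like this; `train_x`/`val_x` are `[tabular dataframe, image array]` pairs matching the two-input model, and `train_y`/`val_y` are the scaled target FVC values."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Example usage: hold out 10% of the training rows for validation.\n",
    "train_x, val_x, train_y, val_y = custom_shuffle_split(trainAttrX, train_dataset,\n",
    "                                                      trainY, test_size=0.1)\n",
    "print(len(train_y), len(val_y))"
   ]
  },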
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [],
   "source": [
    "from processing.models import create_hybrid_transfer\n",
    "from keras.optimizers import Adam\n",
    "from tensorflow.keras.models import Model\n",
    "import efficientnet.tfkeras as efn\n",
    "from time import time\n",
    "from processing.models import fit_and_evaluate"
   ]
  },
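  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The hybrid model itself is built by `create_hybrid_transfer` from `processing.models`. As a rough illustration of the architecture named in the title (an EfficientNet feature extractor with no pretrained weights, merged with an MLP over the tabular features), here is a minimal sketch; the `B0` variant, layer sizes, optimizer and loss are assumptions, not the actual implementation."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Minimal sketch of a hybrid EfficientNet + MLP regressor (assumed\n",
    "# hyperparameters, not the actual create_hybrid_transfer implementation).\n",
    "from tensorflow.keras.layers import Input, Dense, concatenate\n",
    "\n",
    "def sketch_hybrid_model(n_tabular_features, shape=(240, 240, 4)):\n",
    "    # CNN branch: EfficientNet-B0 instantiated with weights=None,\n",
    "    # i.e. the architecture only, no pretrained ImageNet weights\n",
    "    cnn = efn.EfficientNetB0(weights=None, include_top=False,\n",
    "                             input_shape=shape, pooling='avg')\n",
    "    # MLP branch over the scaled/encoded tabular attributes\n",
    "    tab_in = Input(shape=(n_tabular_features,))\n",
    "    tab = Dense(16, activation='relu')(tab_in)\n",
    "    # merge both branches and regress the scaled target FVC\n",
    "    merged = concatenate([tab, cnn.output])\n",
    "    out = Dense(1, activation='linear')(merged)\n",
    "    model = Model(inputs=[tab_in, cnn.input], outputs=out)\n",
    "    model.compile(loss='mse', optimizer='adam')\n",
    "    return model"
   ]
  },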
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 1/30\n",
      "111/111 [==============================] - ETA: 25:53 - loss: 0.69 - ETA: 4:07 - loss: 0.8006 - ETA: 4:01 - loss: 0.921 - ETA: 3:49 - loss: 0.998 - ETA: 3:41 - loss: 1.026 - ETA: 3:35 - loss: 1.034 - ETA: 3:30 - loss: 1.028 - ETA: 3:27 - loss: 1.015 - ETA: 3:25 - loss: 1.006 - ETA: 3:26 - loss: 0.999 - ETA: 3:23 - loss: 0.998 - ETA: 3:24 - loss: 1.001 - ETA: 3:24 - loss: 1.003 - ETA: 3:25 - loss: 1.006 - ETA: 3:24 - loss: 1.010 - ETA: 3:22 - loss: 1.011 - ETA: 3:20 - loss: 1.014 - ETA: 3:17 - loss: 1.016 - ETA: 3:14 - loss: 1.017 - ETA: 3:14 - loss: 1.018 - ETA: 3:14 - loss: 1.018 - ETA: 3:11 - loss: 1.018 - ETA: 3:09 - loss: 1.017 - ETA: 3:06 - loss: 1.016 - ETA: 3:04 - loss: 1.014 - ETA: 3:01 - loss: 1.011 - ETA: 2:59 - loss: 1.008 - ETA: 2:56 - loss: 1.006 - ETA: 2:53 - loss: 1.004 - ETA: 2:51 - loss: 1.002 - ETA: 2:48 - loss: 0.999 - ETA: 2:45 - loss: 0.995 - ETA: 2:43 - loss: 0.993 - ETA: 2:40 - loss: 0.991 - ETA: 2:38 - loss: 0.989 - ETA: 2:36 - loss: 0.988 - ETA: 2:33 - loss: 0.987 - ETA: 2:31 - loss: 0.986 - ETA: 2:29 - loss: 0.985 - ETA: 2:27 - loss: 0.983 - ETA: 2:25 - loss: 0.982 - ETA: 2:22 - loss: 0.980 - ETA: 2:20 - loss: 0.979 - ETA: 2:18 - loss: 0.977 - ETA: 2:16 - loss: 0.976 - ETA: 2:13 - loss: 0.974 - ETA: 2:11 - loss: 0.973 - ETA: 2:09 - loss: 0.971 - ETA: 2:07 - loss: 0.970 - ETA: 2:05 - loss: 0.969 - ETA: 2:03 - loss: 0.968 - ETA: 2:01 - loss: 0.967 - ETA: 1:58 - loss: 0.966 - ETA: 1:56 - loss: 0.964 - ETA: 1:54 - loss: 0.963 - ETA: 1:52 - loss: 0.962 - ETA: 1:50 - loss: 0.961 - ETA: 1:48 - loss: 0.960 - ETA: 1:46 - loss: 0.959 - ETA: 1:44 - loss: 0.958 - ETA: 1:42 - loss: 0.957 - ETA: 1:40 - loss: 0.956 - ETA: 1:38 - loss: 0.955 - ETA: 1:36 - loss: 0.954 - ETA: 1:34 - loss: 0.953 - ETA: 1:32 - loss: 0.952 - ETA: 1:30 - loss: 0.951 - ETA: 1:28 - loss: 0.950 - ETA: 1:26 - loss: 0.950 - ETA: 1:24 - loss: 0.949 - ETA: 1:22 - loss: 0.948 - ETA: 1:20 - loss: 0.947 - ETA: 1:18 - loss: 0.946 - ETA: 1:16 - loss: 0.945 - ETA: 1:14 - loss: 0.944 - ETA: 1:12 - loss: 0.944 - ETA: 1:10 - loss: 0.943 - ETA: 1:07 - loss: 0.942 - ETA: 1:05 - loss: 0.941 - ETA: 1:03 - loss: 0.940 - ETA: 1:01 - loss: 0.940 - ETA: 59s - loss: 0.939 - ETA: 57s - loss: 0.93 - ETA: 55s - loss: 0.93 - ETA: 53s - loss: 0.93 - ETA: 51s - loss: 0.93 - ETA: 49s - loss: 0.93 - ETA: 47s - loss: 0.93 - ETA: 45s - loss: 0.93 - ETA: 43s - loss: 0.93 - ETA: 41s - loss: 0.93 - ETA: 39s - loss: 0.93 - ETA: 37s - loss: 0.93 - ETA: 35s - loss: 0.92 - ETA: 33s - loss: 0.92 - ETA: 31s - loss: 0.92 - ETA: 28s - loss: 0.92 - ETA: 26s - loss: 0.92 - ETA: 24s - loss: 0.92 - ETA: 22s - loss: 0.92 - ETA: 20s - loss: 0.92 - ETA: 18s - loss: 0.92 - ETA: 16s - loss: 0.92 - ETA: 14s - loss: 0.92 - ETA: 12s - loss: 0.92 - ETA: 10s - loss: 0.92 - ETA: 8s - loss: 0.9231 - ETA: 6s - loss: 0.922 - ETA: 4s - loss: 0.922 - ETA: 2s - loss: 0.922 - ETA: 0s - loss: 0.921 - 247s 2s/step - loss: 0.9213 - val_loss: 0.6694\n",
      "\n",
      "Epoch 00001: val_loss improved from inf to 0.66943, saving model to clean_notebooks\\cnn_injection_transfer.h5\n",
      "Epoch 2/30\n",
      "111/111 [==============================] - ETA: 3:19 - loss: 0.307 - ETA: 3:20 - loss: 0.295 - ETA: 3:18 - loss: 0.336 - ETA: 3:15 - loss: 0.381 - ETA: 3:12 - loss: 0.398 - ETA: 3:10 - loss: 0.435 - ETA: 3:08 - loss: 0.465 - ETA: 3:07 - loss: 0.502 - ETA: 3:05 - loss: 0.524 - ETA: 3:03 - loss: 0.543 - ETA: 3:02 - loss: 0.553 - ETA: 3:00 - loss: 0.563 - ETA: 2:58 - loss: 0.572 - ETA: 2:56 - loss: 0.579 - ETA: 2:55 - loss: 0.586 - ETA: 2:52 - loss: 0.594 - ETA: 2:50 - loss: 0.601 - ETA: 2:49 - loss: 0.607 - ETA: 2:47 - loss: 0.613 - ETA: 2:45 - loss: 0.617 - ETA: 2:43 - loss: 0.621 - ETA: 2:42 - loss: 0.624 - ETA: 2:40 - loss: 0.626 - ETA: 2:38 - loss: 0.628 - ETA: 2:36 - loss: 0.629 - ETA: 2:34 - loss: 0.630 - ETA: 2:32 - loss: 0.633 - ETA: 2:30 - loss: 0.635 - ETA: 2:29 - loss: 0.638 - ETA: 2:27 - loss: 0.640 - ETA: 2:25 - loss: 0.642 - ETA: 2:23 - loss: 0.644 - ETA: 2:21 - loss: 0.646 - ETA: 2:20 - loss: 0.647 - ETA: 2:18 - loss: 0.647 - ETA: 2:16 - loss: 0.648 - ETA: 2:14 - loss: 0.649 - ETA: 2:13 - loss: 0.649 - ETA: 2:11 - loss: 0.649 - ETA: 2:10 - loss: 0.649 - ETA: 2:08 - loss: 0.649 - ETA: 2:06 - loss: 0.649 - ETA: 2:04 - loss: 0.649 - ETA: 2:02 - loss: 0.649 - ETA: 2:00 - loss: 0.649 - ETA: 1:59 - loss: 0.648 - ETA: 1:57 - loss: 0.648 - ETA: 1:55 - loss: 0.647 - ETA: 1:53 - loss: 0.647 - ETA: 1:52 - loss: 0.646 - ETA: 1:50 - loss: 0.645 - ETA: 1:48 - loss: 0.645 - ETA: 1:46 - loss: 0.645 - ETA: 1:44 - loss: 0.644 - ETA: 1:43 - loss: 0.644 - ETA: 1:41 - loss: 0.644 - ETA: 1:39 - loss: 0.644 - ETA: 1:37 - loss: 0.644 - ETA: 1:35 - loss: 0.643 - ETA: 1:34 - loss: 0.643 - ETA: 1:32 - loss: 0.643 - ETA: 1:30 - loss: 0.643 - ETA: 1:28 - loss: 0.642 - ETA: 1:26 - loss: 0.642 - ETA: 1:24 - loss: 0.642 - ETA: 1:23 - loss: 0.641 - ETA: 1:21 - loss: 0.641 - ETA: 1:19 - loss: 0.641 - ETA: 1:17 - loss: 0.640 - ETA: 1:15 - loss: 0.640 - ETA: 1:13 - loss: 0.640 - ETA: 1:11 - loss: 0.639 - ETA: 1:10 - loss: 0.639 - ETA: 1:08 - loss: 0.638 - ETA: 1:06 - loss: 0.638 - ETA: 1:04 - loss: 0.637 - ETA: 1:02 - loss: 0.637 - ETA: 1:00 - loss: 0.637 - ETA: 58s - loss: 0.637 - ETA: 57s - loss: 0.63 - ETA: 55s - loss: 0.63 - ETA: 53s - loss: 0.63 - ETA: 51s - loss: 0.63 - ETA: 49s - loss: 0.63 - ETA: 47s - loss: 0.63 - ETA: 45s - loss: 0.63 - ETA: 44s - loss: 0.63 - ETA: 42s - loss: 0.63 - ETA: 40s - loss: 0.63 - ETA: 38s - loss: 0.63 - ETA: 36s - loss: 0.63 - ETA: 34s - loss: 0.63 - ETA: 33s - loss: 0.63 - ETA: 31s - loss: 0.63 - ETA: 29s - loss: 0.63 - ETA: 27s - loss: 0.63 - ETA: 25s - loss: 0.63 - ETA: 23s - loss: 0.63 - ETA: 22s - loss: 0.63 - ETA: 20s - loss: 0.63 - ETA: 18s - loss: 0.63 - ETA: 16s - loss: 0.63 - ETA: 14s - loss: 0.63 - ETA: 12s - loss: 0.63 - ETA: 11s - loss: 0.63 - ETA: 9s - loss: 0.6340 - ETA: 7s - loss: 0.633 - ETA: 5s - loss: 0.633 - ETA: 3s - loss: 0.633 - ETA: 1s - loss: 0.633 - ETA: 0s - loss: 0.633 - 210s 2s/step - loss: 0.6334 - val_loss: 0.3297\n",
      "\n",
      "Epoch 00002: val_loss improved from 0.66943 to 0.32972, saving model to clean_notebooks\\cnn_injection_transfer.h5\n",
      "Epoch 3/30\n",
      "111/111 [==============================] - ETA: 3:09 - loss: 0.779 - ETA: 3:20 - loss: 0.639 - ETA: 3:15 - loss: 0.571 - ETA: 3:13 - loss: 0.598 - ETA: 3:11 - loss: 0.607 - ETA: 3:09 - loss: 0.613 - ETA: 3:07 - loss: 0.613 - ETA: 3:05 - loss: 0.613 - ETA: 3:03 - loss: 0.608 - ETA: 3:02 - loss: 0.599 - ETA: 3:00 - loss: 0.593 - ETA: 2:58 - loss: 0.594 - ETA: 2:56 - loss: 0.595 - ETA: 2:54 - loss: 0.596 - ETA: 2:53 - loss: 0.598 - ETA: 2:51 - loss: 0.599 - ETA: 2:49 - loss: 0.599 - ETA: 2:47 - loss: 0.598 - ETA: 2:46 - loss: 0.596 - ETA: 2:44 - loss: 0.594 - ETA: 2:42 - loss: 0.592 - ETA: 2:40 - loss: 0.590 - ETA: 2:38 - loss: 0.588 - ETA: 2:37 - loss: 0.586 - ETA: 2:35 - loss: 0.585 - ETA: 2:33 - loss: 0.584 - ETA: 2:31 - loss: 0.583 - ETA: 2:29 - loss: 0.582 - ETA: 2:27 - loss: 0.581 - ETA: 2:25 - loss: 0.581 - ETA: 2:24 - loss: 0.581 - ETA: 2:22 - loss: 0.580 - ETA: 2:20 - loss: 0.579 - ETA: 2:18 - loss: 0.578 - ETA: 2:16 - loss: 0.578 - ETA: 2:15 - loss: 0.577 - ETA: 2:13 - loss: 0.576 - ETA: 2:11 - loss: 0.576 - ETA: 2:09 - loss: 0.575 - ETA: 2:07 - loss: 0.575 - ETA: 2:06 - loss: 0.574 - ETA: 2:04 - loss: 0.573 - ETA: 2:02 - loss: 0.572 - ETA: 2:00 - loss: 0.572 - ETA: 1:59 - loss: 0.571 - ETA: 1:57 - loss: 0.570 - ETA: 1:55 - loss: 0.570 - ETA: 1:53 - loss: 0.569 - ETA: 1:51 - loss: 0.568 - ETA: 1:49 - loss: 0.567 - ETA: 1:48 - loss: 0.566 - ETA: 1:46 - loss: 0.565 - ETA: 1:44 - loss: 0.565 - ETA: 1:42 - loss: 0.564 - ETA: 1:40 - loss: 0.563 - ETA: 1:39 - loss: 0.562 - ETA: 1:37 - loss: 0.561 - ETA: 1:35 - loss: 0.560 - ETA: 1:33 - loss: 0.559 - ETA: 1:31 - loss: 0.558 - ETA: 1:30 - loss: 0.557 - ETA: 1:28 - loss: 0.556 - ETA: 1:26 - loss: 0.556 - ETA: 1:24 - loss: 0.555 - ETA: 1:23 - loss: 0.554 - ETA: 1:21 - loss: 0.554 - ETA: 1:19 - loss: 0.553 - ETA: 1:17 - loss: 0.552 - ETA: 1:15 - loss: 0.551 - ETA: 1:14 - loss: 0.550 - ETA: 1:12 - loss: 0.550 - ETA: 1:10 - loss: 0.549 - ETA: 1:08 - loss: 0.548 - ETA: 1:06 - loss: 0.547 - ETA: 1:05 - loss: 0.546 - ETA: 1:03 - loss: 0.545 - ETA: 1:01 - loss: 0.545 - ETA: 59s - loss: 0.544 - ETA: 57s - loss: 0.54 - ETA: 56s - loss: 0.54 - ETA: 54s - loss: 0.54 - ETA: 52s - loss: 0.54 - ETA: 50s - loss: 0.54 - ETA: 48s - loss: 0.54 - ETA: 46s - loss: 0.54 - ETA: 45s - loss: 0.53 - ETA: 43s - loss: 0.53 - ETA: 41s - loss: 0.53 - ETA: 39s - loss: 0.53 - ETA: 37s - loss: 0.53 - ETA: 36s - loss: 0.53 - ETA: 34s - loss: 0.53 - ETA: 32s - loss: 0.53 - ETA: 30s - loss: 0.53 - ETA: 28s - loss: 0.53 - ETA: 27s - loss: 0.53 - ETA: 25s - loss: 0.53 - ETA: 23s - loss: 0.53 - ETA: 21s - loss: 0.53 - ETA: 19s - loss: 0.53 - ETA: 18s - loss: 0.53 - ETA: 16s - loss: 0.53 - ETA: 14s - loss: 0.53 - ETA: 12s - loss: 0.53 - ETA: 10s - loss: 0.53 - ETA: 9s - loss: 0.5299 - ETA: 7s - loss: 0.529 - ETA: 5s - loss: 0.529 - ETA: 3s - loss: 0.528 - ETA: 1s - loss: 0.528 - ETA: 0s - loss: 0.527 - 206s 2s/step - loss: 0.5274 - val_loss: 0.1941\n",
      "\n",
      "Epoch 00003: val_loss improved from 0.32972 to 0.19406, saving model to clean_notebooks\\cnn_injection_transfer.h5\n",
      "Epoch 4/30\n",
      "111/111 [==============================] - ETA: 3:02 - loss: 0.507 - ETA: 3:17 - loss: 0.516 - ETA: 3:13 - loss: 0.490 - ETA: 3:11 - loss: 0.502 - ETA: 3:09 - loss: 0.510 - ETA: 3:08 - loss: 0.506 - ETA: 3:06 - loss: 0.508 - ETA: 3:04 - loss: 0.506 - ETA: 3:03 - loss: 0.505 - ETA: 3:00 - loss: 0.503 - ETA: 2:59 - loss: 0.498 - ETA: 2:57 - loss: 0.496 - ETA: 2:56 - loss: 0.495 - ETA: 2:54 - loss: 0.493 - ETA: 2:53 - loss: 0.493 - ETA: 2:52 - loss: 0.493 - ETA: 2:51 - loss: 0.493 - ETA: 2:49 - loss: 0.492 - ETA: 2:47 - loss: 0.490 - ETA: 2:46 - loss: 0.488 - ETA: 2:44 - loss: 0.487 - ETA: 2:42 - loss: 0.488 - ETA: 2:40 - loss: 0.489 - ETA: 2:38 - loss: 0.489 - ETA: 2:36 - loss: 0.489 - ETA: 2:34 - loss: 0.488 - ETA: 2:32 - loss: 0.488 - ETA: 2:31 - loss: 0.488 - ETA: 2:29 - loss: 0.487 - ETA: 2:27 - loss: 0.486 - ETA: 2:25 - loss: 0.486 - ETA: 2:23 - loss: 0.485 - ETA: 2:21 - loss: 0.484 - ETA: 2:19 - loss: 0.484 - ETA: 2:18 - loss: 0.484 - ETA: 2:16 - loss: 0.483 - ETA: 2:14 - loss: 0.483 - ETA: 2:12 - loss: 0.482 - ETA: 2:10 - loss: 0.482 - ETA: 2:08 - loss: 0.482 - ETA: 2:06 - loss: 0.482 - ETA: 2:05 - loss: 0.482 - ETA: 2:03 - loss: 0.482 - ETA: 2:01 - loss: 0.483 - ETA: 1:59 - loss: 0.484 - ETA: 1:57 - loss: 0.484 - ETA: 1:56 - loss: 0.485 - ETA: 1:55 - loss: 0.485 - ETA: 1:53 - loss: 0.485 - ETA: 1:51 - loss: 0.486 - ETA: 1:49 - loss: 0.486 - ETA: 1:47 - loss: 0.487 - ETA: 1:45 - loss: 0.487 - ETA: 1:44 - loss: 0.487 - ETA: 1:42 - loss: 0.487 - ETA: 1:40 - loss: 0.487 - ETA: 1:38 - loss: 0.487 - ETA: 1:36 - loss: 0.487 - ETA: 1:34 - loss: 0.487 - ETA: 1:32 - loss: 0.487 - ETA: 1:31 - loss: 0.487 - ETA: 1:29 - loss: 0.487 - ETA: 1:27 - loss: 0.486 - ETA: 1:25 - loss: 0.486 - ETA: 1:23 - loss: 0.486 - ETA: 1:22 - loss: 0.486 - ETA: 1:20 - loss: 0.486 - ETA: 1:18 - loss: 0.486 - ETA: 1:16 - loss: 0.485 - ETA: 1:14 - loss: 0.485 - ETA: 1:12 - loss: 0.485 - ETA: 1:11 - loss: 0.485 - ETA: 1:09 - loss: 0.486 - ETA: 1:07 - loss: 0.485 - ETA: 1:05 - loss: 0.485 - ETA: 1:03 - loss: 0.485 - ETA: 1:01 - loss: 0.485 - ETA: 1:00 - loss: 0.485 - ETA: 58s - loss: 0.485 - ETA: 56s - loss: 0.48 - ETA: 54s - loss: 0.48 - ETA: 52s - loss: 0.48 - ETA: 51s - loss: 0.48 - ETA: 49s - loss: 0.48 - ETA: 47s - loss: 0.48 - ETA: 45s - loss: 0.48 - ETA: 43s - loss: 0.48 - ETA: 42s - loss: 0.48 - ETA: 40s - loss: 0.48 - ETA: 38s - loss: 0.48 - ETA: 36s - loss: 0.48 - ETA: 34s - loss: 0.48 - ETA: 32s - loss: 0.48 - ETA: 31s - loss: 0.48 - ETA: 29s - loss: 0.48 - ETA: 27s - loss: 0.48 - ETA: 25s - loss: 0.48 - ETA: 23s - loss: 0.48 - ETA: 21s - loss: 0.48 - ETA: 20s - loss: 0.48 - ETA: 18s - loss: 0.47 - ETA: 16s - loss: 0.47 - ETA: 14s - loss: 0.47 - ETA: 12s - loss: 0.47 - ETA: 10s - loss: 0.47 - ETA: 9s - loss: 0.4790 - ETA: 7s - loss: 0.478 - ETA: 5s - loss: 0.478 - ETA: 3s - loss: 0.478 - ETA: 1s - loss: 0.478 - ETA: 0s - loss: 0.478 - 208s 2s/step - loss: 0.4778 - val_loss: 0.1621\n",
      "\n",
      "Epoch 00004: val_loss improved from 0.19406 to 0.16212, saving model to clean_notebooks\\cnn_injection_transfer.h5\n",
      "Epoch 5/30\n",
      "111/111 [==============================] - ETA: 3:06 - loss: 0.305 - ETA: 3:12 - loss: 0.261 - ETA: 3:09 - loss: 0.262 - ETA: 3:12 - loss: 0.278 - ETA: 3:12 - loss: 0.292 - ETA: 3:10 - loss: 0.304 - ETA: 3:08 - loss: 0.310 - ETA: 3:05 - loss: 0.314 - ETA: 3:04 - loss: 0.316 - ETA: 3:02 - loss: 0.318 - ETA: 3:00 - loss: 0.320 - ETA: 2:59 - loss: 0.322 - ETA: 2:57 - loss: 0.323 - ETA: 2:55 - loss: 0.324 - ETA: 2:53 - loss: 0.329 - ETA: 2:52 - loss: 0.333 - ETA: 2:50 - loss: 0.337 - ETA: 2:48 - loss: 0.339 - ETA: 2:46 - loss: 0.341 - ETA: 2:44 - loss: 0.343 - ETA: 2:43 - loss: 0.344 - ETA: 2:41 - loss: 0.345 - ETA: 2:39 - loss: 0.347 - ETA: 2:37 - loss: 0.349 - ETA: 2:35 - loss: 0.350 - ETA: 2:33 - loss: 0.352 - ETA: 2:31 - loss: 0.352 - ETA: 2:30 - loss: 0.353 - ETA: 2:28 - loss: 0.354 - ETA: 2:26 - loss: 0.355 - ETA: 2:24 - loss: 0.355 - ETA: 2:23 - loss: 0.356 - ETA: 2:21 - loss: 0.356 - ETA: 2:19 - loss: 0.356 - ETA: 2:18 - loss: 0.356 - ETA: 2:16 - loss: 0.356 - ETA: 2:14 - loss: 0.356 - ETA: 2:12 - loss: 0.356 - ETA: 2:10 - loss: 0.356 - ETA: 2:09 - loss: 0.356 - ETA: 2:07 - loss: 0.355 - ETA: 2:05 - loss: 0.355 - ETA: 2:03 - loss: 0.355 - ETA: 2:01 - loss: 0.355 - ETA: 1:59 - loss: 0.356 - ETA: 1:58 - loss: 0.356 - ETA: 1:56 - loss: 0.355 - ETA: 1:54 - loss: 0.355 - ETA: 1:52 - loss: 0.355 - ETA: 1:50 - loss: 0.355 - ETA: 1:48 - loss: 0.355 - ETA: 1:46 - loss: 0.355 - ETA: 1:45 - loss: 0.355 - ETA: 1:43 - loss: 0.355 - ETA: 1:41 - loss: 0.354 - ETA: 1:39 - loss: 0.354 - ETA: 1:37 - loss: 0.354 - ETA: 1:36 - loss: 0.354 - ETA: 1:34 - loss: 0.355 - ETA: 1:32 - loss: 0.355 - ETA: 1:30 - loss: 0.355 - ETA: 1:29 - loss: 0.355 - ETA: 1:27 - loss: 0.356 - ETA: 1:25 - loss: 0.356 - ETA: 1:23 - loss: 0.356 - ETA: 1:21 - loss: 0.357 - ETA: 1:20 - loss: 0.357 - ETA: 1:18 - loss: 0.357 - ETA: 1:16 - loss: 0.358 - ETA: 1:14 - loss: 0.358 - ETA: 1:12 - loss: 0.358 - ETA: 1:10 - loss: 0.359 - ETA: 1:09 - loss: 0.359 - ETA: 1:07 - loss: 0.360 - ETA: 1:05 - loss: 0.360 - ETA: 1:03 - loss: 0.360 - ETA: 1:01 - loss: 0.361 - ETA: 59s - loss: 0.361 - ETA: 58s - loss: 0.36 - ETA: 56s - loss: 0.36 - ETA: 54s - loss: 0.36 - ETA: 52s - loss: 0.36 - ETA: 50s - loss: 0.36 - ETA: 49s - loss: 0.36 - ETA: 47s - loss: 0.36 - ETA: 45s - loss: 0.36 - ETA: 43s - loss: 0.36 - ETA: 41s - loss: 0.36 - ETA: 39s - loss: 0.36 - ETA: 38s - loss: 0.36 - ETA: 36s - loss: 0.36 - ETA: 34s - loss: 0.36 - ETA: 32s - loss: 0.36 - ETA: 31s - loss: 0.36 - ETA: 29s - loss: 0.36 - ETA: 27s - loss: 0.36 - ETA: 25s - loss: 0.36 - ETA: 24s - loss: 0.36 - ETA: 22s - loss: 0.36 - ETA: 20s - loss: 0.36 - ETA: 18s - loss: 0.36 - ETA: 16s - loss: 0.36 - ETA: 15s - loss: 0.36 - ETA: 13s - loss: 0.36 - ETA: 11s - loss: 0.36 - ETA: 9s - loss: 0.3684 - ETA: 7s - loss: 0.368 - ETA: 5s - loss: 0.368 - ETA: 3s - loss: 0.369 - ETA: 1s - loss: 0.369 - ETA: 0s - loss: 0.369 - 216s 2s/step - loss: 0.3701 - val_loss: 0.1330\n",
      "\n",
      "Epoch 00005: val_loss improved from 0.16212 to 0.13303, saving model to clean_notebooks\\cnn_injection_transfer.h5\n",
      "Epoch 6/30\n",
      "111/111 [==============================] - ETA: 3:10 - loss: 0.419 - ETA: 3:12 - loss: 0.363 - ETA: 3:12 - loss: 0.435 - ETA: 3:12 - loss: 0.446 - ETA: 3:10 - loss: 0.444 - ETA: 3:09 - loss: 0.433 - ETA: 3:07 - loss: 0.430 - ETA: 3:07 - loss: 0.424 - ETA: 3:04 - loss: 0.419 - ETA: 3:03 - loss: 0.413 - ETA: 3:01 - loss: 0.407 - ETA: 3:00 - loss: 0.402 - ETA: 2:58 - loss: 0.398 - ETA: 2:56 - loss: 0.394 - ETA: 2:54 - loss: 0.391 - ETA: 2:52 - loss: 0.388 - ETA: 2:50 - loss: 0.385 - ETA: 2:48 - loss: 0.382 - ETA: 2:48 - loss: 0.380 - ETA: 2:46 - loss: 0.379 - ETA: 2:44 - loss: 0.377 - ETA: 2:42 - loss: 0.376 - ETA: 2:41 - loss: 0.376 - ETA: 2:39 - loss: 0.375 - ETA: 2:37 - loss: 0.375 - ETA: 2:35 - loss: 0.374 - ETA: 2:33 - loss: 0.373 - ETA: 2:31 - loss: 0.373 - ETA: 2:29 - loss: 0.372 - ETA: 2:27 - loss: 0.372 - ETA: 2:25 - loss: 0.372 - ETA: 2:24 - loss: 0.371 - ETA: 2:22 - loss: 0.371 - ETA: 2:20 - loss: 0.371 - ETA: 2:18 - loss: 0.370 - ETA: 2:16 - loss: 0.370 - ETA: 2:14 - loss: 0.370 - ETA: 2:13 - loss: 0.370 - ETA: 2:11 - loss: 0.370 - ETA: 2:09 - loss: 0.369 - ETA: 2:07 - loss: 0.369 - ETA: 2:05 - loss: 0.369 - ETA: 2:03 - loss: 0.369 - ETA: 2:02 - loss: 0.369 - ETA: 2:00 - loss: 0.369 - ETA: 1:58 - loss: 0.369 - ETA: 1:56 - loss: 0.370 - ETA: 1:55 - loss: 0.370 - ETA: 1:53 - loss: 0.371 - ETA: 1:51 - loss: 0.371 - ETA: 1:49 - loss: 0.371 - ETA: 1:47 - loss: 0.372 - ETA: 1:45 - loss: 0.372 - ETA: 1:44 - loss: 0.372 - ETA: 1:42 - loss: 0.372 - ETA: 1:40 - loss: 0.372 - ETA: 1:38 - loss: 0.372 - ETA: 1:36 - loss: 0.373 - ETA: 1:34 - loss: 0.373 - ETA: 1:33 - loss: 0.373 - ETA: 1:31 - loss: 0.373 - ETA: 1:29 - loss: 0.374 - ETA: 1:27 - loss: 0.374 - ETA: 1:25 - loss: 0.374 - ETA: 1:23 - loss: 0.374 - ETA: 1:22 - loss: 0.374 - ETA: 1:20 - loss: 0.374 - ETA: 1:18 - loss: 0.374 - ETA: 1:16 - loss: 0.374 - ETA: 1:14 - loss: 0.374 - ETA: 1:12 - loss: 0.374 - ETA: 1:11 - loss: 0.374 - ETA: 1:09 - loss: 0.375 - ETA: 1:07 - loss: 0.375 - ETA: 1:05 - loss: 0.375 - ETA: 1:03 - loss: 0.375 - ETA: 1:02 - loss: 0.375 - ETA: 1:00 - loss: 0.375 - ETA: 58s - loss: 0.375 - ETA: 56s - loss: 0.37 - ETA: 54s - loss: 0.37 - ETA: 52s - loss: 0.37 - ETA: 51s - loss: 0.37 - ETA: 49s - loss: 0.37 - ETA: 47s - loss: 0.37 - ETA: 45s - loss: 0.37 - ETA: 43s - loss: 0.37 - ETA: 41s - loss: 0.37 - ETA: 40s - loss: 0.37 - ETA: 38s - loss: 0.37 - ETA: 36s - loss: 0.37 - ETA: 34s - loss: 0.37 - ETA: 32s - loss: 0.37 - ETA: 30s - loss: 0.37 - ETA: 29s - loss: 0.37 - ETA: 27s - loss: 0.37 - ETA: 25s - loss: 0.37 - ETA: 23s - loss: 0.37 - ETA: 21s - loss: 0.37 - ETA: 20s - loss: 0.37 - ETA: 18s - loss: 0.37 - ETA: 16s - loss: 0.37 - ETA: 14s - loss: 0.37 - ETA: 12s - loss: 0.37 - ETA: 10s - loss: 0.37 - ETA: 9s - loss: 0.3749 - ETA: 7s - loss: 0.375 - ETA: 5s - loss: 0.375 - ETA: 3s - loss: 0.375 - ETA: 1s - loss: 0.375 - ETA: 0s - loss: 0.375 - 208s 2s/step - loss: 0.3757 - val_loss: 0.1408\n",
      "\n",
      "Epoch 00006: val_loss did not improve from 0.13303\n",
      "Epoch 7/30\n",
      "111/111 [==============================] - ETA: 3:27 - loss: 0.245 - ETA: 3:28 - loss: 0.248 - ETA: 3:21 - loss: 0.326 - ETA: 3:16 - loss: 0.346 - ETA: 3:15 - loss: 0.347 - ETA: 3:12 - loss: 0.347 - ETA: 3:10 - loss: 0.345 - ETA: 3:07 - loss: 0.341 - ETA: 3:06 - loss: 0.337 - ETA: 3:04 - loss: 0.335 - ETA: 3:01 - loss: 0.333 - ETA: 2:59 - loss: 0.332 - ETA: 2:57 - loss: 0.331 - ETA: 2:56 - loss: 0.332 - ETA: 2:53 - loss: 0.334 - ETA: 2:52 - loss: 0.336 - ETA: 2:50 - loss: 0.337 - ETA: 2:48 - loss: 0.338 - ETA: 2:46 - loss: 0.338 - ETA: 2:45 - loss: 0.338 - ETA: 2:43 - loss: 0.338 - ETA: 2:41 - loss: 0.338 - ETA: 2:39 - loss: 0.338 - ETA: 2:37 - loss: 0.337 - ETA: 2:36 - loss: 0.337 - ETA: 2:34 - loss: 0.336 - ETA: 2:32 - loss: 0.336 - ETA: 2:30 - loss: 0.336 - ETA: 2:29 - loss: 0.336 - ETA: 2:27 - loss: 0.336 - ETA: 2:25 - loss: 0.335 - ETA: 2:23 - loss: 0.335 - ETA: 2:21 - loss: 0.335 - ETA: 2:20 - loss: 0.334 - ETA: 2:18 - loss: 0.334 - ETA: 2:16 - loss: 0.334 - ETA: 2:14 - loss: 0.334 - ETA: 2:12 - loss: 0.334 - ETA: 2:10 - loss: 0.334 - ETA: 2:09 - loss: 0.333 - ETA: 2:07 - loss: 0.334 - ETA: 2:05 - loss: 0.334 - ETA: 2:03 - loss: 0.334 - ETA: 2:01 - loss: 0.334 - ETA: 1:59 - loss: 0.334 - ETA: 1:58 - loss: 0.334 - ETA: 1:56 - loss: 0.335 - ETA: 1:54 - loss: 0.335 - ETA: 1:52 - loss: 0.336 - ETA: 1:50 - loss: 0.337 - ETA: 1:49 - loss: 0.338 - ETA: 1:47 - loss: 0.339 - ETA: 1:45 - loss: 0.340 - ETA: 1:43 - loss: 0.341 - ETA: 1:41 - loss: 0.342 - ETA: 1:39 - loss: 0.344 - ETA: 1:38 - loss: 0.345 - ETA: 1:36 - loss: 0.346 - ETA: 1:34 - loss: 0.347 - ETA: 1:32 - loss: 0.348 - ETA: 1:30 - loss: 0.349 - ETA: 1:29 - loss: 0.349 - ETA: 1:27 - loss: 0.350 - ETA: 1:25 - loss: 0.351 - ETA: 1:23 - loss: 0.352 - ETA: 1:22 - loss: 0.352 - ETA: 1:20 - loss: 0.353 - ETA: 1:18 - loss: 0.354 - ETA: 1:16 - loss: 0.354 - ETA: 1:14 - loss: 0.355 - ETA: 1:12 - loss: 0.355 - ETA: 1:11 - loss: 0.355 - ETA: 1:09 - loss: 0.356 - ETA: 1:07 - loss: 0.356 - ETA: 1:05 - loss: 0.356 - ETA: 1:03 - loss: 0.357 - ETA: 1:01 - loss: 0.357 - ETA: 1:00 - loss: 0.357 - ETA: 58s - loss: 0.358 - ETA: 56s - loss: 0.35 - ETA: 54s - loss: 0.35 - ETA: 52s - loss: 0.35 - ETA: 50s - loss: 0.35 - ETA: 49s - loss: 0.35 - ETA: 47s - loss: 0.35 - ETA: 45s - loss: 0.36 - ETA: 43s - loss: 0.36 - ETA: 41s - loss: 0.36 - ETA: 40s - loss: 0.36 - ETA: 38s - loss: 0.36 - ETA: 36s - loss: 0.36 - ETA: 34s - loss: 0.36 - ETA: 32s - loss: 0.36 - ETA: 30s - loss: 0.36 - ETA: 29s - loss: 0.36 - ETA: 27s - loss: 0.36 - ETA: 25s - loss: 0.36 - ETA: 23s - loss: 0.36 - ETA: 21s - loss: 0.36 - ETA: 19s - loss: 0.36 - ETA: 18s - loss: 0.36 - ETA: 16s - loss: 0.36 - ETA: 14s - loss: 0.36 - ETA: 12s - loss: 0.36 - ETA: 10s - loss: 0.36 - ETA: 9s - loss: 0.3660 - ETA: 7s - loss: 0.366 - ETA: 5s - loss: 0.366 - ETA: 3s - loss: 0.366 - ETA: 1s - loss: 0.366 - ETA: 0s - loss: 0.367 - 207s 2s/step - loss: 0.3673 - val_loss: 0.1300\n",
      "\n",
      "Epoch 00007: val_loss improved from 0.13303 to 0.13003, saving model to clean_notebooks\\cnn_injection_transfer.h5\n",
      "Epoch 8/30\n",
      "111/111 [==============================] - ETA: 3:04 - loss: 0.432 - ETA: 3:21 - loss: 0.361 - ETA: 3:23 - loss: 0.339 - ETA: 3:20 - loss: 0.316 - ETA: 3:17 - loss: 0.318 - ETA: 3:13 - loss: 0.317 - ETA: 3:11 - loss: 0.319 - ETA: 3:08 - loss: 0.319 - ETA: 3:07 - loss: 0.321 - ETA: 3:05 - loss: 0.321 - ETA: 3:03 - loss: 0.322 - ETA: 3:01 - loss: 0.323 - ETA: 2:59 - loss: 0.329 - ETA: 2:58 - loss: 0.333 - ETA: 2:56 - loss: 0.337 - ETA: 2:55 - loss: 0.339 - ETA: 2:54 - loss: 0.341 - ETA: 2:52 - loss: 0.342 - ETA: 2:50 - loss: 0.343 - ETA: 2:49 - loss: 0.344 - ETA: 2:48 - loss: 0.344 - ETA: 2:46 - loss: 0.345 - ETA: 2:44 - loss: 0.347 - ETA: 2:42 - loss: 0.347 - ETA: 2:40 - loss: 0.349 - ETA: 2:38 - loss: 0.350 - ETA: 2:36 - loss: 0.352 - ETA: 2:34 - loss: 0.353 - ETA: 2:32 - loss: 0.354 - ETA: 2:30 - loss: 0.354 - ETA: 2:28 - loss: 0.354 - ETA: 2:26 - loss: 0.355 - ETA: 2:24 - loss: 0.355 - ETA: 2:22 - loss: 0.357 - ETA: 2:20 - loss: 0.357 - ETA: 2:18 - loss: 0.358 - ETA: 2:17 - loss: 0.359 - ETA: 2:15 - loss: 0.360 - ETA: 2:13 - loss: 0.360 - ETA: 2:11 - loss: 0.361 - ETA: 2:09 - loss: 0.362 - ETA: 2:07 - loss: 0.363 - ETA: 2:05 - loss: 0.363 - ETA: 2:03 - loss: 0.363 - ETA: 2:01 - loss: 0.364 - ETA: 2:00 - loss: 0.364 - ETA: 1:58 - loss: 0.365 - ETA: 1:56 - loss: 0.365 - ETA: 1:54 - loss: 0.365 - ETA: 1:52 - loss: 0.366 - ETA: 1:50 - loss: 0.366 - ETA: 1:48 - loss: 0.367 - ETA: 1:46 - loss: 0.367 - ETA: 1:45 - loss: 0.367 - ETA: 1:43 - loss: 0.368 - ETA: 1:41 - loss: 0.368 - ETA: 1:39 - loss: 0.368 - ETA: 1:37 - loss: 0.368 - ETA: 1:35 - loss: 0.368 - ETA: 1:33 - loss: 0.368 - ETA: 1:31 - loss: 0.368 - ETA: 1:30 - loss: 0.368 - ETA: 1:28 - loss: 0.368 - ETA: 1:26 - loss: 0.368 - ETA: 1:24 - loss: 0.367 - ETA: 1:22 - loss: 0.367 - ETA: 1:20 - loss: 0.367 - ETA: 1:18 - loss: 0.367 - ETA: 1:17 - loss: 0.367 - ETA: 1:15 - loss: 0.367 - ETA: 1:13 - loss: 0.367 - ETA: 1:11 - loss: 0.367 - ETA: 1:09 - loss: 0.367 - ETA: 1:07 - loss: 0.367 - ETA: 1:06 - loss: 0.367 - ETA: 1:04 - loss: 0.367 - ETA: 1:02 - loss: 0.367 - ETA: 1:00 - loss: 0.367 - ETA: 58s - loss: 0.367 - ETA: 56s - loss: 0.36 - ETA: 55s - loss: 0.36 - ETA: 53s - loss: 0.36 - ETA: 51s - loss: 0.36 - ETA: 49s - loss: 0.36 - ETA: 47s - loss: 0.36 - ETA: 45s - loss: 0.36 - ETA: 44s - loss: 0.36 - ETA: 42s - loss: 0.36 - ETA: 40s - loss: 0.36 - ETA: 38s - loss: 0.36 - ETA: 36s - loss: 0.36 - ETA: 34s - loss: 0.36 - ETA: 33s - loss: 0.36 - ETA: 31s - loss: 0.36 - ETA: 29s - loss: 0.36 - ETA: 27s - loss: 0.36 - ETA: 25s - loss: 0.36 - ETA: 23s - loss: 0.36 - ETA: 22s - loss: 0.36 - ETA: 20s - loss: 0.36 - ETA: 18s - loss: 0.36 - ETA: 16s - loss: 0.36 - ETA: 14s - loss: 0.36 - ETA: 12s - loss: 0.36 - ETA: 11s - loss: 0.36 - ETA: 9s - loss: 0.3671 - ETA: 7s - loss: 0.367 - ETA: 5s - loss: 0.367 - ETA: 3s - loss: 0.367 - ETA: 1s - loss: 0.367 - ETA: 0s - loss: 0.367 - 209s 2s/step - loss: 0.3677 - val_loss: 0.1162\n",
      "\n",
      "Epoch 00008: val_loss improved from 0.13003 to 0.11616, saving model to clean_notebooks\\cnn_injection_transfer.h5\n",
      "Epoch 9/30\n",
      "111/111 [==============================] - ETA: 3:06 - loss: 0.141 - ETA: 3:08 - loss: 0.163 - ETA: 3:11 - loss: 0.188 - ETA: 3:10 - loss: 0.218 - ETA: 3:08 - loss: 0.236 - ETA: 3:07 - loss: 0.256 - ETA: 3:05 - loss: 0.265 - ETA: 3:05 - loss: 0.275 - ETA: 3:03 - loss: 0.279 - ETA: 3:01 - loss: 0.284 - ETA: 2:59 - loss: 0.289 - ETA: 2:57 - loss: 0.293 - ETA: 2:56 - loss: 0.296 - ETA: 2:54 - loss: 0.298 - ETA: 2:52 - loss: 0.299 - ETA: 2:50 - loss: 0.301 - ETA: 2:48 - loss: 0.303 - ETA: 2:47 - loss: 0.307 - ETA: 2:45 - loss: 0.309 - ETA: 2:43 - loss: 0.311 - ETA: 2:42 - loss: 0.313 - ETA: 2:40 - loss: 0.315 - ETA: 2:38 - loss: 0.316 - ETA: 2:36 - loss: 0.318 - ETA: 2:35 - loss: 0.320 - ETA: 2:33 - loss: 0.321 - ETA: 2:31 - loss: 0.322 - ETA: 2:29 - loss: 0.323 - ETA: 2:27 - loss: 0.323 - ETA: 2:26 - loss: 0.323 - ETA: 2:24 - loss: 0.324 - ETA: 2:23 - loss: 0.324 - ETA: 2:21 - loss: 0.324 - ETA: 2:19 - loss: 0.324 - ETA: 2:17 - loss: 0.324 - ETA: 2:16 - loss: 0.324 - ETA: 2:14 - loss: 0.323 - ETA: 2:12 - loss: 0.323 - ETA: 2:10 - loss: 0.323 - ETA: 2:08 - loss: 0.323 - ETA: 2:06 - loss: 0.322 - ETA: 2:05 - loss: 0.322 - ETA: 2:03 - loss: 0.322 - ETA: 2:01 - loss: 0.322 - ETA: 1:59 - loss: 0.322 - ETA: 1:57 - loss: 0.322 - ETA: 1:55 - loss: 0.322 - ETA: 1:54 - loss: 0.322 - ETA: 1:52 - loss: 0.321 - ETA: 1:50 - loss: 0.321 - ETA: 1:48 - loss: 0.321 - ETA: 1:46 - loss: 0.321 - ETA: 1:44 - loss: 0.321 - ETA: 1:43 - loss: 0.321 - ETA: 1:41 - loss: 0.321 - ETA: 1:39 - loss: 0.321 - ETA: 1:37 - loss: 0.321 - ETA: 1:35 - loss: 0.321 - ETA: 1:34 - loss: 0.321 - ETA: 1:32 - loss: 0.322 - ETA: 1:30 - loss: 0.322 - ETA: 1:28 - loss: 0.323 - ETA: 1:26 - loss: 0.323 - ETA: 1:25 - loss: 0.323 - ETA: 1:23 - loss: 0.323 - ETA: 1:21 - loss: 0.324 - ETA: 1:19 - loss: 0.324 - ETA: 1:17 - loss: 0.324 - ETA: 1:15 - loss: 0.324 - ETA: 1:14 - loss: 0.325 - ETA: 1:12 - loss: 0.325 - ETA: 1:10 - loss: 0.325 - ETA: 1:08 - loss: 0.326 - ETA: 1:06 - loss: 0.326 - ETA: 1:05 - loss: 0.327 - ETA: 1:03 - loss: 0.327 - ETA: 1:01 - loss: 0.327 - ETA: 59s - loss: 0.328 - ETA: 57s - loss: 0.32 - ETA: 56s - loss: 0.32 - ETA: 54s - loss: 0.32 - ETA: 52s - loss: 0.32 - ETA: 50s - loss: 0.32 - ETA: 48s - loss: 0.32 - ETA: 47s - loss: 0.33 - ETA: 45s - loss: 0.33 - ETA: 43s - loss: 0.33 - ETA: 41s - loss: 0.33 - ETA: 39s - loss: 0.33 - ETA: 37s - loss: 0.33 - ETA: 36s - loss: 0.33 - ETA: 34s - loss: 0.33 - ETA: 32s - loss: 0.33 - ETA: 30s - loss: 0.33 - ETA: 28s - loss: 0.33 - ETA: 27s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 9s - loss: 0.3349 - ETA: 7s - loss: 0.335 - ETA: 5s - loss: 0.335 - ETA: 3s - loss: 0.335 - ETA: 1s - loss: 0.335 - ETA: 0s - loss: 0.335 - 207s 2s/step - loss: 0.3358 - val_loss: 0.1354\n",
      "\n",
      "Epoch 00009: val_loss did not improve from 0.11616\n",
      "Epoch 10/30\n",
      "111/111 [==============================] - ETA: 3:49 - loss: 0.202 - ETA: 3:31 - loss: 0.447 - ETA: 3:27 - loss: 0.461 - ETA: 3:24 - loss: 0.463 - ETA: 3:20 - loss: 0.456 - ETA: 3:19 - loss: 0.443 - ETA: 3:17 - loss: 0.435 - ETA: 3:14 - loss: 0.430 - ETA: 3:12 - loss: 0.424 - ETA: 3:10 - loss: 0.418 - ETA: 3:07 - loss: 0.411 - ETA: 3:05 - loss: 0.403 - ETA: 3:03 - loss: 0.397 - ETA: 3:01 - loss: 0.391 - ETA: 2:59 - loss: 0.385 - ETA: 2:57 - loss: 0.379 - ETA: 2:55 - loss: 0.374 - ETA: 2:52 - loss: 0.370 - ETA: 2:50 - loss: 0.370 - ETA: 2:49 - loss: 0.370 - ETA: 2:47 - loss: 0.371 - ETA: 2:45 - loss: 0.371 - ETA: 2:43 - loss: 0.371 - ETA: 2:41 - loss: 0.371 - ETA: 2:39 - loss: 0.370 - ETA: 2:37 - loss: 0.370 - ETA: 2:35 - loss: 0.370 - ETA: 2:33 - loss: 0.370 - ETA: 2:31 - loss: 0.370 - ETA: 2:29 - loss: 0.370 - ETA: 2:27 - loss: 0.370 - ETA: 2:25 - loss: 0.369 - ETA: 2:23 - loss: 0.370 - ETA: 2:21 - loss: 0.371 - ETA: 2:19 - loss: 0.371 - ETA: 2:17 - loss: 0.372 - ETA: 2:15 - loss: 0.372 - ETA: 2:14 - loss: 0.372 - ETA: 2:12 - loss: 0.373 - ETA: 2:10 - loss: 0.374 - ETA: 2:08 - loss: 0.374 - ETA: 2:06 - loss: 0.375 - ETA: 2:04 - loss: 0.376 - ETA: 2:02 - loss: 0.376 - ETA: 2:01 - loss: 0.377 - ETA: 1:59 - loss: 0.378 - ETA: 1:57 - loss: 0.378 - ETA: 1:55 - loss: 0.379 - ETA: 1:53 - loss: 0.379 - ETA: 1:51 - loss: 0.380 - ETA: 1:50 - loss: 0.380 - ETA: 1:48 - loss: 0.380 - ETA: 1:46 - loss: 0.381 - ETA: 1:44 - loss: 0.381 - ETA: 1:42 - loss: 0.381 - ETA: 1:40 - loss: 0.381 - ETA: 1:39 - loss: 0.381 - ETA: 1:37 - loss: 0.381 - ETA: 1:35 - loss: 0.381 - ETA: 1:33 - loss: 0.381 - ETA: 1:31 - loss: 0.381 - ETA: 1:29 - loss: 0.381 - ETA: 1:27 - loss: 0.381 - ETA: 1:26 - loss: 0.381 - ETA: 1:24 - loss: 0.381 - ETA: 1:22 - loss: 0.381 - ETA: 1:20 - loss: 0.381 - ETA: 1:18 - loss: 0.382 - ETA: 1:16 - loss: 0.382 - ETA: 1:15 - loss: 0.382 - ETA: 1:13 - loss: 0.382 - ETA: 1:11 - loss: 0.383 - ETA: 1:09 - loss: 0.383 - ETA: 1:07 - loss: 0.383 - ETA: 1:05 - loss: 0.383 - ETA: 1:04 - loss: 0.383 - ETA: 1:02 - loss: 0.384 - ETA: 1:00 - loss: 0.384 - ETA: 58s - loss: 0.384 - ETA: 56s - loss: 0.38 - ETA: 54s - loss: 0.38 - ETA: 53s - loss: 0.38 - ETA: 51s - loss: 0.38 - ETA: 49s - loss: 0.38 - ETA: 47s - loss: 0.38 - ETA: 45s - loss: 0.38 - ETA: 43s - loss: 0.38 - ETA: 42s - loss: 0.38 - ETA: 40s - loss: 0.38 - ETA: 38s - loss: 0.38 - ETA: 36s - loss: 0.38 - ETA: 34s - loss: 0.38 - ETA: 32s - loss: 0.38 - ETA: 31s - loss: 0.38 - ETA: 29s - loss: 0.38 - ETA: 27s - loss: 0.38 - ETA: 25s - loss: 0.38 - ETA: 23s - loss: 0.38 - ETA: 21s - loss: 0.38 - ETA: 20s - loss: 0.38 - ETA: 18s - loss: 0.38 - ETA: 16s - loss: 0.38 - ETA: 14s - loss: 0.38 - ETA: 12s - loss: 0.38 - ETA: 10s - loss: 0.38 - ETA: 9s - loss: 0.3834 - ETA: 7s - loss: 0.383 - ETA: 5s - loss: 0.383 - ETA: 3s - loss: 0.382 - ETA: 1s - loss: 0.382 - ETA: 0s - loss: 0.382 - 209s 2s/step - loss: 0.3824 - val_loss: 0.1193\n",
      "\n",
      "Epoch 00010: val_loss did not improve from 0.11616\n",
      "Epoch 11/30\n",
      "111/111 [==============================] - ETA: 3:33 - loss: 0.197 - ETA: 3:23 - loss: 0.230 - ETA: 3:22 - loss: 0.258 - ETA: 3:21 - loss: 0.272 - ETA: 3:18 - loss: 0.275 - ETA: 3:16 - loss: 0.278 - ETA: 3:13 - loss: 0.279 - ETA: 3:11 - loss: 0.282 - ETA: 3:08 - loss: 0.285 - ETA: 3:06 - loss: 0.289 - ETA: 3:04 - loss: 0.290 - ETA: 3:02 - loss: 0.291 - ETA: 2:59 - loss: 0.291 - ETA: 2:58 - loss: 0.291 - ETA: 2:56 - loss: 0.291 - ETA: 2:53 - loss: 0.293 - ETA: 2:51 - loss: 0.293 - ETA: 2:50 - loss: 0.293 - ETA: 2:48 - loss: 0.294 - ETA: 2:46 - loss: 0.295 - ETA: 2:44 - loss: 0.294 - ETA: 2:42 - loss: 0.294 - ETA: 2:40 - loss: 0.297 - ETA: 2:38 - loss: 0.301 - ETA: 2:36 - loss: 0.305 - ETA: 2:34 - loss: 0.310 - ETA: 2:33 - loss: 0.313 - ETA: 2:31 - loss: 0.317 - ETA: 2:29 - loss: 0.320 - ETA: 2:27 - loss: 0.323 - ETA: 2:25 - loss: 0.325 - ETA: 2:23 - loss: 0.327 - ETA: 2:21 - loss: 0.329 - ETA: 2:20 - loss: 0.330 - ETA: 2:18 - loss: 0.332 - ETA: 2:16 - loss: 0.333 - ETA: 2:14 - loss: 0.335 - ETA: 2:12 - loss: 0.336 - ETA: 2:10 - loss: 0.337 - ETA: 2:09 - loss: 0.338 - ETA: 2:07 - loss: 0.339 - ETA: 2:05 - loss: 0.340 - ETA: 2:03 - loss: 0.341 - ETA: 2:01 - loss: 0.342 - ETA: 2:00 - loss: 0.342 - ETA: 1:58 - loss: 0.343 - ETA: 1:56 - loss: 0.344 - ETA: 1:54 - loss: 0.344 - ETA: 1:52 - loss: 0.345 - ETA: 1:50 - loss: 0.345 - ETA: 1:49 - loss: 0.346 - ETA: 1:47 - loss: 0.346 - ETA: 1:45 - loss: 0.347 - ETA: 1:43 - loss: 0.347 - ETA: 1:41 - loss: 0.347 - ETA: 1:39 - loss: 0.348 - ETA: 1:38 - loss: 0.348 - ETA: 1:36 - loss: 0.348 - ETA: 1:34 - loss: 0.348 - ETA: 1:32 - loss: 0.348 - ETA: 1:30 - loss: 0.348 - ETA: 1:28 - loss: 0.348 - ETA: 1:27 - loss: 0.349 - ETA: 1:25 - loss: 0.349 - ETA: 1:23 - loss: 0.349 - ETA: 1:21 - loss: 0.349 - ETA: 1:20 - loss: 0.350 - ETA: 1:18 - loss: 0.350 - ETA: 1:16 - loss: 0.350 - ETA: 1:14 - loss: 0.350 - ETA: 1:12 - loss: 0.351 - ETA: 1:11 - loss: 0.351 - ETA: 1:09 - loss: 0.351 - ETA: 1:07 - loss: 0.351 - ETA: 1:05 - loss: 0.352 - ETA: 1:03 - loss: 0.352 - ETA: 1:01 - loss: 0.352 - ETA: 1:00 - loss: 0.352 - ETA: 58s - loss: 0.352 - ETA: 56s - loss: 0.35 - ETA: 54s - loss: 0.35 - ETA: 52s - loss: 0.35 - ETA: 50s - loss: 0.35 - ETA: 49s - loss: 0.35 - ETA: 47s - loss: 0.35 - ETA: 45s - loss: 0.35 - ETA: 43s - loss: 0.35 - ETA: 41s - loss: 0.35 - ETA: 40s - loss: 0.35 - ETA: 38s - loss: 0.35 - ETA: 36s - loss: 0.35 - ETA: 34s - loss: 0.35 - ETA: 32s - loss: 0.35 - ETA: 30s - loss: 0.35 - ETA: 29s - loss: 0.35 - ETA: 27s - loss: 0.35 - ETA: 25s - loss: 0.35 - ETA: 23s - loss: 0.35 - ETA: 21s - loss: 0.35 - ETA: 20s - loss: 0.35 - ETA: 18s - loss: 0.35 - ETA: 16s - loss: 0.35 - ETA: 14s - loss: 0.35 - ETA: 12s - loss: 0.35 - ETA: 10s - loss: 0.35 - ETA: 9s - loss: 0.3554 - ETA: 7s - loss: 0.355 - ETA: 5s - loss: 0.355 - ETA: 3s - loss: 0.355 - ETA: 1s - loss: 0.355 - ETA: 0s - loss: 0.355 - 208s 2s/step - loss: 0.3559 - val_loss: 0.1017\n",
      "\n",
      "Epoch 00011: val_loss improved from 0.11616 to 0.10170, saving model to clean_notebooks\\cnn_injection_transfer.h5\n",
      "Epoch 12/30\n",
      "111/111 [==============================] - ETA: 3:07 - loss: 0.206 - ETA: 3:17 - loss: 0.266 - ETA: 3:14 - loss: 0.287 - ETA: 3:12 - loss: 0.294 - ETA: 3:09 - loss: 0.318 - ETA: 3:09 - loss: 0.335 - ETA: 3:07 - loss: 0.344 - ETA: 3:05 - loss: 0.352 - ETA: 3:03 - loss: 0.364 - ETA: 3:02 - loss: 0.371 - ETA: 3:00 - loss: 0.373 - ETA: 2:58 - loss: 0.374 - ETA: 2:56 - loss: 0.373 - ETA: 2:55 - loss: 0.373 - ETA: 2:53 - loss: 0.373 - ETA: 2:51 - loss: 0.374 - ETA: 2:49 - loss: 0.375 - ETA: 2:48 - loss: 0.375 - ETA: 2:46 - loss: 0.375 - ETA: 2:44 - loss: 0.375 - ETA: 2:43 - loss: 0.375 - ETA: 2:41 - loss: 0.375 - ETA: 2:40 - loss: 0.375 - ETA: 2:38 - loss: 0.374 - ETA: 2:36 - loss: 0.374 - ETA: 2:34 - loss: 0.373 - ETA: 2:32 - loss: 0.371 - ETA: 2:30 - loss: 0.370 - ETA: 2:28 - loss: 0.369 - ETA: 2:26 - loss: 0.369 - ETA: 2:25 - loss: 0.368 - ETA: 2:23 - loss: 0.367 - ETA: 2:21 - loss: 0.366 - ETA: 2:19 - loss: 0.365 - ETA: 2:17 - loss: 0.365 - ETA: 2:15 - loss: 0.364 - ETA: 2:13 - loss: 0.363 - ETA: 2:12 - loss: 0.362 - ETA: 2:10 - loss: 0.361 - ETA: 2:08 - loss: 0.360 - ETA: 2:06 - loss: 0.359 - ETA: 2:04 - loss: 0.358 - ETA: 2:03 - loss: 0.357 - ETA: 2:01 - loss: 0.356 - ETA: 1:59 - loss: 0.356 - ETA: 1:57 - loss: 0.355 - ETA: 1:55 - loss: 0.354 - ETA: 1:54 - loss: 0.354 - ETA: 1:52 - loss: 0.353 - ETA: 1:50 - loss: 0.353 - ETA: 1:48 - loss: 0.352 - ETA: 1:46 - loss: 0.352 - ETA: 1:44 - loss: 0.352 - ETA: 1:43 - loss: 0.352 - ETA: 1:41 - loss: 0.352 - ETA: 1:39 - loss: 0.352 - ETA: 1:37 - loss: 0.352 - ETA: 1:35 - loss: 0.352 - ETA: 1:34 - loss: 0.352 - ETA: 1:32 - loss: 0.352 - ETA: 1:30 - loss: 0.352 - ETA: 1:28 - loss: 0.351 - ETA: 1:26 - loss: 0.351 - ETA: 1:25 - loss: 0.351 - ETA: 1:23 - loss: 0.351 - ETA: 1:21 - loss: 0.351 - ETA: 1:19 - loss: 0.352 - ETA: 1:17 - loss: 0.352 - ETA: 1:15 - loss: 0.352 - ETA: 1:14 - loss: 0.352 - ETA: 1:12 - loss: 0.352 - ETA: 1:10 - loss: 0.352 - ETA: 1:08 - loss: 0.352 - ETA: 1:06 - loss: 0.353 - ETA: 1:05 - loss: 0.353 - ETA: 1:03 - loss: 0.353 - ETA: 1:01 - loss: 0.354 - ETA: 59s - loss: 0.354 - ETA: 57s - loss: 0.35 - ETA: 56s - loss: 0.35 - ETA: 54s - loss: 0.35 - ETA: 52s - loss: 0.35 - ETA: 50s - loss: 0.35 - ETA: 48s - loss: 0.35 - ETA: 47s - loss: 0.35 - ETA: 45s - loss: 0.35 - ETA: 43s - loss: 0.35 - ETA: 41s - loss: 0.35 - ETA: 39s - loss: 0.35 - ETA: 38s - loss: 0.35 - ETA: 36s - loss: 0.35 - ETA: 34s - loss: 0.35 - ETA: 32s - loss: 0.35 - ETA: 30s - loss: 0.35 - ETA: 29s - loss: 0.35 - ETA: 27s - loss: 0.35 - ETA: 25s - loss: 0.35 - ETA: 23s - loss: 0.35 - ETA: 21s - loss: 0.35 - ETA: 19s - loss: 0.35 - ETA: 18s - loss: 0.35 - ETA: 16s - loss: 0.35 - ETA: 14s - loss: 0.35 - ETA: 12s - loss: 0.35 - ETA: 10s - loss: 0.35 - ETA: 9s - loss: 0.3588 - ETA: 7s - loss: 0.358 - ETA: 5s - loss: 0.358 - ETA: 3s - loss: 0.358 - ETA: 1s - loss: 0.358 - ETA: 0s - loss: 0.359 - 209s 2s/step - loss: 0.3590 - val_loss: 0.1122\n",
      "\n",
      "Epoch 00012: val_loss did not improve from 0.10170\n",
      "Epoch 13/30\n",
      "111/111 [==============================] - ETA: 3:33 - loss: 0.223 - ETA: 3:18 - loss: 0.301 - ETA: 3:16 - loss: 0.337 - ETA: 3:18 - loss: 0.369 - ETA: 3:15 - loss: 0.388 - ETA: 3:14 - loss: 0.412 - ETA: 3:12 - loss: 0.428 - ETA: 3:11 - loss: 0.444 - ETA: 3:10 - loss: 0.456 - ETA: 3:08 - loss: 0.465 - ETA: 3:06 - loss: 0.469 - ETA: 3:04 - loss: 0.471 - ETA: 3:02 - loss: 0.470 - ETA: 3:00 - loss: 0.468 - ETA: 2:58 - loss: 0.467 - ETA: 2:56 - loss: 0.464 - ETA: 2:54 - loss: 0.460 - ETA: 2:52 - loss: 0.457 - ETA: 2:51 - loss: 0.454 - ETA: 2:49 - loss: 0.452 - ETA: 2:47 - loss: 0.451 - ETA: 2:45 - loss: 0.450 - ETA: 2:44 - loss: 0.450 - ETA: 2:41 - loss: 0.449 - ETA: 2:39 - loss: 0.448 - ETA: 2:37 - loss: 0.447 - ETA: 2:36 - loss: 0.446 - ETA: 2:34 - loss: 0.445 - ETA: 2:32 - loss: 0.443 - ETA: 2:30 - loss: 0.441 - ETA: 2:28 - loss: 0.440 - ETA: 2:26 - loss: 0.438 - ETA: 2:24 - loss: 0.437 - ETA: 2:22 - loss: 0.435 - ETA: 2:20 - loss: 0.433 - ETA: 2:19 - loss: 0.432 - ETA: 2:17 - loss: 0.431 - ETA: 2:15 - loss: 0.429 - ETA: 2:13 - loss: 0.428 - ETA: 2:12 - loss: 0.427 - ETA: 2:10 - loss: 0.426 - ETA: 2:08 - loss: 0.425 - ETA: 2:06 - loss: 0.424 - ETA: 2:04 - loss: 0.423 - ETA: 2:02 - loss: 0.421 - ETA: 2:00 - loss: 0.420 - ETA: 1:58 - loss: 0.419 - ETA: 1:56 - loss: 0.418 - ETA: 1:55 - loss: 0.417 - ETA: 1:53 - loss: 0.416 - ETA: 1:51 - loss: 0.416 - ETA: 1:49 - loss: 0.415 - ETA: 1:47 - loss: 0.414 - ETA: 1:45 - loss: 0.413 - ETA: 1:44 - loss: 0.413 - ETA: 1:42 - loss: 0.412 - ETA: 1:40 - loss: 0.412 - ETA: 1:38 - loss: 0.411 - ETA: 1:36 - loss: 0.410 - ETA: 1:34 - loss: 0.410 - ETA: 1:32 - loss: 0.410 - ETA: 1:30 - loss: 0.409 - ETA: 1:29 - loss: 0.409 - ETA: 1:27 - loss: 0.408 - ETA: 1:25 - loss: 0.408 - ETA: 1:23 - loss: 0.407 - ETA: 1:21 - loss: 0.407 - ETA: 1:19 - loss: 0.406 - ETA: 1:17 - loss: 0.406 - ETA: 1:16 - loss: 0.405 - ETA: 1:14 - loss: 0.405 - ETA: 1:12 - loss: 0.405 - ETA: 1:10 - loss: 0.404 - ETA: 1:08 - loss: 0.404 - ETA: 1:06 - loss: 0.403 - ETA: 1:04 - loss: 0.403 - ETA: 1:03 - loss: 0.402 - ETA: 1:01 - loss: 0.402 - ETA: 59s - loss: 0.402 - ETA: 57s - loss: 0.40 - ETA: 55s - loss: 0.40 - ETA: 53s - loss: 0.40 - ETA: 51s - loss: 0.40 - ETA: 50s - loss: 0.40 - ETA: 48s - loss: 0.39 - ETA: 46s - loss: 0.39 - ETA: 44s - loss: 0.39 - ETA: 42s - loss: 0.39 - ETA: 40s - loss: 0.39 - ETA: 38s - loss: 0.39 - ETA: 37s - loss: 0.39 - ETA: 35s - loss: 0.39 - ETA: 33s - loss: 0.39 - ETA: 31s - loss: 0.39 - ETA: 29s - loss: 0.39 - ETA: 27s - loss: 0.39 - ETA: 25s - loss: 0.39 - ETA: 24s - loss: 0.39 - ETA: 22s - loss: 0.39 - ETA: 20s - loss: 0.39 - ETA: 18s - loss: 0.39 - ETA: 16s - loss: 0.39 - ETA: 14s - loss: 0.39 - ETA: 12s - loss: 0.39 - ETA: 11s - loss: 0.39 - ETA: 9s - loss: 0.3935 - ETA: 7s - loss: 0.393 - ETA: 5s - loss: 0.393 - ETA: 3s - loss: 0.392 - ETA: 1s - loss: 0.392 - ETA: 0s - loss: 0.392 - 211s 2s/step - loss: 0.3923 - val_loss: 0.1111\n",
      "\n",
      "Epoch 00013: val_loss did not improve from 0.10170\n",
      "Epoch 14/30\n",
      "111/111 [==============================] - ETA: 3:24 - loss: 0.202 - ETA: 3:15 - loss: 0.336 - ETA: 3:17 - loss: 0.353 - ETA: 3:17 - loss: 0.350 - ETA: 3:15 - loss: 0.355 - ETA: 3:12 - loss: 0.358 - ETA: 3:10 - loss: 0.364 - ETA: 3:08 - loss: 0.376 - ETA: 3:07 - loss: 0.381 - ETA: 3:06 - loss: 0.382 - ETA: 3:04 - loss: 0.383 - ETA: 3:02 - loss: 0.381 - ETA: 3:00 - loss: 0.378 - ETA: 2:58 - loss: 0.377 - ETA: 2:56 - loss: 0.376 - ETA: 2:55 - loss: 0.374 - ETA: 2:53 - loss: 0.372 - ETA: 2:51 - loss: 0.370 - ETA: 2:49 - loss: 0.368 - ETA: 2:47 - loss: 0.366 - ETA: 2:45 - loss: 0.365 - ETA: 2:43 - loss: 0.364 - ETA: 2:41 - loss: 0.362 - ETA: 2:40 - loss: 0.361 - ETA: 2:38 - loss: 0.360 - ETA: 2:36 - loss: 0.359 - ETA: 2:34 - loss: 0.358 - ETA: 2:32 - loss: 0.356 - ETA: 2:30 - loss: 0.355 - ETA: 2:29 - loss: 0.354 - ETA: 2:27 - loss: 0.353 - ETA: 2:25 - loss: 0.351 - ETA: 2:23 - loss: 0.350 - ETA: 2:21 - loss: 0.350 - ETA: 2:19 - loss: 0.349 - ETA: 2:18 - loss: 0.348 - ETA: 2:16 - loss: 0.348 - ETA: 2:14 - loss: 0.348 - ETA: 2:12 - loss: 0.348 - ETA: 2:10 - loss: 0.349 - ETA: 2:08 - loss: 0.349 - ETA: 2:07 - loss: 0.349 - ETA: 2:05 - loss: 0.350 - ETA: 2:03 - loss: 0.350 - ETA: 2:01 - loss: 0.350 - ETA: 1:59 - loss: 0.350 - ETA: 1:57 - loss: 0.351 - ETA: 1:55 - loss: 0.351 - ETA: 1:54 - loss: 0.351 - ETA: 1:52 - loss: 0.351 - ETA: 1:50 - loss: 0.351 - ETA: 1:48 - loss: 0.351 - ETA: 1:46 - loss: 0.350 - ETA: 1:45 - loss: 0.351 - ETA: 1:43 - loss: 0.350 - ETA: 1:41 - loss: 0.350 - ETA: 1:39 - loss: 0.350 - ETA: 1:37 - loss: 0.350 - ETA: 1:35 - loss: 0.350 - ETA: 1:34 - loss: 0.350 - ETA: 1:32 - loss: 0.349 - ETA: 1:30 - loss: 0.349 - ETA: 1:28 - loss: 0.349 - ETA: 1:26 - loss: 0.349 - ETA: 1:24 - loss: 0.349 - ETA: 1:22 - loss: 0.349 - ETA: 1:21 - loss: 0.348 - ETA: 1:19 - loss: 0.348 - ETA: 1:17 - loss: 0.348 - ETA: 1:15 - loss: 0.348 - ETA: 1:13 - loss: 0.348 - ETA: 1:11 - loss: 0.347 - ETA: 1:10 - loss: 0.347 - ETA: 1:08 - loss: 0.347 - ETA: 1:06 - loss: 0.347 - ETA: 1:04 - loss: 0.347 - ETA: 1:02 - loss: 0.346 - ETA: 1:00 - loss: 0.346 - ETA: 59s - loss: 0.346 - ETA: 57s - loss: 0.34 - ETA: 55s - loss: 0.34 - ETA: 53s - loss: 0.34 - ETA: 51s - loss: 0.34 - ETA: 49s - loss: 0.34 - ETA: 47s - loss: 0.34 - ETA: 46s - loss: 0.34 - ETA: 44s - loss: 0.34 - ETA: 42s - loss: 0.34 - ETA: 40s - loss: 0.34 - ETA: 38s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 35s - loss: 0.34 - ETA: 33s - loss: 0.34 - ETA: 31s - loss: 0.34 - ETA: 29s - loss: 0.34 - ETA: 27s - loss: 0.34 - ETA: 25s - loss: 0.34 - ETA: 23s - loss: 0.34 - ETA: 22s - loss: 0.34 - ETA: 20s - loss: 0.34 - ETA: 18s - loss: 0.34 - ETA: 16s - loss: 0.34 - ETA: 14s - loss: 0.34 - ETA: 12s - loss: 0.34 - ETA: 11s - loss: 0.34 - ETA: 9s - loss: 0.3458 - ETA: 7s - loss: 0.345 - ETA: 5s - loss: 0.345 - ETA: 3s - loss: 0.345 - ETA: 1s - loss: 0.345 - ETA: 0s - loss: 0.345 - 210s 2s/step - loss: 0.3456 - val_loss: 0.1093\n",
      "\n",
      "Epoch 00014: val_loss did not improve from 0.10170\n",
      "Epoch 15/30\n",
      "111/111 [==============================] - ETA: 3:15 - loss: 0.466 - ETA: 3:20 - loss: 0.413 - ETA: 3:19 - loss: 0.411 - ETA: 3:19 - loss: 0.457 - ETA: 3:17 - loss: 0.487 - ETA: 3:15 - loss: 0.497 - ETA: 3:14 - loss: 0.503 - ETA: 3:12 - loss: 0.505 - ETA: 3:09 - loss: 0.504 - ETA: 3:07 - loss: 0.501 - ETA: 3:04 - loss: 0.497 - ETA: 3:02 - loss: 0.491 - ETA: 3:00 - loss: 0.487 - ETA: 2:58 - loss: 0.482 - ETA: 2:56 - loss: 0.477 - ETA: 2:54 - loss: 0.473 - ETA: 2:52 - loss: 0.468 - ETA: 2:50 - loss: 0.463 - ETA: 2:48 - loss: 0.458 - ETA: 2:46 - loss: 0.455 - ETA: 2:44 - loss: 0.451 - ETA: 2:42 - loss: 0.448 - ETA: 2:40 - loss: 0.445 - ETA: 2:39 - loss: 0.441 - ETA: 2:37 - loss: 0.438 - ETA: 2:35 - loss: 0.435 - ETA: 2:33 - loss: 0.432 - ETA: 2:31 - loss: 0.429 - ETA: 2:29 - loss: 0.427 - ETA: 2:27 - loss: 0.426 - ETA: 2:26 - loss: 0.425 - ETA: 2:24 - loss: 0.425 - ETA: 2:22 - loss: 0.425 - ETA: 2:20 - loss: 0.427 - ETA: 2:18 - loss: 0.428 - ETA: 2:16 - loss: 0.429 - ETA: 2:14 - loss: 0.430 - ETA: 2:13 - loss: 0.430 - ETA: 2:11 - loss: 0.431 - ETA: 2:09 - loss: 0.432 - ETA: 2:07 - loss: 0.433 - ETA: 2:05 - loss: 0.433 - ETA: 2:04 - loss: 0.434 - ETA: 2:02 - loss: 0.434 - ETA: 2:00 - loss: 0.435 - ETA: 1:58 - loss: 0.435 - ETA: 1:56 - loss: 0.435 - ETA: 1:54 - loss: 0.436 - ETA: 1:53 - loss: 0.436 - ETA: 1:51 - loss: 0.435 - ETA: 1:49 - loss: 0.435 - ETA: 1:47 - loss: 0.435 - ETA: 1:45 - loss: 0.435 - ETA: 1:43 - loss: 0.435 - ETA: 1:42 - loss: 0.434 - ETA: 1:40 - loss: 0.434 - ETA: 1:38 - loss: 0.434 - ETA: 1:36 - loss: 0.433 - ETA: 1:34 - loss: 0.433 - ETA: 1:32 - loss: 0.433 - ETA: 1:31 - loss: 0.432 - ETA: 1:29 - loss: 0.432 - ETA: 1:27 - loss: 0.432 - ETA: 1:25 - loss: 0.431 - ETA: 1:23 - loss: 0.431 - ETA: 1:22 - loss: 0.430 - ETA: 1:20 - loss: 0.430 - ETA: 1:18 - loss: 0.430 - ETA: 1:16 - loss: 0.429 - ETA: 1:14 - loss: 0.429 - ETA: 1:13 - loss: 0.429 - ETA: 1:11 - loss: 0.429 - ETA: 1:09 - loss: 0.428 - ETA: 1:07 - loss: 0.428 - ETA: 1:05 - loss: 0.428 - ETA: 1:03 - loss: 0.428 - ETA: 1:02 - loss: 0.427 - ETA: 1:00 - loss: 0.427 - ETA: 58s - loss: 0.427 - ETA: 56s - loss: 0.42 - ETA: 54s - loss: 0.42 - ETA: 52s - loss: 0.42 - ETA: 51s - loss: 0.42 - ETA: 49s - loss: 0.42 - ETA: 47s - loss: 0.42 - ETA: 45s - loss: 0.42 - ETA: 43s - loss: 0.42 - ETA: 41s - loss: 0.42 - ETA: 40s - loss: 0.42 - ETA: 38s - loss: 0.42 - ETA: 36s - loss: 0.42 - ETA: 34s - loss: 0.42 - ETA: 32s - loss: 0.42 - ETA: 31s - loss: 0.42 - ETA: 29s - loss: 0.42 - ETA: 27s - loss: 0.42 - ETA: 25s - loss: 0.41 - ETA: 23s - loss: 0.41 - ETA: 21s - loss: 0.41 - ETA: 20s - loss: 0.41 - ETA: 18s - loss: 0.41 - ETA: 16s - loss: 0.41 - ETA: 14s - loss: 0.41 - ETA: 12s - loss: 0.41 - ETA: 10s - loss: 0.41 - ETA: 9s - loss: 0.4157 - ETA: 7s - loss: 0.415 - ETA: 5s - loss: 0.414 - ETA: 3s - loss: 0.414 - ETA: 1s - loss: 0.413 - ETA: 0s - loss: 0.413 - 208s 2s/step - loss: 0.4129 - val_loss: 0.1132\n",
      "\n",
      "Epoch 00015: val_loss did not improve from 0.10170\n",
      "Epoch 16/30\n",
      "111/111 [==============================] - ETA: 3:16 - loss: 0.070 - ETA: 3:18 - loss: 0.117 - ETA: 3:13 - loss: 0.182 - ETA: 3:15 - loss: 0.202 - ETA: 3:12 - loss: 0.221 - ETA: 3:11 - loss: 0.240 - ETA: 3:09 - loss: 0.259 - ETA: 3:07 - loss: 0.269 - ETA: 3:06 - loss: 0.277 - ETA: 3:04 - loss: 0.288 - ETA: 3:01 - loss: 0.297 - ETA: 3:00 - loss: 0.304 - ETA: 2:58 - loss: 0.310 - ETA: 2:56 - loss: 0.314 - ETA: 2:54 - loss: 0.317 - ETA: 2:52 - loss: 0.320 - ETA: 2:50 - loss: 0.322 - ETA: 2:49 - loss: 0.323 - ETA: 2:47 - loss: 0.323 - ETA: 2:45 - loss: 0.324 - ETA: 2:43 - loss: 0.324 - ETA: 2:42 - loss: 0.324 - ETA: 2:40 - loss: 0.324 - ETA: 2:38 - loss: 0.324 - ETA: 2:37 - loss: 0.324 - ETA: 2:35 - loss: 0.324 - ETA: 2:33 - loss: 0.323 - ETA: 2:31 - loss: 0.323 - ETA: 2:29 - loss: 0.323 - ETA: 2:27 - loss: 0.322 - ETA: 2:25 - loss: 0.321 - ETA: 2:24 - loss: 0.321 - ETA: 2:22 - loss: 0.320 - ETA: 2:20 - loss: 0.319 - ETA: 2:18 - loss: 0.318 - ETA: 2:16 - loss: 0.317 - ETA: 2:14 - loss: 0.317 - ETA: 2:12 - loss: 0.316 - ETA: 2:11 - loss: 0.316 - ETA: 2:09 - loss: 0.315 - ETA: 2:07 - loss: 0.315 - ETA: 2:05 - loss: 0.315 - ETA: 2:03 - loss: 0.315 - ETA: 2:01 - loss: 0.315 - ETA: 1:59 - loss: 0.315 - ETA: 1:58 - loss: 0.315 - ETA: 1:56 - loss: 0.314 - ETA: 1:54 - loss: 0.314 - ETA: 1:52 - loss: 0.314 - ETA: 1:50 - loss: 0.314 - ETA: 1:48 - loss: 0.313 - ETA: 1:47 - loss: 0.313 - ETA: 1:45 - loss: 0.313 - ETA: 1:43 - loss: 0.312 - ETA: 1:41 - loss: 0.312 - ETA: 1:39 - loss: 0.312 - ETA: 1:38 - loss: 0.312 - ETA: 1:36 - loss: 0.312 - ETA: 1:34 - loss: 0.312 - ETA: 1:32 - loss: 0.312 - ETA: 1:30 - loss: 0.313 - ETA: 1:29 - loss: 0.313 - ETA: 1:27 - loss: 0.313 - ETA: 1:25 - loss: 0.313 - ETA: 1:23 - loss: 0.313 - ETA: 1:21 - loss: 0.313 - ETA: 1:19 - loss: 0.314 - ETA: 1:18 - loss: 0.314 - ETA: 1:16 - loss: 0.314 - ETA: 1:14 - loss: 0.314 - ETA: 1:12 - loss: 0.314 - ETA: 1:10 - loss: 0.314 - ETA: 1:09 - loss: 0.314 - ETA: 1:07 - loss: 0.314 - ETA: 1:05 - loss: 0.314 - ETA: 1:03 - loss: 0.314 - ETA: 1:01 - loss: 0.313 - ETA: 59s - loss: 0.313 - ETA: 58s - loss: 0.31 - ETA: 56s - loss: 0.31 - ETA: 54s - loss: 0.31 - ETA: 52s - loss: 0.31 - ETA: 50s - loss: 0.31 - ETA: 49s - loss: 0.31 - ETA: 47s - loss: 0.31 - ETA: 45s - loss: 0.31 - ETA: 43s - loss: 0.31 - ETA: 41s - loss: 0.31 - ETA: 40s - loss: 0.31 - ETA: 38s - loss: 0.31 - ETA: 36s - loss: 0.31 - ETA: 34s - loss: 0.31 - ETA: 32s - loss: 0.31 - ETA: 30s - loss: 0.31 - ETA: 29s - loss: 0.31 - ETA: 27s - loss: 0.31 - ETA: 25s - loss: 0.31 - ETA: 23s - loss: 0.31 - ETA: 21s - loss: 0.31 - ETA: 20s - loss: 0.31 - ETA: 18s - loss: 0.31 - ETA: 16s - loss: 0.31 - ETA: 14s - loss: 0.31 - ETA: 12s - loss: 0.31 - ETA: 10s - loss: 0.31 - ETA: 9s - loss: 0.3106 - ETA: 7s - loss: 0.310 - ETA: 5s - loss: 0.310 - ETA: 3s - loss: 0.310 - ETA: 1s - loss: 0.310 - ETA: 0s - loss: 0.310 - 208s 2s/step - loss: 0.3104 - val_loss: 0.0962\n",
      "\n",
      "Epoch 00016: val_loss improved from 0.10170 to 0.09619, saving model to clean_notebooks\\cnn_injection_transfer.h5\n",
      "Epoch 17/30\n",
      "111/111 [==============================] - ETA: 3:10 - loss: 0.254 - ETA: 3:14 - loss: 0.258 - ETA: 3:12 - loss: 0.249 - ETA: 3:11 - loss: 0.265 - ETA: 3:09 - loss: 0.274 - ETA: 3:08 - loss: 0.289 - ETA: 3:07 - loss: 0.297 - ETA: 3:05 - loss: 0.303 - ETA: 3:03 - loss: 0.305 - ETA: 3:02 - loss: 0.307 - ETA: 3:00 - loss: 0.307 - ETA: 2:58 - loss: 0.305 - ETA: 2:56 - loss: 0.308 - ETA: 2:55 - loss: 0.308 - ETA: 2:53 - loss: 0.310 - ETA: 2:51 - loss: 0.311 - ETA: 2:49 - loss: 0.313 - ETA: 2:47 - loss: 0.314 - ETA: 2:45 - loss: 0.315 - ETA: 2:44 - loss: 0.315 - ETA: 2:42 - loss: 0.315 - ETA: 2:40 - loss: 0.315 - ETA: 2:38 - loss: 0.314 - ETA: 2:37 - loss: 0.314 - ETA: 2:35 - loss: 0.314 - ETA: 2:33 - loss: 0.314 - ETA: 2:31 - loss: 0.314 - ETA: 2:29 - loss: 0.314 - ETA: 2:27 - loss: 0.313 - ETA: 2:26 - loss: 0.313 - ETA: 2:24 - loss: 0.312 - ETA: 2:22 - loss: 0.312 - ETA: 2:21 - loss: 0.312 - ETA: 2:19 - loss: 0.312 - ETA: 2:17 - loss: 0.312 - ETA: 2:15 - loss: 0.312 - ETA: 2:13 - loss: 0.312 - ETA: 2:12 - loss: 0.312 - ETA: 2:10 - loss: 0.312 - ETA: 2:08 - loss: 0.312 - ETA: 2:06 - loss: 0.311 - ETA: 2:05 - loss: 0.311 - ETA: 2:03 - loss: 0.310 - ETA: 2:01 - loss: 0.310 - ETA: 1:59 - loss: 0.311 - ETA: 1:57 - loss: 0.311 - ETA: 1:56 - loss: 0.311 - ETA: 1:54 - loss: 0.311 - ETA: 1:52 - loss: 0.311 - ETA: 1:50 - loss: 0.311 - ETA: 1:48 - loss: 0.311 - ETA: 1:46 - loss: 0.311 - ETA: 1:45 - loss: 0.311 - ETA: 1:43 - loss: 0.311 - ETA: 1:41 - loss: 0.312 - ETA: 1:39 - loss: 0.312 - ETA: 1:37 - loss: 0.312 - ETA: 1:36 - loss: 0.312 - ETA: 1:34 - loss: 0.312 - ETA: 1:32 - loss: 0.312 - ETA: 1:30 - loss: 0.312 - ETA: 1:28 - loss: 0.312 - ETA: 1:26 - loss: 0.312 - ETA: 1:25 - loss: 0.313 - ETA: 1:23 - loss: 0.313 - ETA: 1:21 - loss: 0.313 - ETA: 1:19 - loss: 0.314 - ETA: 1:17 - loss: 0.314 - ETA: 1:15 - loss: 0.314 - ETA: 1:14 - loss: 0.315 - ETA: 1:12 - loss: 0.315 - ETA: 1:10 - loss: 0.315 - ETA: 1:08 - loss: 0.316 - ETA: 1:06 - loss: 0.316 - ETA: 1:05 - loss: 0.317 - ETA: 1:03 - loss: 0.317 - ETA: 1:01 - loss: 0.318 - ETA: 59s - loss: 0.318 - ETA: 57s - loss: 0.31 - ETA: 56s - loss: 0.31 - ETA: 54s - loss: 0.31 - ETA: 52s - loss: 0.32 - ETA: 50s - loss: 0.32 - ETA: 48s - loss: 0.32 - ETA: 47s - loss: 0.32 - ETA: 45s - loss: 0.32 - ETA: 43s - loss: 0.32 - ETA: 41s - loss: 0.32 - ETA: 39s - loss: 0.32 - ETA: 37s - loss: 0.32 - ETA: 36s - loss: 0.32 - ETA: 34s - loss: 0.32 - ETA: 32s - loss: 0.32 - ETA: 30s - loss: 0.32 - ETA: 28s - loss: 0.32 - ETA: 27s - loss: 0.32 - ETA: 25s - loss: 0.32 - ETA: 23s - loss: 0.32 - ETA: 21s - loss: 0.32 - ETA: 19s - loss: 0.32 - ETA: 18s - loss: 0.32 - ETA: 16s - loss: 0.32 - ETA: 14s - loss: 0.32 - ETA: 12s - loss: 0.32 - ETA: 10s - loss: 0.32 - ETA: 9s - loss: 0.3278 - ETA: 7s - loss: 0.328 - ETA: 5s - loss: 0.328 - ETA: 3s - loss: 0.328 - ETA: 1s - loss: 0.328 - ETA: 0s - loss: 0.328 - 206s 2s/step - loss: 0.3291 - val_loss: 0.1088\n",
      "\n",
      "Epoch 00017: val_loss did not improve from 0.09619\n",
      "Epoch 18/30\n",
      "111/111 [==============================] - ETA: 3:14 - loss: 0.230 - ETA: 3:20 - loss: 0.208 - ETA: 3:18 - loss: 0.186 - ETA: 3:14 - loss: 0.194 - ETA: 3:13 - loss: 0.207 - ETA: 3:10 - loss: 0.215 - ETA: 3:08 - loss: 0.220 - ETA: 3:07 - loss: 0.221 - ETA: 3:05 - loss: 0.223 - ETA: 3:03 - loss: 0.224 - ETA: 3:01 - loss: 0.231 - ETA: 2:59 - loss: 0.238 - ETA: 2:57 - loss: 0.244 - ETA: 2:55 - loss: 0.248 - ETA: 2:53 - loss: 0.252 - ETA: 2:51 - loss: 0.255 - ETA: 2:49 - loss: 0.256 - ETA: 2:47 - loss: 0.261 - ETA: 2:46 - loss: 0.264 - ETA: 2:44 - loss: 0.269 - ETA: 2:42 - loss: 0.273 - ETA: 2:40 - loss: 0.276 - ETA: 2:38 - loss: 0.279 - ETA: 2:36 - loss: 0.282 - ETA: 2:35 - loss: 0.284 - ETA: 2:33 - loss: 0.286 - ETA: 2:31 - loss: 0.287 - ETA: 2:29 - loss: 0.289 - ETA: 2:27 - loss: 0.290 - ETA: 2:26 - loss: 0.291 - ETA: 2:24 - loss: 0.292 - ETA: 2:22 - loss: 0.292 - ETA: 2:21 - loss: 0.293 - ETA: 2:19 - loss: 0.295 - ETA: 2:17 - loss: 0.296 - ETA: 2:16 - loss: 0.298 - ETA: 2:14 - loss: 0.299 - ETA: 2:12 - loss: 0.301 - ETA: 2:10 - loss: 0.302 - ETA: 2:08 - loss: 0.304 - ETA: 2:06 - loss: 0.305 - ETA: 2:04 - loss: 0.306 - ETA: 2:03 - loss: 0.307 - ETA: 2:01 - loss: 0.308 - ETA: 1:59 - loss: 0.309 - ETA: 1:57 - loss: 0.310 - ETA: 1:55 - loss: 0.310 - ETA: 1:54 - loss: 0.311 - ETA: 1:52 - loss: 0.312 - ETA: 1:50 - loss: 0.312 - ETA: 1:48 - loss: 0.313 - ETA: 1:46 - loss: 0.314 - ETA: 1:44 - loss: 0.314 - ETA: 1:43 - loss: 0.315 - ETA: 1:41 - loss: 0.316 - ETA: 1:39 - loss: 0.317 - ETA: 1:37 - loss: 0.317 - ETA: 1:35 - loss: 0.318 - ETA: 1:34 - loss: 0.319 - ETA: 1:32 - loss: 0.320 - ETA: 1:30 - loss: 0.320 - ETA: 1:28 - loss: 0.321 - ETA: 1:27 - loss: 0.321 - ETA: 1:25 - loss: 0.322 - ETA: 1:23 - loss: 0.322 - ETA: 1:21 - loss: 0.323 - ETA: 1:19 - loss: 0.323 - ETA: 1:18 - loss: 0.323 - ETA: 1:16 - loss: 0.323 - ETA: 1:14 - loss: 0.324 - ETA: 1:12 - loss: 0.324 - ETA: 1:10 - loss: 0.324 - ETA: 1:08 - loss: 0.324 - ETA: 1:07 - loss: 0.325 - ETA: 1:05 - loss: 0.325 - ETA: 1:03 - loss: 0.326 - ETA: 1:01 - loss: 0.326 - ETA: 59s - loss: 0.326 - ETA: 58s - loss: 0.32 - ETA: 56s - loss: 0.32 - ETA: 54s - loss: 0.32 - ETA: 52s - loss: 0.32 - ETA: 50s - loss: 0.32 - ETA: 48s - loss: 0.32 - ETA: 47s - loss: 0.32 - ETA: 45s - loss: 0.32 - ETA: 43s - loss: 0.32 - ETA: 41s - loss: 0.32 - ETA: 39s - loss: 0.32 - ETA: 38s - loss: 0.32 - ETA: 36s - loss: 0.33 - ETA: 34s - loss: 0.33 - ETA: 32s - loss: 0.33 - ETA: 30s - loss: 0.33 - ETA: 29s - loss: 0.33 - ETA: 27s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 9s - loss: 0.3324 - ETA: 7s - loss: 0.332 - ETA: 5s - loss: 0.332 - ETA: 3s - loss: 0.332 - ETA: 1s - loss: 0.332 - ETA: 0s - loss: 0.332 - 207s 2s/step - loss: 0.3330 - val_loss: 0.0841\n",
      "\n",
      "Epoch 00018: val_loss improved from 0.09619 to 0.08409, saving model to clean_notebooks\\cnn_injection_transfer.h5\n",
      "Epoch 19/30\n",
      "111/111 [==============================] - ETA: 3:04 - loss: 0.814 - ETA: 3:11 - loss: 0.676 - ETA: 3:09 - loss: 0.612 - ETA: 3:10 - loss: 0.599 - ETA: 3:09 - loss: 0.578 - ETA: 3:08 - loss: 0.567 - ETA: 3:05 - loss: 0.553 - ETA: 3:04 - loss: 0.541 - ETA: 3:02 - loss: 0.528 - ETA: 3:02 - loss: 0.515 - ETA: 3:01 - loss: 0.502 - ETA: 2:59 - loss: 0.491 - ETA: 2:58 - loss: 0.481 - ETA: 2:56 - loss: 0.471 - ETA: 2:54 - loss: 0.465 - ETA: 2:52 - loss: 0.459 - ETA: 2:50 - loss: 0.457 - ETA: 2:48 - loss: 0.454 - ETA: 2:47 - loss: 0.453 - ETA: 2:45 - loss: 0.451 - ETA: 2:43 - loss: 0.449 - ETA: 2:41 - loss: 0.447 - ETA: 2:39 - loss: 0.445 - ETA: 2:37 - loss: 0.443 - ETA: 2:35 - loss: 0.442 - ETA: 2:34 - loss: 0.440 - ETA: 2:32 - loss: 0.439 - ETA: 2:30 - loss: 0.437 - ETA: 2:28 - loss: 0.436 - ETA: 2:26 - loss: 0.435 - ETA: 2:24 - loss: 0.434 - ETA: 2:22 - loss: 0.432 - ETA: 2:21 - loss: 0.431 - ETA: 2:19 - loss: 0.429 - ETA: 2:17 - loss: 0.428 - ETA: 2:15 - loss: 0.427 - ETA: 2:13 - loss: 0.426 - ETA: 2:12 - loss: 0.425 - ETA: 2:10 - loss: 0.424 - ETA: 2:08 - loss: 0.423 - ETA: 2:06 - loss: 0.422 - ETA: 2:04 - loss: 0.421 - ETA: 2:02 - loss: 0.420 - ETA: 2:01 - loss: 0.419 - ETA: 1:59 - loss: 0.418 - ETA: 1:57 - loss: 0.416 - ETA: 1:55 - loss: 0.415 - ETA: 1:53 - loss: 0.414 - ETA: 1:51 - loss: 0.413 - ETA: 1:50 - loss: 0.412 - ETA: 1:48 - loss: 0.411 - ETA: 1:46 - loss: 0.410 - ETA: 1:44 - loss: 0.409 - ETA: 1:42 - loss: 0.408 - ETA: 1:41 - loss: 0.408 - ETA: 1:39 - loss: 0.407 - ETA: 1:37 - loss: 0.406 - ETA: 1:35 - loss: 0.406 - ETA: 1:33 - loss: 0.405 - ETA: 1:32 - loss: 0.404 - ETA: 1:30 - loss: 0.404 - ETA: 1:28 - loss: 0.403 - ETA: 1:26 - loss: 0.403 - ETA: 1:24 - loss: 0.402 - ETA: 1:23 - loss: 0.402 - ETA: 1:21 - loss: 0.401 - ETA: 1:19 - loss: 0.401 - ETA: 1:17 - loss: 0.401 - ETA: 1:15 - loss: 0.400 - ETA: 1:14 - loss: 0.400 - ETA: 1:12 - loss: 0.399 - ETA: 1:10 - loss: 0.399 - ETA: 1:08 - loss: 0.398 - ETA: 1:06 - loss: 0.398 - ETA: 1:05 - loss: 0.398 - ETA: 1:03 - loss: 0.397 - ETA: 1:01 - loss: 0.397 - ETA: 59s - loss: 0.396 - ETA: 57s - loss: 0.39 - ETA: 56s - loss: 0.39 - ETA: 54s - loss: 0.39 - ETA: 52s - loss: 0.39 - ETA: 50s - loss: 0.39 - ETA: 48s - loss: 0.39 - ETA: 47s - loss: 0.39 - ETA: 45s - loss: 0.39 - ETA: 43s - loss: 0.39 - ETA: 41s - loss: 0.39 - ETA: 39s - loss: 0.39 - ETA: 38s - loss: 0.39 - ETA: 36s - loss: 0.39 - ETA: 34s - loss: 0.39 - ETA: 32s - loss: 0.39 - ETA: 30s - loss: 0.39 - ETA: 28s - loss: 0.39 - ETA: 27s - loss: 0.39 - ETA: 25s - loss: 0.39 - ETA: 23s - loss: 0.39 - ETA: 21s - loss: 0.39 - ETA: 19s - loss: 0.39 - ETA: 18s - loss: 0.39 - ETA: 16s - loss: 0.39 - ETA: 14s - loss: 0.38 - ETA: 12s - loss: 0.38 - ETA: 10s - loss: 0.38 - ETA: 9s - loss: 0.3889 - ETA: 7s - loss: 0.388 - ETA: 5s - loss: 0.388 - ETA: 3s - loss: 0.387 - ETA: 1s - loss: 0.387 - ETA: 0s - loss: 0.387 - 207s 2s/step - loss: 0.3869 - val_loss: 0.0946\n",
      "\n",
      "Epoch 00019: val_loss did not improve from 0.08409\n",
      "Epoch 20/30\n",
      "111/111 [==============================] - ETA: 3:29 - loss: 0.121 - ETA: 3:18 - loss: 0.149 - ETA: 3:18 - loss: 0.200 - ETA: 3:14 - loss: 0.215 - ETA: 3:12 - loss: 0.219 - ETA: 3:11 - loss: 0.225 - ETA: 3:09 - loss: 0.232 - ETA: 3:06 - loss: 0.236 - ETA: 3:04 - loss: 0.239 - ETA: 3:02 - loss: 0.241 - ETA: 3:01 - loss: 0.242 - ETA: 2:59 - loss: 0.243 - ETA: 2:58 - loss: 0.244 - ETA: 2:56 - loss: 0.246 - ETA: 2:54 - loss: 0.247 - ETA: 2:52 - loss: 0.248 - ETA: 2:51 - loss: 0.251 - ETA: 2:49 - loss: 0.254 - ETA: 2:47 - loss: 0.255 - ETA: 2:46 - loss: 0.257 - ETA: 2:44 - loss: 0.259 - ETA: 2:42 - loss: 0.260 - ETA: 2:40 - loss: 0.262 - ETA: 2:39 - loss: 0.263 - ETA: 2:37 - loss: 0.265 - ETA: 2:35 - loss: 0.266 - ETA: 2:33 - loss: 0.267 - ETA: 2:31 - loss: 0.267 - ETA: 2:29 - loss: 0.268 - ETA: 2:28 - loss: 0.268 - ETA: 2:26 - loss: 0.268 - ETA: 2:24 - loss: 0.269 - ETA: 2:22 - loss: 0.271 - ETA: 2:20 - loss: 0.273 - ETA: 2:18 - loss: 0.275 - ETA: 2:17 - loss: 0.277 - ETA: 2:15 - loss: 0.279 - ETA: 2:13 - loss: 0.281 - ETA: 2:11 - loss: 0.282 - ETA: 2:09 - loss: 0.284 - ETA: 2:07 - loss: 0.285 - ETA: 2:05 - loss: 0.286 - ETA: 2:03 - loss: 0.287 - ETA: 2:02 - loss: 0.289 - ETA: 2:00 - loss: 0.290 - ETA: 1:58 - loss: 0.291 - ETA: 1:56 - loss: 0.292 - ETA: 1:54 - loss: 0.292 - ETA: 1:52 - loss: 0.293 - ETA: 1:51 - loss: 0.293 - ETA: 1:49 - loss: 0.294 - ETA: 1:47 - loss: 0.295 - ETA: 1:45 - loss: 0.295 - ETA: 1:43 - loss: 0.296 - ETA: 1:42 - loss: 0.296 - ETA: 1:40 - loss: 0.296 - ETA: 1:38 - loss: 0.297 - ETA: 1:36 - loss: 0.297 - ETA: 1:34 - loss: 0.297 - ETA: 1:32 - loss: 0.298 - ETA: 1:30 - loss: 0.298 - ETA: 1:29 - loss: 0.298 - ETA: 1:27 - loss: 0.298 - ETA: 1:25 - loss: 0.299 - ETA: 1:23 - loss: 0.299 - ETA: 1:21 - loss: 0.299 - ETA: 1:19 - loss: 0.299 - ETA: 1:18 - loss: 0.299 - ETA: 1:16 - loss: 0.299 - ETA: 1:14 - loss: 0.299 - ETA: 1:12 - loss: 0.300 - ETA: 1:10 - loss: 0.300 - ETA: 1:09 - loss: 0.300 - ETA: 1:07 - loss: 0.300 - ETA: 1:05 - loss: 0.300 - ETA: 1:03 - loss: 0.300 - ETA: 1:01 - loss: 0.300 - ETA: 59s - loss: 0.301 - ETA: 58s - loss: 0.30 - ETA: 56s - loss: 0.30 - ETA: 54s - loss: 0.30 - ETA: 52s - loss: 0.30 - ETA: 50s - loss: 0.30 - ETA: 49s - loss: 0.30 - ETA: 47s - loss: 0.30 - ETA: 45s - loss: 0.30 - ETA: 43s - loss: 0.30 - ETA: 41s - loss: 0.30 - ETA: 39s - loss: 0.30 - ETA: 38s - loss: 0.30 - ETA: 36s - loss: 0.30 - ETA: 34s - loss: 0.30 - ETA: 32s - loss: 0.30 - ETA: 30s - loss: 0.30 - ETA: 29s - loss: 0.30 - ETA: 27s - loss: 0.30 - ETA: 25s - loss: 0.30 - ETA: 23s - loss: 0.30 - ETA: 21s - loss: 0.30 - ETA: 20s - loss: 0.30 - ETA: 18s - loss: 0.30 - ETA: 16s - loss: 0.30 - ETA: 14s - loss: 0.30 - ETA: 12s - loss: 0.30 - ETA: 10s - loss: 0.30 - ETA: 9s - loss: 0.3041 - ETA: 7s - loss: 0.304 - ETA: 5s - loss: 0.304 - ETA: 3s - loss: 0.304 - ETA: 1s - loss: 0.304 - ETA: 0s - loss: 0.304 - 207s 2s/step - loss: 0.3042 - val_loss: 0.0822\n",
      "\n",
      "Epoch 00020: val_loss improved from 0.08409 to 0.08220, saving model to clean_notebooks\\cnn_injection_transfer.h5\n",
      "Epoch 21/30\n",
      "111/111 [==============================] - ETA: 3:02 - loss: 0.081 - ETA: 3:16 - loss: 0.121 - ETA: 3:14 - loss: 0.130 - ETA: 3:12 - loss: 0.130 - ETA: 3:11 - loss: 0.132 - ETA: 3:11 - loss: 0.133 - ETA: 3:08 - loss: 0.140 - ETA: 3:06 - loss: 0.147 - ETA: 3:04 - loss: 0.156 - ETA: 3:03 - loss: 0.163 - ETA: 3:00 - loss: 0.170 - ETA: 2:59 - loss: 0.175 - ETA: 2:57 - loss: 0.179 - ETA: 2:55 - loss: 0.182 - ETA: 2:53 - loss: 0.185 - ETA: 2:51 - loss: 0.188 - ETA: 2:50 - loss: 0.190 - ETA: 2:48 - loss: 0.194 - ETA: 2:46 - loss: 0.198 - ETA: 2:44 - loss: 0.201 - ETA: 2:42 - loss: 0.203 - ETA: 2:41 - loss: 0.206 - ETA: 2:39 - loss: 0.209 - ETA: 2:37 - loss: 0.211 - ETA: 2:35 - loss: 0.213 - ETA: 2:33 - loss: 0.215 - ETA: 2:32 - loss: 0.217 - ETA: 2:30 - loss: 0.218 - ETA: 2:28 - loss: 0.220 - ETA: 2:26 - loss: 0.221 - ETA: 2:24 - loss: 0.223 - ETA: 2:22 - loss: 0.224 - ETA: 2:21 - loss: 0.226 - ETA: 2:19 - loss: 0.229 - ETA: 2:17 - loss: 0.231 - ETA: 2:15 - loss: 0.234 - ETA: 2:13 - loss: 0.236 - ETA: 2:11 - loss: 0.238 - ETA: 2:10 - loss: 0.240 - ETA: 2:08 - loss: 0.242 - ETA: 2:06 - loss: 0.244 - ETA: 2:04 - loss: 0.246 - ETA: 2:02 - loss: 0.248 - ETA: 2:01 - loss: 0.250 - ETA: 1:59 - loss: 0.252 - ETA: 1:57 - loss: 0.254 - ETA: 1:55 - loss: 0.256 - ETA: 1:54 - loss: 0.258 - ETA: 1:52 - loss: 0.259 - ETA: 1:50 - loss: 0.261 - ETA: 1:48 - loss: 0.262 - ETA: 1:47 - loss: 0.264 - ETA: 1:45 - loss: 0.265 - ETA: 1:43 - loss: 0.266 - ETA: 1:41 - loss: 0.268 - ETA: 1:39 - loss: 0.269 - ETA: 1:37 - loss: 0.270 - ETA: 1:36 - loss: 0.271 - ETA: 1:34 - loss: 0.271 - ETA: 1:32 - loss: 0.272 - ETA: 1:30 - loss: 0.273 - ETA: 1:28 - loss: 0.274 - ETA: 1:26 - loss: 0.274 - ETA: 1:25 - loss: 0.275 - ETA: 1:23 - loss: 0.276 - ETA: 1:21 - loss: 0.276 - ETA: 1:19 - loss: 0.277 - ETA: 1:17 - loss: 0.277 - ETA: 1:16 - loss: 0.278 - ETA: 1:14 - loss: 0.278 - ETA: 1:12 - loss: 0.279 - ETA: 1:11 - loss: 0.279 - ETA: 1:09 - loss: 0.280 - ETA: 1:07 - loss: 0.280 - ETA: 1:05 - loss: 0.280 - ETA: 1:03 - loss: 0.281 - ETA: 1:02 - loss: 0.281 - ETA: 1:00 - loss: 0.282 - ETA: 58s - loss: 0.282 - ETA: 56s - loss: 0.28 - ETA: 54s - loss: 0.28 - ETA: 53s - loss: 0.28 - ETA: 51s - loss: 0.28 - ETA: 49s - loss: 0.28 - ETA: 47s - loss: 0.28 - ETA: 45s - loss: 0.28 - ETA: 43s - loss: 0.28 - ETA: 42s - loss: 0.28 - ETA: 40s - loss: 0.28 - ETA: 38s - loss: 0.28 - ETA: 36s - loss: 0.28 - ETA: 34s - loss: 0.28 - ETA: 32s - loss: 0.28 - ETA: 31s - loss: 0.28 - ETA: 29s - loss: 0.28 - ETA: 27s - loss: 0.28 - ETA: 25s - loss: 0.28 - ETA: 23s - loss: 0.28 - ETA: 21s - loss: 0.28 - ETA: 20s - loss: 0.28 - ETA: 18s - loss: 0.28 - ETA: 16s - loss: 0.28 - ETA: 14s - loss: 0.28 - ETA: 12s - loss: 0.28 - ETA: 10s - loss: 0.28 - ETA: 9s - loss: 0.2886 - ETA: 7s - loss: 0.288 - ETA: 5s - loss: 0.288 - ETA: 3s - loss: 0.289 - ETA: 1s - loss: 0.289 - ETA: 0s - loss: 0.289 - 209s 2s/step - loss: 0.2895 - val_loss: 0.1006\n",
      "\n",
      "Epoch 00021: val_loss did not improve from 0.08220\n",
      "Epoch 22/30\n",
      "111/111 [==============================] - ETA: 3:43 - loss: 0.617 - ETA: 3:52 - loss: 0.666 - ETA: 3:43 - loss: 0.619 - ETA: 3:39 - loss: 0.585 - ETA: 3:34 - loss: 0.551 - ETA: 3:29 - loss: 0.547 - ETA: 3:24 - loss: 0.540 - ETA: 3:21 - loss: 0.534 - ETA: 3:18 - loss: 0.525 - ETA: 3:15 - loss: 0.517 - ETA: 3:12 - loss: 0.509 - ETA: 3:10 - loss: 0.504 - ETA: 3:07 - loss: 0.497 - ETA: 3:05 - loss: 0.491 - ETA: 3:03 - loss: 0.487 - ETA: 3:01 - loss: 0.483 - ETA: 2:58 - loss: 0.478 - ETA: 2:56 - loss: 0.474 - ETA: 2:54 - loss: 0.469 - ETA: 2:52 - loss: 0.465 - ETA: 2:50 - loss: 0.463 - ETA: 2:48 - loss: 0.462 - ETA: 2:46 - loss: 0.460 - ETA: 2:44 - loss: 0.458 - ETA: 2:42 - loss: 0.455 - ETA: 2:40 - loss: 0.453 - ETA: 2:38 - loss: 0.450 - ETA: 2:36 - loss: 0.447 - ETA: 2:34 - loss: 0.445 - ETA: 2:32 - loss: 0.443 - ETA: 2:30 - loss: 0.440 - ETA: 2:28 - loss: 0.438 - ETA: 2:26 - loss: 0.436 - ETA: 2:24 - loss: 0.433 - ETA: 2:22 - loss: 0.431 - ETA: 2:20 - loss: 0.429 - ETA: 2:18 - loss: 0.427 - ETA: 2:16 - loss: 0.425 - ETA: 2:14 - loss: 0.423 - ETA: 2:12 - loss: 0.421 - ETA: 2:10 - loss: 0.419 - ETA: 2:08 - loss: 0.417 - ETA: 2:06 - loss: 0.415 - ETA: 2:05 - loss: 0.414 - ETA: 2:03 - loss: 0.412 - ETA: 2:01 - loss: 0.410 - ETA: 1:59 - loss: 0.409 - ETA: 1:57 - loss: 0.408 - ETA: 1:55 - loss: 0.407 - ETA: 1:53 - loss: 0.406 - ETA: 1:51 - loss: 0.405 - ETA: 1:49 - loss: 0.404 - ETA: 1:47 - loss: 0.403 - ETA: 1:46 - loss: 0.402 - ETA: 1:44 - loss: 0.401 - ETA: 1:42 - loss: 0.400 - ETA: 1:40 - loss: 0.399 - ETA: 1:38 - loss: 0.399 - ETA: 1:36 - loss: 0.398 - ETA: 1:34 - loss: 0.397 - ETA: 1:32 - loss: 0.396 - ETA: 1:30 - loss: 0.396 - ETA: 1:29 - loss: 0.395 - ETA: 1:27 - loss: 0.394 - ETA: 1:25 - loss: 0.393 - ETA: 1:23 - loss: 0.392 - ETA: 1:21 - loss: 0.392 - ETA: 1:19 - loss: 0.391 - ETA: 1:18 - loss: 0.391 - ETA: 1:16 - loss: 0.390 - ETA: 1:14 - loss: 0.389 - ETA: 1:12 - loss: 0.389 - ETA: 1:10 - loss: 0.388 - ETA: 1:08 - loss: 0.388 - ETA: 1:06 - loss: 0.387 - ETA: 1:04 - loss: 0.387 - ETA: 1:03 - loss: 0.386 - ETA: 1:01 - loss: 0.386 - ETA: 59s - loss: 0.385 - ETA: 57s - loss: 0.38 - ETA: 55s - loss: 0.38 - ETA: 53s - loss: 0.38 - ETA: 51s - loss: 0.38 - ETA: 50s - loss: 0.38 - ETA: 48s - loss: 0.38 - ETA: 46s - loss: 0.38 - ETA: 44s - loss: 0.38 - ETA: 42s - loss: 0.38 - ETA: 40s - loss: 0.38 - ETA: 38s - loss: 0.38 - ETA: 37s - loss: 0.37 - ETA: 35s - loss: 0.37 - ETA: 33s - loss: 0.37 - ETA: 31s - loss: 0.37 - ETA: 29s - loss: 0.37 - ETA: 27s - loss: 0.37 - ETA: 25s - loss: 0.37 - ETA: 24s - loss: 0.37 - ETA: 22s - loss: 0.37 - ETA: 20s - loss: 0.37 - ETA: 18s - loss: 0.37 - ETA: 16s - loss: 0.37 - ETA: 14s - loss: 0.37 - ETA: 12s - loss: 0.37 - ETA: 11s - loss: 0.37 - ETA: 9s - loss: 0.3745 - ETA: 7s - loss: 0.374 - ETA: 5s - loss: 0.373 - ETA: 3s - loss: 0.373 - ETA: 1s - loss: 0.373 - ETA: 0s - loss: 0.372 - 211s 2s/step - loss: 0.3725 - val_loss: 0.0863\n",
      "\n",
      "Epoch 00022: val_loss did not improve from 0.08220\n",
      "Epoch 23/30\n",
      "111/111 [==============================] - ETA: 3:19 - loss: 0.439 - ETA: 3:21 - loss: 0.381 - ETA: 3:20 - loss: 0.349 - ETA: 3:16 - loss: 0.367 - ETA: 3:13 - loss: 0.387 - ETA: 3:11 - loss: 0.390 - ETA: 3:11 - loss: 0.394 - ETA: 3:08 - loss: 0.393 - ETA: 3:06 - loss: 0.395 - ETA: 3:04 - loss: 0.394 - ETA: 3:02 - loss: 0.395 - ETA: 3:01 - loss: 0.396 - ETA: 2:59 - loss: 0.397 - ETA: 2:58 - loss: 0.395 - ETA: 2:57 - loss: 0.393 - ETA: 2:55 - loss: 0.392 - ETA: 2:54 - loss: 0.390 - ETA: 2:52 - loss: 0.389 - ETA: 2:50 - loss: 0.389 - ETA: 2:48 - loss: 0.389 - ETA: 2:46 - loss: 0.389 - ETA: 2:44 - loss: 0.389 - ETA: 2:42 - loss: 0.389 - ETA: 2:41 - loss: 0.389 - ETA: 2:39 - loss: 0.389 - ETA: 2:37 - loss: 0.388 - ETA: 2:35 - loss: 0.387 - ETA: 2:33 - loss: 0.387 - ETA: 2:31 - loss: 0.387 - ETA: 2:29 - loss: 0.387 - ETA: 2:27 - loss: 0.387 - ETA: 2:25 - loss: 0.386 - ETA: 2:23 - loss: 0.386 - ETA: 2:22 - loss: 0.385 - ETA: 2:20 - loss: 0.384 - ETA: 2:18 - loss: 0.384 - ETA: 2:16 - loss: 0.383 - ETA: 2:14 - loss: 0.382 - ETA: 2:12 - loss: 0.381 - ETA: 2:10 - loss: 0.380 - ETA: 2:09 - loss: 0.380 - ETA: 2:07 - loss: 0.379 - ETA: 2:05 - loss: 0.378 - ETA: 2:03 - loss: 0.377 - ETA: 2:01 - loss: 0.376 - ETA: 1:59 - loss: 0.375 - ETA: 1:57 - loss: 0.374 - ETA: 1:56 - loss: 0.372 - ETA: 1:54 - loss: 0.371 - ETA: 1:52 - loss: 0.371 - ETA: 1:50 - loss: 0.370 - ETA: 1:48 - loss: 0.370 - ETA: 1:46 - loss: 0.369 - ETA: 1:44 - loss: 0.368 - ETA: 1:43 - loss: 0.367 - ETA: 1:41 - loss: 0.367 - ETA: 1:39 - loss: 0.366 - ETA: 1:37 - loss: 0.365 - ETA: 1:35 - loss: 0.364 - ETA: 1:33 - loss: 0.364 - ETA: 1:31 - loss: 0.363 - ETA: 1:30 - loss: 0.363 - ETA: 1:28 - loss: 0.362 - ETA: 1:26 - loss: 0.362 - ETA: 1:24 - loss: 0.361 - ETA: 1:22 - loss: 0.361 - ETA: 1:20 - loss: 0.360 - ETA: 1:18 - loss: 0.360 - ETA: 1:17 - loss: 0.360 - ETA: 1:15 - loss: 0.359 - ETA: 1:13 - loss: 0.359 - ETA: 1:11 - loss: 0.358 - ETA: 1:09 - loss: 0.358 - ETA: 1:07 - loss: 0.357 - ETA: 1:06 - loss: 0.357 - ETA: 1:04 - loss: 0.356 - ETA: 1:02 - loss: 0.356 - ETA: 1:00 - loss: 0.355 - ETA: 58s - loss: 0.355 - ETA: 56s - loss: 0.35 - ETA: 55s - loss: 0.35 - ETA: 53s - loss: 0.35 - ETA: 51s - loss: 0.35 - ETA: 49s - loss: 0.35 - ETA: 47s - loss: 0.35 - ETA: 46s - loss: 0.35 - ETA: 44s - loss: 0.35 - ETA: 42s - loss: 0.35 - ETA: 40s - loss: 0.35 - ETA: 38s - loss: 0.35 - ETA: 36s - loss: 0.34 - ETA: 34s - loss: 0.34 - ETA: 33s - loss: 0.34 - ETA: 31s - loss: 0.34 - ETA: 29s - loss: 0.34 - ETA: 27s - loss: 0.34 - ETA: 25s - loss: 0.34 - ETA: 23s - loss: 0.34 - ETA: 22s - loss: 0.34 - ETA: 20s - loss: 0.34 - ETA: 18s - loss: 0.34 - ETA: 16s - loss: 0.34 - ETA: 14s - loss: 0.34 - ETA: 12s - loss: 0.34 - ETA: 11s - loss: 0.34 - ETA: 9s - loss: 0.3438 - ETA: 7s - loss: 0.343 - ETA: 5s - loss: 0.343 - ETA: 3s - loss: 0.342 - ETA: 1s - loss: 0.342 - ETA: 0s - loss: 0.342 - 210s 2s/step - loss: 0.3420 - val_loss: 0.0921\n",
      "\n",
      "Epoch 00023: val_loss did not improve from 0.08220\n",
      "Epoch 24/30\n",
      "111/111 [==============================] - ETA: 3:23 - loss: 0.411 - ETA: 3:19 - loss: 0.370 - ETA: 3:17 - loss: 0.354 - ETA: 3:15 - loss: 0.337 - ETA: 3:14 - loss: 0.323 - ETA: 3:11 - loss: 0.319 - ETA: 3:10 - loss: 0.320 - ETA: 3:07 - loss: 0.324 - ETA: 3:05 - loss: 0.327 - ETA: 3:03 - loss: 0.331 - ETA: 3:02 - loss: 0.334 - ETA: 3:00 - loss: 0.335 - ETA: 2:58 - loss: 0.335 - ETA: 2:56 - loss: 0.336 - ETA: 2:54 - loss: 0.337 - ETA: 2:53 - loss: 0.338 - ETA: 2:51 - loss: 0.338 - ETA: 2:49 - loss: 0.337 - ETA: 2:47 - loss: 0.337 - ETA: 2:45 - loss: 0.337 - ETA: 2:43 - loss: 0.337 - ETA: 2:41 - loss: 0.337 - ETA: 2:40 - loss: 0.336 - ETA: 2:38 - loss: 0.336 - ETA: 2:36 - loss: 0.335 - ETA: 2:34 - loss: 0.335 - ETA: 2:32 - loss: 0.334 - ETA: 2:31 - loss: 0.334 - ETA: 2:29 - loss: 0.335 - ETA: 2:27 - loss: 0.335 - ETA: 2:25 - loss: 0.335 - ETA: 2:23 - loss: 0.335 - ETA: 2:22 - loss: 0.335 - ETA: 2:20 - loss: 0.336 - ETA: 2:18 - loss: 0.336 - ETA: 2:17 - loss: 0.336 - ETA: 2:15 - loss: 0.336 - ETA: 2:13 - loss: 0.336 - ETA: 2:11 - loss: 0.336 - ETA: 2:09 - loss: 0.336 - ETA: 2:07 - loss: 0.335 - ETA: 2:06 - loss: 0.335 - ETA: 2:04 - loss: 0.335 - ETA: 2:02 - loss: 0.335 - ETA: 2:00 - loss: 0.334 - ETA: 1:58 - loss: 0.334 - ETA: 1:57 - loss: 0.334 - ETA: 1:55 - loss: 0.334 - ETA: 1:53 - loss: 0.333 - ETA: 1:51 - loss: 0.333 - ETA: 1:49 - loss: 0.332 - ETA: 1:48 - loss: 0.332 - ETA: 1:46 - loss: 0.332 - ETA: 1:44 - loss: 0.331 - ETA: 1:42 - loss: 0.331 - ETA: 1:40 - loss: 0.331 - ETA: 1:38 - loss: 0.330 - ETA: 1:37 - loss: 0.330 - ETA: 1:35 - loss: 0.330 - ETA: 1:33 - loss: 0.330 - ETA: 1:31 - loss: 0.330 - ETA: 1:29 - loss: 0.330 - ETA: 1:27 - loss: 0.330 - ETA: 1:26 - loss: 0.330 - ETA: 1:24 - loss: 0.330 - ETA: 1:22 - loss: 0.330 - ETA: 1:20 - loss: 0.330 - ETA: 1:18 - loss: 0.330 - ETA: 1:16 - loss: 0.330 - ETA: 1:15 - loss: 0.330 - ETA: 1:13 - loss: 0.330 - ETA: 1:11 - loss: 0.330 - ETA: 1:09 - loss: 0.330 - ETA: 1:07 - loss: 0.330 - ETA: 1:05 - loss: 0.330 - ETA: 1:04 - loss: 0.330 - ETA: 1:02 - loss: 0.329 - ETA: 1:00 - loss: 0.329 - ETA: 58s - loss: 0.329 - ETA: 56s - loss: 0.32 - ETA: 54s - loss: 0.32 - ETA: 53s - loss: 0.32 - ETA: 51s - loss: 0.32 - ETA: 49s - loss: 0.32 - ETA: 47s - loss: 0.32 - ETA: 45s - loss: 0.32 - ETA: 43s - loss: 0.32 - ETA: 42s - loss: 0.32 - ETA: 40s - loss: 0.32 - ETA: 38s - loss: 0.32 - ETA: 36s - loss: 0.32 - ETA: 34s - loss: 0.32 - ETA: 32s - loss: 0.32 - ETA: 31s - loss: 0.32 - ETA: 29s - loss: 0.32 - ETA: 27s - loss: 0.32 - ETA: 25s - loss: 0.32 - ETA: 23s - loss: 0.32 - ETA: 22s - loss: 0.32 - ETA: 20s - loss: 0.32 - ETA: 18s - loss: 0.32 - ETA: 16s - loss: 0.32 - ETA: 14s - loss: 0.32 - ETA: 12s - loss: 0.32 - ETA: 11s - loss: 0.32 - ETA: 9s - loss: 0.3240 - ETA: 7s - loss: 0.323 - ETA: 5s - loss: 0.323 - ETA: 3s - loss: 0.323 - ETA: 1s - loss: 0.323 - ETA: 0s - loss: 0.323 - 210s 2s/step - loss: 0.3235 - val_loss: 0.0981\n",
      "\n",
      "Epoch 00024: val_loss did not improve from 0.08220\n",
      "Epoch 25/30\n",
      "111/111 [==============================] - ETA: 3:24 - loss: 0.193 - ETA: 3:16 - loss: 0.164 - ETA: 3:17 - loss: 0.172 - ETA: 3:15 - loss: 0.171 - ETA: 3:12 - loss: 0.234 - ETA: 3:10 - loss: 0.272 - ETA: 3:09 - loss: 0.295 - ETA: 3:08 - loss: 0.310 - ETA: 3:06 - loss: 0.321 - ETA: 3:05 - loss: 0.328 - ETA: 3:03 - loss: 0.337 - ETA: 3:01 - loss: 0.343 - ETA: 2:59 - loss: 0.347 - ETA: 2:57 - loss: 0.351 - ETA: 2:55 - loss: 0.353 - ETA: 2:54 - loss: 0.356 - ETA: 2:52 - loss: 0.357 - ETA: 2:50 - loss: 0.360 - ETA: 2:48 - loss: 0.362 - ETA: 2:46 - loss: 0.364 - ETA: 2:44 - loss: 0.366 - ETA: 2:42 - loss: 0.368 - ETA: 2:40 - loss: 0.369 - ETA: 2:39 - loss: 0.369 - ETA: 2:37 - loss: 0.370 - ETA: 2:35 - loss: 0.370 - ETA: 2:33 - loss: 0.370 - ETA: 2:32 - loss: 0.370 - ETA: 2:30 - loss: 0.370 - ETA: 2:28 - loss: 0.369 - ETA: 2:26 - loss: 0.369 - ETA: 2:24 - loss: 0.368 - ETA: 2:22 - loss: 0.368 - ETA: 2:20 - loss: 0.367 - ETA: 2:19 - loss: 0.366 - ETA: 2:17 - loss: 0.365 - ETA: 2:15 - loss: 0.364 - ETA: 2:13 - loss: 0.364 - ETA: 2:11 - loss: 0.363 - ETA: 2:09 - loss: 0.363 - ETA: 2:08 - loss: 0.362 - ETA: 2:06 - loss: 0.362 - ETA: 2:04 - loss: 0.362 - ETA: 2:02 - loss: 0.361 - ETA: 2:00 - loss: 0.360 - ETA: 1:58 - loss: 0.360 - ETA: 1:57 - loss: 0.359 - ETA: 1:55 - loss: 0.359 - ETA: 1:53 - loss: 0.358 - ETA: 1:52 - loss: 0.358 - ETA: 1:50 - loss: 0.357 - ETA: 1:48 - loss: 0.357 - ETA: 1:47 - loss: 0.356 - ETA: 1:45 - loss: 0.356 - ETA: 1:43 - loss: 0.355 - ETA: 1:41 - loss: 0.355 - ETA: 1:39 - loss: 0.354 - ETA: 1:37 - loss: 0.354 - ETA: 1:36 - loss: 0.353 - ETA: 1:34 - loss: 0.353 - ETA: 1:32 - loss: 0.352 - ETA: 1:30 - loss: 0.352 - ETA: 1:28 - loss: 0.351 - ETA: 1:26 - loss: 0.351 - ETA: 1:24 - loss: 0.350 - ETA: 1:23 - loss: 0.350 - ETA: 1:21 - loss: 0.349 - ETA: 1:19 - loss: 0.348 - ETA: 1:17 - loss: 0.348 - ETA: 1:15 - loss: 0.347 - ETA: 1:13 - loss: 0.347 - ETA: 1:11 - loss: 0.346 - ETA: 1:10 - loss: 0.345 - ETA: 1:08 - loss: 0.345 - ETA: 1:06 - loss: 0.344 - ETA: 1:04 - loss: 0.344 - ETA: 1:02 - loss: 0.343 - ETA: 1:00 - loss: 0.343 - ETA: 58s - loss: 0.342 - ETA: 57s - loss: 0.34 - ETA: 55s - loss: 0.34 - ETA: 53s - loss: 0.34 - ETA: 51s - loss: 0.34 - ETA: 49s - loss: 0.34 - ETA: 47s - loss: 0.34 - ETA: 46s - loss: 0.33 - ETA: 44s - loss: 0.33 - ETA: 42s - loss: 0.33 - ETA: 40s - loss: 0.33 - ETA: 38s - loss: 0.33 - ETA: 36s - loss: 0.33 - ETA: 34s - loss: 0.33 - ETA: 33s - loss: 0.33 - ETA: 31s - loss: 0.33 - ETA: 29s - loss: 0.33 - ETA: 27s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 20s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 9s - loss: 0.3316 - ETA: 7s - loss: 0.331 - ETA: 5s - loss: 0.330 - ETA: 3s - loss: 0.330 - ETA: 1s - loss: 0.330 - ETA: 0s - loss: 0.329 - 210s 2s/step - loss: 0.3295 - val_loss: 0.0927\n",
      "\n",
      "Epoch 00025: val_loss did not improve from 0.08220\n",
      "Epoch 00025: early stopping\n",
      "4/4 [==============================] - ETA: 6s - loss: 0.207 - ETA: 4s - loss: 0.208 - ETA: 2s - loss: 0.202 - ETA: 0s - loss: 0.186 - 7s 2s/step - loss: 0.1865\n",
      "Val Score:  0.18647873401641846\n",
      "====================================================================================\n",
      "\n",
      "\n",
      "Epoch 1/30\n",
      "111/111 [==============================] - ETA: 19:33 - loss: 1.36 - ETA: 3:59 - loss: 1.3747 - ETA: 3:46 - loss: 1.308 - ETA: 3:41 - loss: 1.238 - ETA: 3:38 - loss: 1.208 - ETA: 3:35 - loss: 1.180 - ETA: 3:31 - loss: 1.161 - ETA: 3:27 - loss: 1.156 - ETA: 3:24 - loss: 1.156 - ETA: 3:21 - loss: 1.149 - ETA: 3:19 - loss: 1.145 - ETA: 3:16 - loss: 1.138 - ETA: 3:14 - loss: 1.133 - ETA: 3:12 - loss: 1.126 - ETA: 3:09 - loss: 1.118 - ETA: 3:07 - loss: 1.109 - ETA: 3:05 - loss: 1.103 - ETA: 3:03 - loss: 1.098 - ETA: 3:01 - loss: 1.093 - ETA: 2:58 - loss: 1.087 - ETA: 2:56 - loss: 1.080 - ETA: 2:54 - loss: 1.075 - ETA: 2:52 - loss: 1.069 - ETA: 2:50 - loss: 1.063 - ETA: 2:47 - loss: 1.057 - ETA: 2:45 - loss: 1.052 - ETA: 2:43 - loss: 1.048 - ETA: 2:40 - loss: 1.042 - ETA: 2:38 - loss: 1.037 - ETA: 2:36 - loss: 1.033 - ETA: 2:34 - loss: 1.029 - ETA: 2:31 - loss: 1.027 - ETA: 2:29 - loss: 1.024 - ETA: 2:27 - loss: 1.023 - ETA: 2:25 - loss: 1.022 - ETA: 2:23 - loss: 1.021 - ETA: 2:21 - loss: 1.020 - ETA: 2:19 - loss: 1.019 - ETA: 2:17 - loss: 1.018 - ETA: 2:15 - loss: 1.017 - ETA: 2:13 - loss: 1.016 - ETA: 2:11 - loss: 1.015 - ETA: 2:09 - loss: 1.014 - ETA: 2:07 - loss: 1.014 - ETA: 2:05 - loss: 1.014 - ETA: 2:03 - loss: 1.013 - ETA: 2:01 - loss: 1.012 - ETA: 1:59 - loss: 1.011 - ETA: 1:57 - loss: 1.011 - ETA: 1:55 - loss: 1.009 - ETA: 1:53 - loss: 1.009 - ETA: 1:52 - loss: 1.008 - ETA: 1:50 - loss: 1.007 - ETA: 1:48 - loss: 1.007 - ETA: 1:46 - loss: 1.006 - ETA: 1:44 - loss: 1.005 - ETA: 1:42 - loss: 1.004 - ETA: 1:40 - loss: 1.002 - ETA: 1:38 - loss: 1.001 - ETA: 1:36 - loss: 1.000 - ETA: 1:34 - loss: 0.999 - ETA: 1:32 - loss: 0.998 - ETA: 1:30 - loss: 0.997 - ETA: 1:28 - loss: 0.996 - ETA: 1:27 - loss: 0.995 - ETA: 1:25 - loss: 0.994 - ETA: 1:23 - loss: 0.993 - ETA: 1:21 - loss: 0.992 - ETA: 1:19 - loss: 0.991 - ETA: 1:17 - loss: 0.990 - ETA: 1:15 - loss: 0.989 - ETA: 1:13 - loss: 0.988 - ETA: 1:11 - loss: 0.987 - ETA: 1:09 - loss: 0.986 - ETA: 1:07 - loss: 0.986 - ETA: 1:06 - loss: 0.985 - ETA: 1:04 - loss: 0.984 - ETA: 1:02 - loss: 0.984 - ETA: 1:00 - loss: 0.983 - ETA: 58s - loss: 0.983 - ETA: 56s - loss: 0.98 - ETA: 54s - loss: 0.98 - ETA: 52s - loss: 0.98 - ETA: 50s - loss: 0.98 - ETA: 48s - loss: 0.98 - ETA: 47s - loss: 0.98 - ETA: 45s - loss: 0.97 - ETA: 43s - loss: 0.97 - ETA: 41s - loss: 0.97 - ETA: 39s - loss: 0.97 - ETA: 37s - loss: 0.97 - ETA: 35s - loss: 0.97 - ETA: 33s - loss: 0.97 - ETA: 31s - loss: 0.97 - ETA: 30s - loss: 0.97 - ETA: 28s - loss: 0.97 - ETA: 26s - loss: 0.97 - ETA: 24s - loss: 0.97 - ETA: 22s - loss: 0.97 - ETA: 20s - loss: 0.97 - ETA: 18s - loss: 0.97 - ETA: 16s - loss: 0.97 - ETA: 14s - loss: 0.97 - ETA: 13s - loss: 0.97 - ETA: 11s - loss: 0.97 - ETA: 9s - loss: 0.9693 - ETA: 7s - loss: 0.968 - ETA: 5s - loss: 0.968 - ETA: 3s - loss: 0.967 - ETA: 1s - loss: 0.966 - ETA: 0s - loss: 0.966 - 224s 2s/step - loss: 0.9653 - val_loss: 0.7248\n",
      "\n",
      "Epoch 00001: val_loss did not improve from 0.08220\n",
      "Epoch 2/30\n",
      "111/111 [==============================] - ETA: 3:27 - loss: 0.535 - ETA: 3:25 - loss: 0.621 - ETA: 3:25 - loss: 0.624 - ETA: 3:23 - loss: 0.713 - ETA: 3:19 - loss: 0.746 - ETA: 3:16 - loss: 0.764 - ETA: 3:15 - loss: 0.764 - ETA: 3:14 - loss: 0.764 - ETA: 3:13 - loss: 0.762 - ETA: 3:13 - loss: 0.758 - ETA: 3:10 - loss: 0.754 - ETA: 3:07 - loss: 0.748 - ETA: 3:04 - loss: 0.742 - ETA: 3:02 - loss: 0.736 - ETA: 3:00 - loss: 0.733 - ETA: 2:58 - loss: 0.731 - ETA: 2:55 - loss: 0.729 - ETA: 2:53 - loss: 0.726 - ETA: 2:51 - loss: 0.721 - ETA: 2:49 - loss: 0.717 - ETA: 2:47 - loss: 0.715 - ETA: 2:45 - loss: 0.714 - ETA: 2:43 - loss: 0.712 - ETA: 2:41 - loss: 0.712 - ETA: 2:39 - loss: 0.711 - ETA: 2:37 - loss: 0.710 - ETA: 2:35 - loss: 0.709 - ETA: 2:33 - loss: 0.708 - ETA: 2:31 - loss: 0.708 - ETA: 2:29 - loss: 0.708 - ETA: 2:27 - loss: 0.707 - ETA: 2:25 - loss: 0.706 - ETA: 2:24 - loss: 0.704 - ETA: 2:22 - loss: 0.702 - ETA: 2:20 - loss: 0.700 - ETA: 2:18 - loss: 0.698 - ETA: 2:16 - loss: 0.696 - ETA: 2:14 - loss: 0.694 - ETA: 2:12 - loss: 0.692 - ETA: 2:10 - loss: 0.690 - ETA: 2:08 - loss: 0.688 - ETA: 2:06 - loss: 0.686 - ETA: 2:04 - loss: 0.685 - ETA: 2:03 - loss: 0.683 - ETA: 2:01 - loss: 0.682 - ETA: 1:59 - loss: 0.680 - ETA: 1:57 - loss: 0.679 - ETA: 1:55 - loss: 0.677 - ETA: 1:53 - loss: 0.676 - ETA: 1:51 - loss: 0.675 - ETA: 1:50 - loss: 0.673 - ETA: 1:48 - loss: 0.672 - ETA: 1:46 - loss: 0.671 - ETA: 1:44 - loss: 0.670 - ETA: 1:42 - loss: 0.668 - ETA: 1:40 - loss: 0.667 - ETA: 1:38 - loss: 0.666 - ETA: 1:37 - loss: 0.665 - ETA: 1:35 - loss: 0.663 - ETA: 1:33 - loss: 0.662 - ETA: 1:31 - loss: 0.661 - ETA: 1:29 - loss: 0.660 - ETA: 1:27 - loss: 0.659 - ETA: 1:25 - loss: 0.658 - ETA: 1:24 - loss: 0.657 - ETA: 1:22 - loss: 0.656 - ETA: 1:20 - loss: 0.655 - ETA: 1:18 - loss: 0.654 - ETA: 1:16 - loss: 0.652 - ETA: 1:15 - loss: 0.651 - ETA: 1:13 - loss: 0.650 - ETA: 1:11 - loss: 0.649 - ETA: 1:09 - loss: 0.648 - ETA: 1:07 - loss: 0.647 - ETA: 1:05 - loss: 0.646 - ETA: 1:04 - loss: 0.645 - ETA: 1:02 - loss: 0.644 - ETA: 1:00 - loss: 0.643 - ETA: 58s - loss: 0.642 - ETA: 56s - loss: 0.64 - ETA: 54s - loss: 0.64 - ETA: 53s - loss: 0.63 - ETA: 51s - loss: 0.63 - ETA: 49s - loss: 0.63 - ETA: 47s - loss: 0.63 - ETA: 45s - loss: 0.63 - ETA: 43s - loss: 0.63 - ETA: 42s - loss: 0.63 - ETA: 40s - loss: 0.63 - ETA: 38s - loss: 0.63 - ETA: 36s - loss: 0.63 - ETA: 34s - loss: 0.63 - ETA: 32s - loss: 0.63 - ETA: 31s - loss: 0.63 - ETA: 29s - loss: 0.62 - ETA: 27s - loss: 0.62 - ETA: 25s - loss: 0.62 - ETA: 23s - loss: 0.62 - ETA: 21s - loss: 0.62 - ETA: 20s - loss: 0.62 - ETA: 18s - loss: 0.62 - ETA: 16s - loss: 0.62 - ETA: 14s - loss: 0.62 - ETA: 12s - loss: 0.62 - ETA: 10s - loss: 0.62 - ETA: 9s - loss: 0.6255 - ETA: 7s - loss: 0.625 - ETA: 5s - loss: 0.624 - ETA: 3s - loss: 0.624 - ETA: 1s - loss: 0.624 - ETA: 0s - loss: 0.623 - 209s 2s/step - loss: 0.6236 - val_loss: 0.2970\n",
      "\n",
      "Epoch 00002: val_loss did not improve from 0.08220\n",
      "Epoch 3/30\n",
      "111/111 [==============================] - ETA: 3:22 - loss: 0.144 - ETA: 3:16 - loss: 0.483 - ETA: 3:14 - loss: 0.561 - ETA: 3:13 - loss: 0.573 - ETA: 3:11 - loss: 0.577 - ETA: 3:09 - loss: 0.573 - ETA: 3:08 - loss: 0.563 - ETA: 3:06 - loss: 0.549 - ETA: 3:04 - loss: 0.538 - ETA: 3:02 - loss: 0.528 - ETA: 3:01 - loss: 0.521 - ETA: 2:59 - loss: 0.515 - ETA: 2:57 - loss: 0.510 - ETA: 2:56 - loss: 0.504 - ETA: 2:54 - loss: 0.498 - ETA: 2:52 - loss: 0.494 - ETA: 2:50 - loss: 0.491 - ETA: 2:49 - loss: 0.488 - ETA: 2:48 - loss: 0.485 - ETA: 2:46 - loss: 0.482 - ETA: 2:46 - loss: 0.479 - ETA: 2:45 - loss: 0.478 - ETA: 2:43 - loss: 0.476 - ETA: 2:41 - loss: 0.475 - ETA: 2:39 - loss: 0.473 - ETA: 2:37 - loss: 0.472 - ETA: 2:35 - loss: 0.471 - ETA: 2:33 - loss: 0.469 - ETA: 2:31 - loss: 0.468 - ETA: 2:30 - loss: 0.466 - ETA: 2:28 - loss: 0.465 - ETA: 2:26 - loss: 0.463 - ETA: 2:24 - loss: 0.462 - ETA: 2:22 - loss: 0.461 - ETA: 2:20 - loss: 0.460 - ETA: 2:18 - loss: 0.459 - ETA: 2:16 - loss: 0.459 - ETA: 2:14 - loss: 0.458 - ETA: 2:12 - loss: 0.458 - ETA: 2:11 - loss: 0.458 - ETA: 2:09 - loss: 0.457 - ETA: 2:07 - loss: 0.457 - ETA: 2:05 - loss: 0.457 - ETA: 2:03 - loss: 0.457 - ETA: 2:01 - loss: 0.457 - ETA: 1:59 - loss: 0.457 - ETA: 1:57 - loss: 0.456 - ETA: 1:55 - loss: 0.456 - ETA: 1:54 - loss: 0.456 - ETA: 1:52 - loss: 0.457 - ETA: 1:50 - loss: 0.457 - ETA: 1:48 - loss: 0.457 - ETA: 1:46 - loss: 0.457 - ETA: 1:44 - loss: 0.457 - ETA: 1:42 - loss: 0.457 - ETA: 1:41 - loss: 0.457 - ETA: 1:39 - loss: 0.457 - ETA: 1:37 - loss: 0.457 - ETA: 1:35 - loss: 0.457 - ETA: 1:33 - loss: 0.457 - ETA: 1:31 - loss: 0.456 - ETA: 1:30 - loss: 0.456 - ETA: 1:28 - loss: 0.456 - ETA: 1:26 - loss: 0.456 - ETA: 1:24 - loss: 0.456 - ETA: 1:22 - loss: 0.455 - ETA: 1:20 - loss: 0.455 - ETA: 1:19 - loss: 0.455 - ETA: 1:17 - loss: 0.455 - ETA: 1:15 - loss: 0.455 - ETA: 1:13 - loss: 0.455 - ETA: 1:11 - loss: 0.455 - ETA: 1:09 - loss: 0.455 - ETA: 1:07 - loss: 0.455 - ETA: 1:06 - loss: 0.455 - ETA: 1:04 - loss: 0.455 - ETA: 1:02 - loss: 0.455 - ETA: 1:00 - loss: 0.455 - ETA: 58s - loss: 0.455 - ETA: 56s - loss: 0.45 - ETA: 55s - loss: 0.45 - ETA: 53s - loss: 0.45 - ETA: 51s - loss: 0.45 - ETA: 49s - loss: 0.45 - ETA: 47s - loss: 0.45 - ETA: 45s - loss: 0.45 - ETA: 44s - loss: 0.45 - ETA: 42s - loss: 0.45 - ETA: 40s - loss: 0.45 - ETA: 38s - loss: 0.45 - ETA: 36s - loss: 0.45 - ETA: 34s - loss: 0.45 - ETA: 33s - loss: 0.45 - ETA: 31s - loss: 0.45 - ETA: 29s - loss: 0.45 - ETA: 27s - loss: 0.45 - ETA: 25s - loss: 0.45 - ETA: 23s - loss: 0.45 - ETA: 22s - loss: 0.45 - ETA: 20s - loss: 0.45 - ETA: 18s - loss: 0.45 - ETA: 16s - loss: 0.45 - ETA: 14s - loss: 0.45 - ETA: 12s - loss: 0.45 - ETA: 11s - loss: 0.44 - ETA: 9s - loss: 0.4497 - ETA: 7s - loss: 0.449 - ETA: 5s - loss: 0.449 - ETA: 3s - loss: 0.449 - ETA: 1s - loss: 0.448 - ETA: 0s - loss: 0.448 - 209s 2s/step - loss: 0.4484 - val_loss: 0.2437\n",
      "\n",
      "Epoch 00003: val_loss did not improve from 0.08220\n",
      "Epoch 4/30\n",
      "111/111 [==============================] - ETA: 3:21 - loss: 0.604 - ETA: 3:15 - loss: 0.485 - ETA: 3:15 - loss: 0.459 - ETA: 3:13 - loss: 0.437 - ETA: 3:12 - loss: 0.419 - ETA: 3:10 - loss: 0.408 - ETA: 3:08 - loss: 0.399 - ETA: 3:07 - loss: 0.393 - ETA: 3:05 - loss: 0.393 - ETA: 3:03 - loss: 0.396 - ETA: 3:01 - loss: 0.399 - ETA: 2:59 - loss: 0.400 - ETA: 2:57 - loss: 0.402 - ETA: 2:56 - loss: 0.403 - ETA: 2:54 - loss: 0.403 - ETA: 2:52 - loss: 0.403 - ETA: 2:50 - loss: 0.402 - ETA: 2:48 - loss: 0.402 - ETA: 2:47 - loss: 0.401 - ETA: 2:45 - loss: 0.400 - ETA: 2:43 - loss: 0.399 - ETA: 2:42 - loss: 0.397 - ETA: 2:40 - loss: 0.396 - ETA: 2:38 - loss: 0.394 - ETA: 2:36 - loss: 0.392 - ETA: 2:35 - loss: 0.391 - ETA: 2:33 - loss: 0.389 - ETA: 2:31 - loss: 0.388 - ETA: 2:29 - loss: 0.387 - ETA: 2:27 - loss: 0.386 - ETA: 2:25 - loss: 0.385 - ETA: 2:24 - loss: 0.384 - ETA: 2:22 - loss: 0.383 - ETA: 2:20 - loss: 0.382 - ETA: 2:18 - loss: 0.382 - ETA: 2:17 - loss: 0.382 - ETA: 2:15 - loss: 0.382 - ETA: 2:13 - loss: 0.383 - ETA: 2:12 - loss: 0.383 - ETA: 2:10 - loss: 0.384 - ETA: 2:08 - loss: 0.384 - ETA: 2:06 - loss: 0.384 - ETA: 2:04 - loss: 0.385 - ETA: 2:03 - loss: 0.385 - ETA: 2:01 - loss: 0.385 - ETA: 1:59 - loss: 0.385 - ETA: 1:57 - loss: 0.384 - ETA: 1:55 - loss: 0.384 - ETA: 1:53 - loss: 0.384 - ETA: 1:51 - loss: 0.384 - ETA: 1:50 - loss: 0.384 - ETA: 1:48 - loss: 0.385 - ETA: 1:46 - loss: 0.385 - ETA: 1:44 - loss: 0.386 - ETA: 1:42 - loss: 0.386 - ETA: 1:40 - loss: 0.386 - ETA: 1:39 - loss: 0.387 - ETA: 1:37 - loss: 0.387 - ETA: 1:35 - loss: 0.387 - ETA: 1:33 - loss: 0.388 - ETA: 1:31 - loss: 0.388 - ETA: 1:30 - loss: 0.388 - ETA: 1:28 - loss: 0.388 - ETA: 1:26 - loss: 0.388 - ETA: 1:24 - loss: 0.388 - ETA: 1:23 - loss: 0.389 - ETA: 1:21 - loss: 0.389 - ETA: 1:19 - loss: 0.389 - ETA: 1:17 - loss: 0.389 - ETA: 1:15 - loss: 0.389 - ETA: 1:13 - loss: 0.389 - ETA: 1:11 - loss: 0.389 - ETA: 1:10 - loss: 0.389 - ETA: 1:08 - loss: 0.389 - ETA: 1:06 - loss: 0.389 - ETA: 1:04 - loss: 0.389 - ETA: 1:02 - loss: 0.389 - ETA: 1:00 - loss: 0.389 - ETA: 58s - loss: 0.389 - ETA: 57s - loss: 0.38 - ETA: 55s - loss: 0.38 - ETA: 53s - loss: 0.38 - ETA: 51s - loss: 0.38 - ETA: 49s - loss: 0.38 - ETA: 47s - loss: 0.38 - ETA: 46s - loss: 0.38 - ETA: 44s - loss: 0.38 - ETA: 42s - loss: 0.38 - ETA: 40s - loss: 0.38 - ETA: 38s - loss: 0.38 - ETA: 36s - loss: 0.38 - ETA: 34s - loss: 0.38 - ETA: 33s - loss: 0.38 - ETA: 31s - loss: 0.38 - ETA: 29s - loss: 0.38 - ETA: 27s - loss: 0.38 - ETA: 25s - loss: 0.38 - ETA: 23s - loss: 0.38 - ETA: 22s - loss: 0.38 - ETA: 20s - loss: 0.38 - ETA: 18s - loss: 0.38 - ETA: 16s - loss: 0.38 - ETA: 14s - loss: 0.38 - ETA: 12s - loss: 0.38 - ETA: 11s - loss: 0.38 - ETA: 9s - loss: 0.3861 - ETA: 7s - loss: 0.386 - ETA: 5s - loss: 0.386 - ETA: 3s - loss: 0.385 - ETA: 1s - loss: 0.385 - ETA: 0s - loss: 0.385 - 210s 2s/step - loss: 0.3857 - val_loss: 0.2049\n",
      "\n",
      "Epoch 00004: val_loss did not improve from 0.08220\n",
      "Epoch 5/30\n",
      "111/111 [==============================] - ETA: 3:44 - loss: 0.297 - ETA: 4:02 - loss: 0.295 - ETA: 3:57 - loss: 0.306 - ETA: 3:50 - loss: 0.305 - ETA: 3:42 - loss: 0.313 - ETA: 3:35 - loss: 0.314 - ETA: 3:28 - loss: 0.336 - ETA: 3:23 - loss: 0.347 - ETA: 3:20 - loss: 0.356 - ETA: 3:17 - loss: 0.362 - ETA: 3:13 - loss: 0.367 - ETA: 3:10 - loss: 0.372 - ETA: 3:07 - loss: 0.378 - ETA: 3:05 - loss: 0.382 - ETA: 3:02 - loss: 0.385 - ETA: 3:00 - loss: 0.388 - ETA: 2:57 - loss: 0.391 - ETA: 2:55 - loss: 0.394 - ETA: 2:53 - loss: 0.396 - ETA: 2:51 - loss: 0.398 - ETA: 2:48 - loss: 0.399 - ETA: 2:46 - loss: 0.401 - ETA: 2:44 - loss: 0.402 - ETA: 2:42 - loss: 0.402 - ETA: 2:40 - loss: 0.404 - ETA: 2:38 - loss: 0.404 - ETA: 2:36 - loss: 0.406 - ETA: 2:34 - loss: 0.407 - ETA: 2:32 - loss: 0.409 - ETA: 2:30 - loss: 0.410 - ETA: 2:28 - loss: 0.411 - ETA: 2:26 - loss: 0.412 - ETA: 2:24 - loss: 0.413 - ETA: 2:22 - loss: 0.414 - ETA: 2:20 - loss: 0.416 - ETA: 2:18 - loss: 0.418 - ETA: 2:16 - loss: 0.420 - ETA: 2:14 - loss: 0.422 - ETA: 2:13 - loss: 0.423 - ETA: 2:11 - loss: 0.425 - ETA: 2:09 - loss: 0.426 - ETA: 2:07 - loss: 0.427 - ETA: 2:05 - loss: 0.429 - ETA: 2:03 - loss: 0.430 - ETA: 2:01 - loss: 0.430 - ETA: 1:59 - loss: 0.431 - ETA: 1:57 - loss: 0.432 - ETA: 1:56 - loss: 0.433 - ETA: 1:54 - loss: 0.433 - ETA: 1:52 - loss: 0.434 - ETA: 1:50 - loss: 0.434 - ETA: 1:48 - loss: 0.435 - ETA: 1:47 - loss: 0.435 - ETA: 1:45 - loss: 0.435 - ETA: 1:43 - loss: 0.435 - ETA: 1:41 - loss: 0.435 - ETA: 1:39 - loss: 0.435 - ETA: 1:37 - loss: 0.435 - ETA: 1:36 - loss: 0.436 - ETA: 1:34 - loss: 0.436 - ETA: 1:32 - loss: 0.436 - ETA: 1:30 - loss: 0.436 - ETA: 1:28 - loss: 0.436 - ETA: 1:26 - loss: 0.436 - ETA: 1:24 - loss: 0.436 - ETA: 1:23 - loss: 0.436 - ETA: 1:21 - loss: 0.436 - ETA: 1:19 - loss: 0.436 - ETA: 1:17 - loss: 0.436 - ETA: 1:15 - loss: 0.436 - ETA: 1:13 - loss: 0.436 - ETA: 1:11 - loss: 0.436 - ETA: 1:10 - loss: 0.436 - ETA: 1:08 - loss: 0.436 - ETA: 1:06 - loss: 0.436 - ETA: 1:04 - loss: 0.436 - ETA: 1:02 - loss: 0.436 - ETA: 1:00 - loss: 0.436 - ETA: 58s - loss: 0.436 - ETA: 57s - loss: 0.43 - ETA: 55s - loss: 0.43 - ETA: 53s - loss: 0.43 - ETA: 51s - loss: 0.43 - ETA: 49s - loss: 0.43 - ETA: 47s - loss: 0.43 - ETA: 46s - loss: 0.43 - ETA: 44s - loss: 0.43 - ETA: 42s - loss: 0.43 - ETA: 40s - loss: 0.43 - ETA: 38s - loss: 0.43 - ETA: 36s - loss: 0.43 - ETA: 34s - loss: 0.43 - ETA: 33s - loss: 0.43 - ETA: 31s - loss: 0.43 - ETA: 29s - loss: 0.43 - ETA: 27s - loss: 0.43 - ETA: 25s - loss: 0.43 - ETA: 23s - loss: 0.43 - ETA: 22s - loss: 0.43 - ETA: 20s - loss: 0.43 - ETA: 18s - loss: 0.43 - ETA: 16s - loss: 0.43 - ETA: 14s - loss: 0.43 - ETA: 12s - loss: 0.43 - ETA: 11s - loss: 0.43 - ETA: 9s - loss: 0.4308 - ETA: 7s - loss: 0.430 - ETA: 5s - loss: 0.430 - ETA: 3s - loss: 0.430 - ETA: 1s - loss: 0.430 - ETA: 0s - loss: 0.429 - 210s 2s/step - loss: 0.4296 - val_loss: 0.2165\n",
      "\n",
      "Epoch 00005: val_loss did not improve from 0.08220\n",
      "Epoch 6/30\n",
      "111/111 [==============================] - ETA: 3:17 - loss: 0.261 - ETA: 3:23 - loss: 0.258 - ETA: 3:18 - loss: 0.267 - ETA: 3:21 - loss: 0.267 - ETA: 3:19 - loss: 0.281 - ETA: 3:17 - loss: 0.293 - ETA: 3:15 - loss: 0.301 - ETA: 3:13 - loss: 0.306 - ETA: 3:11 - loss: 0.308 - ETA: 3:08 - loss: 0.307 - ETA: 3:05 - loss: 0.305 - ETA: 3:03 - loss: 0.306 - ETA: 3:01 - loss: 0.309 - ETA: 2:59 - loss: 0.313 - ETA: 2:56 - loss: 0.316 - ETA: 2:54 - loss: 0.318 - ETA: 2:52 - loss: 0.319 - ETA: 2:50 - loss: 0.320 - ETA: 2:48 - loss: 0.322 - ETA: 2:46 - loss: 0.323 - ETA: 2:44 - loss: 0.324 - ETA: 2:42 - loss: 0.326 - ETA: 2:41 - loss: 0.327 - ETA: 2:39 - loss: 0.327 - ETA: 2:37 - loss: 0.328 - ETA: 2:35 - loss: 0.329 - ETA: 2:33 - loss: 0.330 - ETA: 2:31 - loss: 0.331 - ETA: 2:29 - loss: 0.332 - ETA: 2:27 - loss: 0.333 - ETA: 2:26 - loss: 0.333 - ETA: 2:24 - loss: 0.334 - ETA: 2:22 - loss: 0.335 - ETA: 2:20 - loss: 0.335 - ETA: 2:18 - loss: 0.335 - ETA: 2:16 - loss: 0.336 - ETA: 2:14 - loss: 0.337 - ETA: 2:13 - loss: 0.337 - ETA: 2:11 - loss: 0.337 - ETA: 2:09 - loss: 0.338 - ETA: 2:07 - loss: 0.338 - ETA: 2:05 - loss: 0.338 - ETA: 2:04 - loss: 0.338 - ETA: 2:02 - loss: 0.339 - ETA: 2:00 - loss: 0.339 - ETA: 1:58 - loss: 0.339 - ETA: 1:56 - loss: 0.339 - ETA: 1:54 - loss: 0.339 - ETA: 1:52 - loss: 0.339 - ETA: 1:51 - loss: 0.339 - ETA: 1:49 - loss: 0.339 - ETA: 1:47 - loss: 0.339 - ETA: 1:45 - loss: 0.339 - ETA: 1:43 - loss: 0.339 - ETA: 1:42 - loss: 0.339 - ETA: 1:40 - loss: 0.339 - ETA: 1:38 - loss: 0.339 - ETA: 1:36 - loss: 0.339 - ETA: 1:34 - loss: 0.339 - ETA: 1:32 - loss: 0.339 - ETA: 1:31 - loss: 0.339 - ETA: 1:29 - loss: 0.339 - ETA: 1:27 - loss: 0.339 - ETA: 1:25 - loss: 0.339 - ETA: 1:23 - loss: 0.339 - ETA: 1:21 - loss: 0.340 - ETA: 1:20 - loss: 0.340 - ETA: 1:18 - loss: 0.340 - ETA: 1:16 - loss: 0.341 - ETA: 1:14 - loss: 0.341 - ETA: 1:12 - loss: 0.341 - ETA: 1:11 - loss: 0.342 - ETA: 1:09 - loss: 0.342 - ETA: 1:07 - loss: 0.343 - ETA: 1:05 - loss: 0.343 - ETA: 1:03 - loss: 0.343 - ETA: 1:02 - loss: 0.344 - ETA: 1:00 - loss: 0.344 - ETA: 58s - loss: 0.344 - ETA: 56s - loss: 0.34 - ETA: 54s - loss: 0.34 - ETA: 52s - loss: 0.34 - ETA: 51s - loss: 0.34 - ETA: 49s - loss: 0.34 - ETA: 47s - loss: 0.34 - ETA: 45s - loss: 0.34 - ETA: 43s - loss: 0.34 - ETA: 41s - loss: 0.34 - ETA: 40s - loss: 0.34 - ETA: 38s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 34s - loss: 0.34 - ETA: 32s - loss: 0.34 - ETA: 30s - loss: 0.34 - ETA: 29s - loss: 0.35 - ETA: 27s - loss: 0.35 - ETA: 25s - loss: 0.35 - ETA: 23s - loss: 0.35 - ETA: 21s - loss: 0.35 - ETA: 20s - loss: 0.35 - ETA: 18s - loss: 0.35 - ETA: 16s - loss: 0.35 - ETA: 14s - loss: 0.35 - ETA: 12s - loss: 0.35 - ETA: 10s - loss: 0.35 - ETA: 9s - loss: 0.3523 - ETA: 7s - loss: 0.352 - ETA: 5s - loss: 0.352 - ETA: 3s - loss: 0.352 - ETA: 1s - loss: 0.353 - ETA: 0s - loss: 0.353 - 208s 2s/step - loss: 0.3534 - val_loss: 0.1880\n",
      "\n",
      "Epoch 00006: val_loss did not improve from 0.08220\n",
      "Epoch 7/30\n",
      "111/111 [==============================] - ETA: 3:24 - loss: 0.367 - ETA: 3:09 - loss: 0.299 - ETA: 3:14 - loss: 0.260 - ETA: 3:12 - loss: 0.246 - ETA: 3:10 - loss: 0.249 - ETA: 3:09 - loss: 0.271 - ETA: 3:06 - loss: 0.291 - ETA: 3:05 - loss: 0.307 - ETA: 3:03 - loss: 0.323 - ETA: 3:01 - loss: 0.332 - ETA: 3:00 - loss: 0.342 - ETA: 2:58 - loss: 0.347 - ETA: 2:56 - loss: 0.350 - ETA: 2:54 - loss: 0.353 - ETA: 2:53 - loss: 0.354 - ETA: 2:51 - loss: 0.355 - ETA: 2:49 - loss: 0.355 - ETA: 2:47 - loss: 0.356 - ETA: 2:45 - loss: 0.355 - ETA: 2:44 - loss: 0.354 - ETA: 2:42 - loss: 0.353 - ETA: 2:40 - loss: 0.352 - ETA: 2:39 - loss: 0.351 - ETA: 2:37 - loss: 0.352 - ETA: 2:36 - loss: 0.352 - ETA: 2:34 - loss: 0.353 - ETA: 2:32 - loss: 0.353 - ETA: 2:31 - loss: 0.354 - ETA: 2:29 - loss: 0.354 - ETA: 2:27 - loss: 0.355 - ETA: 2:25 - loss: 0.356 - ETA: 2:23 - loss: 0.358 - ETA: 2:21 - loss: 0.359 - ETA: 2:19 - loss: 0.360 - ETA: 2:17 - loss: 0.361 - ETA: 2:16 - loss: 0.362 - ETA: 2:14 - loss: 0.363 - ETA: 2:12 - loss: 0.363 - ETA: 2:10 - loss: 0.364 - ETA: 2:08 - loss: 0.365 - ETA: 2:07 - loss: 0.366 - ETA: 2:05 - loss: 0.367 - ETA: 2:03 - loss: 0.369 - ETA: 2:01 - loss: 0.370 - ETA: 1:59 - loss: 0.371 - ETA: 1:57 - loss: 0.372 - ETA: 1:56 - loss: 0.372 - ETA: 1:54 - loss: 0.373 - ETA: 1:52 - loss: 0.374 - ETA: 1:50 - loss: 0.374 - ETA: 1:48 - loss: 0.375 - ETA: 1:46 - loss: 0.375 - ETA: 1:45 - loss: 0.376 - ETA: 1:43 - loss: 0.376 - ETA: 1:41 - loss: 0.377 - ETA: 1:39 - loss: 0.378 - ETA: 1:37 - loss: 0.378 - ETA: 1:36 - loss: 0.379 - ETA: 1:34 - loss: 0.379 - ETA: 1:32 - loss: 0.380 - ETA: 1:30 - loss: 0.381 - ETA: 1:28 - loss: 0.381 - ETA: 1:26 - loss: 0.381 - ETA: 1:25 - loss: 0.382 - ETA: 1:23 - loss: 0.382 - ETA: 1:21 - loss: 0.382 - ETA: 1:19 - loss: 0.383 - ETA: 1:17 - loss: 0.383 - ETA: 1:16 - loss: 0.383 - ETA: 1:14 - loss: 0.383 - ETA: 1:12 - loss: 0.384 - ETA: 1:10 - loss: 0.384 - ETA: 1:08 - loss: 0.384 - ETA: 1:06 - loss: 0.385 - ETA: 1:05 - loss: 0.385 - ETA: 1:03 - loss: 0.385 - ETA: 1:01 - loss: 0.385 - ETA: 59s - loss: 0.386 - ETA: 57s - loss: 0.38 - ETA: 56s - loss: 0.38 - ETA: 54s - loss: 0.38 - ETA: 52s - loss: 0.38 - ETA: 50s - loss: 0.38 - ETA: 48s - loss: 0.38 - ETA: 47s - loss: 0.38 - ETA: 45s - loss: 0.38 - ETA: 43s - loss: 0.38 - ETA: 41s - loss: 0.38 - ETA: 39s - loss: 0.38 - ETA: 38s - loss: 0.38 - ETA: 36s - loss: 0.38 - ETA: 34s - loss: 0.38 - ETA: 32s - loss: 0.38 - ETA: 30s - loss: 0.38 - ETA: 29s - loss: 0.38 - ETA: 27s - loss: 0.38 - ETA: 25s - loss: 0.38 - ETA: 23s - loss: 0.38 - ETA: 21s - loss: 0.38 - ETA: 19s - loss: 0.38 - ETA: 18s - loss: 0.38 - ETA: 16s - loss: 0.38 - ETA: 14s - loss: 0.38 - ETA: 12s - loss: 0.38 - ETA: 10s - loss: 0.38 - ETA: 9s - loss: 0.3896 - ETA: 7s - loss: 0.389 - ETA: 5s - loss: 0.389 - ETA: 3s - loss: 0.389 - ETA: 1s - loss: 0.389 - ETA: 0s - loss: 0.389 - 207s 2s/step - loss: 0.3894 - val_loss: 0.1781\n",
      "\n",
      "Epoch 00007: val_loss did not improve from 0.08220\n",
      "Epoch 8/30\n",
      "111/111 [==============================] - ETA: 3:13 - loss: 1.249 - ETA: 3:26 - loss: 1.016 - ETA: 3:19 - loss: 0.911 - ETA: 3:15 - loss: 0.829 - ETA: 3:11 - loss: 0.761 - ETA: 3:10 - loss: 0.719 - ETA: 3:08 - loss: 0.696 - ETA: 3:06 - loss: 0.675 - ETA: 3:04 - loss: 0.654 - ETA: 3:02 - loss: 0.636 - ETA: 3:00 - loss: 0.618 - ETA: 2:58 - loss: 0.607 - ETA: 2:56 - loss: 0.596 - ETA: 2:55 - loss: 0.584 - ETA: 2:52 - loss: 0.573 - ETA: 2:51 - loss: 0.563 - ETA: 2:49 - loss: 0.554 - ETA: 2:47 - loss: 0.548 - ETA: 2:45 - loss: 0.541 - ETA: 2:44 - loss: 0.535 - ETA: 2:42 - loss: 0.529 - ETA: 2:40 - loss: 0.524 - ETA: 2:38 - loss: 0.519 - ETA: 2:36 - loss: 0.514 - ETA: 2:34 - loss: 0.510 - ETA: 2:33 - loss: 0.506 - ETA: 2:32 - loss: 0.502 - ETA: 2:30 - loss: 0.499 - ETA: 2:28 - loss: 0.497 - ETA: 2:27 - loss: 0.494 - ETA: 2:25 - loss: 0.491 - ETA: 2:23 - loss: 0.489 - ETA: 2:21 - loss: 0.487 - ETA: 2:19 - loss: 0.484 - ETA: 2:18 - loss: 0.481 - ETA: 2:16 - loss: 0.479 - ETA: 2:14 - loss: 0.477 - ETA: 2:12 - loss: 0.475 - ETA: 2:10 - loss: 0.473 - ETA: 2:08 - loss: 0.471 - ETA: 2:07 - loss: 0.470 - ETA: 2:05 - loss: 0.469 - ETA: 2:03 - loss: 0.467 - ETA: 2:01 - loss: 0.466 - ETA: 2:00 - loss: 0.465 - ETA: 1:58 - loss: 0.463 - ETA: 1:56 - loss: 0.462 - ETA: 1:54 - loss: 0.461 - ETA: 1:52 - loss: 0.459 - ETA: 1:50 - loss: 0.458 - ETA: 1:49 - loss: 0.457 - ETA: 1:47 - loss: 0.456 - ETA: 1:45 - loss: 0.454 - ETA: 1:43 - loss: 0.453 - ETA: 1:41 - loss: 0.452 - ETA: 1:39 - loss: 0.451 - ETA: 1:38 - loss: 0.450 - ETA: 1:36 - loss: 0.449 - ETA: 1:34 - loss: 0.448 - ETA: 1:32 - loss: 0.447 - ETA: 1:30 - loss: 0.446 - ETA: 1:29 - loss: 0.444 - ETA: 1:27 - loss: 0.443 - ETA: 1:25 - loss: 0.442 - ETA: 1:23 - loss: 0.441 - ETA: 1:21 - loss: 0.440 - ETA: 1:19 - loss: 0.439 - ETA: 1:18 - loss: 0.438 - ETA: 1:16 - loss: 0.437 - ETA: 1:14 - loss: 0.436 - ETA: 1:12 - loss: 0.436 - ETA: 1:10 - loss: 0.435 - ETA: 1:08 - loss: 0.434 - ETA: 1:07 - loss: 0.433 - ETA: 1:05 - loss: 0.432 - ETA: 1:03 - loss: 0.431 - ETA: 1:01 - loss: 0.430 - ETA: 59s - loss: 0.430 - ETA: 58s - loss: 0.42 - ETA: 56s - loss: 0.42 - ETA: 54s - loss: 0.42 - ETA: 52s - loss: 0.42 - ETA: 50s - loss: 0.42 - ETA: 48s - loss: 0.42 - ETA: 47s - loss: 0.42 - ETA: 45s - loss: 0.42 - ETA: 43s - loss: 0.42 - ETA: 41s - loss: 0.42 - ETA: 39s - loss: 0.42 - ETA: 38s - loss: 0.42 - ETA: 36s - loss: 0.42 - ETA: 34s - loss: 0.42 - ETA: 32s - loss: 0.42 - ETA: 30s - loss: 0.41 - ETA: 29s - loss: 0.41 - ETA: 27s - loss: 0.41 - ETA: 25s - loss: 0.41 - ETA: 23s - loss: 0.41 - ETA: 21s - loss: 0.41 - ETA: 19s - loss: 0.41 - ETA: 18s - loss: 0.41 - ETA: 16s - loss: 0.41 - ETA: 14s - loss: 0.41 - ETA: 12s - loss: 0.41 - ETA: 10s - loss: 0.41 - ETA: 9s - loss: 0.4152 - ETA: 7s - loss: 0.414 - ETA: 5s - loss: 0.414 - ETA: 3s - loss: 0.414 - ETA: 1s - loss: 0.413 - ETA: 0s - loss: 0.413 - 207s 2s/step - loss: 0.4128 - val_loss: 0.1730\n",
      "\n",
      "Epoch 00008: val_loss did not improve from 0.08220\n",
      "Epoch 9/30\n",
      "111/111 [==============================] - ETA: 3:16 - loss: 0.343 - ETA: 3:22 - loss: 0.315 - ETA: 3:17 - loss: 0.297 - ETA: 3:13 - loss: 0.315 - ETA: 3:11 - loss: 0.319 - ETA: 3:10 - loss: 0.327 - ETA: 3:07 - loss: 0.335 - ETA: 3:06 - loss: 0.343 - ETA: 3:04 - loss: 0.347 - ETA: 3:02 - loss: 0.348 - ETA: 3:00 - loss: 0.350 - ETA: 2:58 - loss: 0.356 - ETA: 2:57 - loss: 0.361 - ETA: 2:55 - loss: 0.364 - ETA: 2:53 - loss: 0.365 - ETA: 2:51 - loss: 0.365 - ETA: 2:49 - loss: 0.364 - ETA: 2:47 - loss: 0.363 - ETA: 2:46 - loss: 0.362 - ETA: 2:44 - loss: 0.362 - ETA: 2:42 - loss: 0.363 - ETA: 2:40 - loss: 0.364 - ETA: 2:38 - loss: 0.364 - ETA: 2:36 - loss: 0.364 - ETA: 2:35 - loss: 0.365 - ETA: 2:33 - loss: 0.365 - ETA: 2:31 - loss: 0.364 - ETA: 2:29 - loss: 0.364 - ETA: 2:27 - loss: 0.363 - ETA: 2:26 - loss: 0.364 - ETA: 2:24 - loss: 0.364 - ETA: 2:22 - loss: 0.363 - ETA: 2:20 - loss: 0.363 - ETA: 2:18 - loss: 0.363 - ETA: 2:17 - loss: 0.363 - ETA: 2:15 - loss: 0.363 - ETA: 2:13 - loss: 0.363 - ETA: 2:11 - loss: 0.363 - ETA: 2:09 - loss: 0.363 - ETA: 2:08 - loss: 0.363 - ETA: 2:06 - loss: 0.363 - ETA: 2:04 - loss: 0.363 - ETA: 2:02 - loss: 0.363 - ETA: 2:01 - loss: 0.363 - ETA: 1:59 - loss: 0.362 - ETA: 1:57 - loss: 0.362 - ETA: 1:55 - loss: 0.362 - ETA: 1:53 - loss: 0.362 - ETA: 1:51 - loss: 0.362 - ETA: 1:50 - loss: 0.361 - ETA: 1:48 - loss: 0.361 - ETA: 1:46 - loss: 0.361 - ETA: 1:44 - loss: 0.360 - ETA: 1:42 - loss: 0.360 - ETA: 1:41 - loss: 0.360 - ETA: 1:39 - loss: 0.359 - ETA: 1:37 - loss: 0.359 - ETA: 1:35 - loss: 0.359 - ETA: 1:33 - loss: 0.358 - ETA: 1:32 - loss: 0.358 - ETA: 1:30 - loss: 0.358 - ETA: 1:28 - loss: 0.357 - ETA: 1:26 - loss: 0.357 - ETA: 1:25 - loss: 0.357 - ETA: 1:23 - loss: 0.356 - ETA: 1:21 - loss: 0.356 - ETA: 1:19 - loss: 0.356 - ETA: 1:17 - loss: 0.356 - ETA: 1:16 - loss: 0.356 - ETA: 1:14 - loss: 0.356 - ETA: 1:12 - loss: 0.356 - ETA: 1:10 - loss: 0.356 - ETA: 1:08 - loss: 0.355 - ETA: 1:06 - loss: 0.355 - ETA: 1:05 - loss: 0.355 - ETA: 1:03 - loss: 0.355 - ETA: 1:01 - loss: 0.355 - ETA: 59s - loss: 0.355 - ETA: 57s - loss: 0.35 - ETA: 56s - loss: 0.35 - ETA: 54s - loss: 0.35 - ETA: 52s - loss: 0.35 - ETA: 50s - loss: 0.35 - ETA: 48s - loss: 0.35 - ETA: 47s - loss: 0.35 - ETA: 45s - loss: 0.35 - ETA: 43s - loss: 0.35 - ETA: 41s - loss: 0.35 - ETA: 39s - loss: 0.35 - ETA: 37s - loss: 0.35 - ETA: 36s - loss: 0.35 - ETA: 34s - loss: 0.35 - ETA: 32s - loss: 0.35 - ETA: 30s - loss: 0.35 - ETA: 28s - loss: 0.35 - ETA: 27s - loss: 0.35 - ETA: 25s - loss: 0.35 - ETA: 23s - loss: 0.35 - ETA: 21s - loss: 0.35 - ETA: 19s - loss: 0.35 - ETA: 18s - loss: 0.35 - ETA: 16s - loss: 0.35 - ETA: 14s - loss: 0.35 - ETA: 12s - loss: 0.35 - ETA: 10s - loss: 0.35 - ETA: 9s - loss: 0.3554 - ETA: 7s - loss: 0.355 - ETA: 5s - loss: 0.355 - ETA: 3s - loss: 0.355 - ETA: 1s - loss: 0.355 - ETA: 0s - loss: 0.355 - 206s 2s/step - loss: 0.3552 - val_loss: 0.1769\n",
      "\n",
      "Epoch 00009: val_loss did not improve from 0.08220\n",
      "Epoch 10/30\n",
      "111/111 [==============================] - ETA: 3:20 - loss: 0.123 - ETA: 3:14 - loss: 0.114 - ETA: 3:13 - loss: 0.177 - ETA: 3:11 - loss: 0.206 - ETA: 3:09 - loss: 0.231 - ETA: 3:07 - loss: 0.248 - ETA: 3:06 - loss: 0.255 - ETA: 3:04 - loss: 0.261 - ETA: 3:02 - loss: 0.273 - ETA: 3:00 - loss: 0.286 - ETA: 2:59 - loss: 0.295 - ETA: 2:57 - loss: 0.302 - ETA: 2:56 - loss: 0.306 - ETA: 2:54 - loss: 0.309 - ETA: 2:53 - loss: 0.315 - ETA: 2:52 - loss: 0.320 - ETA: 2:50 - loss: 0.324 - ETA: 2:48 - loss: 0.328 - ETA: 2:47 - loss: 0.331 - ETA: 2:45 - loss: 0.334 - ETA: 2:43 - loss: 0.337 - ETA: 2:41 - loss: 0.340 - ETA: 2:39 - loss: 0.342 - ETA: 2:37 - loss: 0.344 - ETA: 2:36 - loss: 0.346 - ETA: 2:34 - loss: 0.347 - ETA: 2:32 - loss: 0.349 - ETA: 2:30 - loss: 0.350 - ETA: 2:28 - loss: 0.350 - ETA: 2:26 - loss: 0.351 - ETA: 2:25 - loss: 0.351 - ETA: 2:23 - loss: 0.351 - ETA: 2:21 - loss: 0.351 - ETA: 2:19 - loss: 0.351 - ETA: 2:17 - loss: 0.351 - ETA: 2:15 - loss: 0.351 - ETA: 2:13 - loss: 0.351 - ETA: 2:12 - loss: 0.352 - ETA: 2:10 - loss: 0.353 - ETA: 2:08 - loss: 0.354 - ETA: 2:06 - loss: 0.354 - ETA: 2:04 - loss: 0.355 - ETA: 2:02 - loss: 0.355 - ETA: 2:01 - loss: 0.355 - ETA: 1:59 - loss: 0.355 - ETA: 1:57 - loss: 0.356 - ETA: 1:55 - loss: 0.356 - ETA: 1:53 - loss: 0.356 - ETA: 1:52 - loss: 0.356 - ETA: 1:50 - loss: 0.356 - ETA: 1:48 - loss: 0.356 - ETA: 1:46 - loss: 0.356 - ETA: 1:44 - loss: 0.356 - ETA: 1:42 - loss: 0.355 - ETA: 1:41 - loss: 0.355 - ETA: 1:39 - loss: 0.355 - ETA: 1:37 - loss: 0.355 - ETA: 1:35 - loss: 0.354 - ETA: 1:33 - loss: 0.354 - ETA: 1:32 - loss: 0.354 - ETA: 1:30 - loss: 0.354 - ETA: 1:28 - loss: 0.354 - ETA: 1:26 - loss: 0.353 - ETA: 1:24 - loss: 0.353 - ETA: 1:23 - loss: 0.353 - ETA: 1:21 - loss: 0.353 - ETA: 1:19 - loss: 0.353 - ETA: 1:17 - loss: 0.352 - ETA: 1:15 - loss: 0.352 - ETA: 1:14 - loss: 0.352 - ETA: 1:12 - loss: 0.351 - ETA: 1:10 - loss: 0.351 - ETA: 1:08 - loss: 0.351 - ETA: 1:06 - loss: 0.350 - ETA: 1:04 - loss: 0.350 - ETA: 1:03 - loss: 0.350 - ETA: 1:01 - loss: 0.350 - ETA: 59s - loss: 0.349 - ETA: 57s - loss: 0.34 - ETA: 55s - loss: 0.34 - ETA: 54s - loss: 0.34 - ETA: 52s - loss: 0.34 - ETA: 50s - loss: 0.34 - ETA: 48s - loss: 0.34 - ETA: 47s - loss: 0.34 - ETA: 45s - loss: 0.34 - ETA: 43s - loss: 0.34 - ETA: 41s - loss: 0.34 - ETA: 39s - loss: 0.34 - ETA: 37s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 34s - loss: 0.34 - ETA: 32s - loss: 0.34 - ETA: 30s - loss: 0.34 - ETA: 28s - loss: 0.34 - ETA: 27s - loss: 0.34 - ETA: 25s - loss: 0.34 - ETA: 23s - loss: 0.34 - ETA: 21s - loss: 0.34 - ETA: 19s - loss: 0.34 - ETA: 18s - loss: 0.34 - ETA: 16s - loss: 0.34 - ETA: 14s - loss: 0.34 - ETA: 12s - loss: 0.34 - ETA: 10s - loss: 0.34 - ETA: 9s - loss: 0.3447 - ETA: 7s - loss: 0.344 - ETA: 5s - loss: 0.344 - ETA: 3s - loss: 0.344 - ETA: 1s - loss: 0.344 - ETA: 0s - loss: 0.343 - 206s 2s/step - loss: 0.3437 - val_loss: 0.1700\n",
      "\n",
      "Epoch 00010: val_loss did not improve from 0.08220\n",
      "Epoch 11/30\n",
      "111/111 [==============================] - ETA: 3:20 - loss: 0.174 - ETA: 3:16 - loss: 0.195 - ETA: 3:15 - loss: 0.204 - ETA: 3:12 - loss: 0.212 - ETA: 3:12 - loss: 0.216 - ETA: 3:10 - loss: 0.216 - ETA: 3:08 - loss: 0.226 - ETA: 3:05 - loss: 0.232 - ETA: 3:04 - loss: 0.238 - ETA: 3:02 - loss: 0.243 - ETA: 3:01 - loss: 0.247 - ETA: 2:59 - loss: 0.249 - ETA: 2:57 - loss: 0.251 - ETA: 2:55 - loss: 0.253 - ETA: 2:53 - loss: 0.253 - ETA: 2:52 - loss: 0.254 - ETA: 2:50 - loss: 0.255 - ETA: 2:48 - loss: 0.258 - ETA: 2:46 - loss: 0.260 - ETA: 2:44 - loss: 0.263 - ETA: 2:42 - loss: 0.265 - ETA: 2:41 - loss: 0.270 - ETA: 2:39 - loss: 0.275 - ETA: 2:37 - loss: 0.279 - ETA: 2:35 - loss: 0.284 - ETA: 2:33 - loss: 0.287 - ETA: 2:31 - loss: 0.291 - ETA: 2:30 - loss: 0.294 - ETA: 2:28 - loss: 0.297 - ETA: 2:26 - loss: 0.300 - ETA: 2:24 - loss: 0.303 - ETA: 2:22 - loss: 0.305 - ETA: 2:21 - loss: 0.308 - ETA: 2:19 - loss: 0.311 - ETA: 2:18 - loss: 0.313 - ETA: 2:16 - loss: 0.315 - ETA: 2:14 - loss: 0.317 - ETA: 2:12 - loss: 0.318 - ETA: 2:11 - loss: 0.320 - ETA: 2:09 - loss: 0.321 - ETA: 2:07 - loss: 0.322 - ETA: 2:05 - loss: 0.324 - ETA: 2:03 - loss: 0.325 - ETA: 2:01 - loss: 0.326 - ETA: 1:59 - loss: 0.327 - ETA: 1:58 - loss: 0.328 - ETA: 1:56 - loss: 0.328 - ETA: 1:54 - loss: 0.329 - ETA: 1:52 - loss: 0.330 - ETA: 1:50 - loss: 0.331 - ETA: 1:49 - loss: 0.331 - ETA: 1:47 - loss: 0.332 - ETA: 1:45 - loss: 0.332 - ETA: 1:43 - loss: 0.333 - ETA: 1:41 - loss: 0.333 - ETA: 1:39 - loss: 0.334 - ETA: 1:38 - loss: 0.334 - ETA: 1:36 - loss: 0.334 - ETA: 1:34 - loss: 0.334 - ETA: 1:32 - loss: 0.335 - ETA: 1:30 - loss: 0.335 - ETA: 1:28 - loss: 0.335 - ETA: 1:27 - loss: 0.336 - ETA: 1:25 - loss: 0.336 - ETA: 1:23 - loss: 0.336 - ETA: 1:21 - loss: 0.336 - ETA: 1:19 - loss: 0.337 - ETA: 1:18 - loss: 0.337 - ETA: 1:16 - loss: 0.337 - ETA: 1:14 - loss: 0.337 - ETA: 1:12 - loss: 0.338 - ETA: 1:10 - loss: 0.338 - ETA: 1:08 - loss: 0.338 - ETA: 1:07 - loss: 0.338 - ETA: 1:05 - loss: 0.338 - ETA: 1:03 - loss: 0.338 - ETA: 1:01 - loss: 0.338 - ETA: 59s - loss: 0.339 - ETA: 58s - loss: 0.33 - ETA: 56s - loss: 0.33 - ETA: 54s - loss: 0.33 - ETA: 52s - loss: 0.33 - ETA: 50s - loss: 0.33 - ETA: 48s - loss: 0.33 - ETA: 47s - loss: 0.33 - ETA: 45s - loss: 0.33 - ETA: 43s - loss: 0.34 - ETA: 41s - loss: 0.34 - ETA: 39s - loss: 0.34 - ETA: 38s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 34s - loss: 0.33 - ETA: 32s - loss: 0.33 - ETA: 30s - loss: 0.33 - ETA: 29s - loss: 0.33 - ETA: 27s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 9s - loss: 0.3395 - ETA: 7s - loss: 0.339 - ETA: 5s - loss: 0.339 - ETA: 3s - loss: 0.339 - ETA: 1s - loss: 0.339 - ETA: 0s - loss: 0.339 - 207s 2s/step - loss: 0.3396 - val_loss: 0.1827\n",
      "\n",
      "Epoch 00011: val_loss did not improve from 0.08220\n",
      "Epoch 12/30\n",
      "111/111 [==============================] - ETA: 3:27 - loss: 0.112 - ETA: 3:14 - loss: 0.216 - ETA: 3:15 - loss: 0.239 - ETA: 3:11 - loss: 0.266 - ETA: 3:10 - loss: 0.279 - ETA: 3:09 - loss: 0.286 - ETA: 3:07 - loss: 0.294 - ETA: 3:05 - loss: 0.300 - ETA: 3:03 - loss: 0.303 - ETA: 3:02 - loss: 0.303 - ETA: 3:00 - loss: 0.305 - ETA: 2:58 - loss: 0.308 - ETA: 2:56 - loss: 0.316 - ETA: 2:54 - loss: 0.321 - ETA: 2:53 - loss: 0.329 - ETA: 2:50 - loss: 0.335 - ETA: 2:49 - loss: 0.339 - ETA: 2:47 - loss: 0.343 - ETA: 2:46 - loss: 0.346 - ETA: 2:44 - loss: 0.349 - ETA: 2:42 - loss: 0.350 - ETA: 2:40 - loss: 0.351 - ETA: 2:39 - loss: 0.352 - ETA: 2:37 - loss: 0.353 - ETA: 2:35 - loss: 0.353 - ETA: 2:33 - loss: 0.353 - ETA: 2:31 - loss: 0.353 - ETA: 2:30 - loss: 0.353 - ETA: 2:28 - loss: 0.352 - ETA: 2:26 - loss: 0.352 - ETA: 2:24 - loss: 0.351 - ETA: 2:22 - loss: 0.351 - ETA: 2:21 - loss: 0.350 - ETA: 2:19 - loss: 0.350 - ETA: 2:17 - loss: 0.349 - ETA: 2:15 - loss: 0.349 - ETA: 2:13 - loss: 0.348 - ETA: 2:12 - loss: 0.348 - ETA: 2:10 - loss: 0.348 - ETA: 2:08 - loss: 0.347 - ETA: 2:06 - loss: 0.347 - ETA: 2:04 - loss: 0.346 - ETA: 2:03 - loss: 0.346 - ETA: 2:01 - loss: 0.345 - ETA: 1:59 - loss: 0.344 - ETA: 1:57 - loss: 0.344 - ETA: 1:55 - loss: 0.343 - ETA: 1:53 - loss: 0.343 - ETA: 1:52 - loss: 0.342 - ETA: 1:50 - loss: 0.342 - ETA: 1:48 - loss: 0.341 - ETA: 1:46 - loss: 0.341 - ETA: 1:45 - loss: 0.340 - ETA: 1:43 - loss: 0.340 - ETA: 1:41 - loss: 0.339 - ETA: 1:39 - loss: 0.339 - ETA: 1:37 - loss: 0.338 - ETA: 1:36 - loss: 0.338 - ETA: 1:34 - loss: 0.338 - ETA: 1:32 - loss: 0.337 - ETA: 1:30 - loss: 0.337 - ETA: 1:28 - loss: 0.337 - ETA: 1:27 - loss: 0.336 - ETA: 1:25 - loss: 0.336 - ETA: 1:23 - loss: 0.336 - ETA: 1:21 - loss: 0.335 - ETA: 1:19 - loss: 0.335 - ETA: 1:17 - loss: 0.335 - ETA: 1:16 - loss: 0.335 - ETA: 1:14 - loss: 0.335 - ETA: 1:12 - loss: 0.335 - ETA: 1:10 - loss: 0.334 - ETA: 1:08 - loss: 0.334 - ETA: 1:07 - loss: 0.334 - ETA: 1:05 - loss: 0.334 - ETA: 1:03 - loss: 0.334 - ETA: 1:01 - loss: 0.333 - ETA: 59s - loss: 0.333 - ETA: 58s - loss: 0.33 - ETA: 56s - loss: 0.33 - ETA: 54s - loss: 0.33 - ETA: 52s - loss: 0.33 - ETA: 50s - loss: 0.33 - ETA: 48s - loss: 0.33 - ETA: 47s - loss: 0.33 - ETA: 45s - loss: 0.33 - ETA: 43s - loss: 0.33 - ETA: 41s - loss: 0.33 - ETA: 39s - loss: 0.33 - ETA: 38s - loss: 0.33 - ETA: 36s - loss: 0.33 - ETA: 34s - loss: 0.33 - ETA: 32s - loss: 0.33 - ETA: 30s - loss: 0.33 - ETA: 29s - loss: 0.33 - ETA: 27s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 16s - loss: 0.32 - ETA: 14s - loss: 0.33 - ETA: 12s - loss: 0.32 - ETA: 10s - loss: 0.32 - ETA: 9s - loss: 0.3299 - ETA: 7s - loss: 0.329 - ETA: 5s - loss: 0.329 - ETA: 3s - loss: 0.329 - ETA: 1s - loss: 0.329 - ETA: 0s - loss: 0.329 - 207s 2s/step - loss: 0.3297 - val_loss: 0.1678\n",
      "\n",
      "Epoch 00012: val_loss did not improve from 0.08220\n",
      "Epoch 13/30\n",
      "111/111 [==============================] - ETA: 3:19 - loss: 0.237 - ETA: 3:18 - loss: 0.256 - ETA: 3:17 - loss: 0.290 - ETA: 3:18 - loss: 0.311 - ETA: 3:16 - loss: 0.314 - ETA: 3:16 - loss: 0.316 - ETA: 3:14 - loss: 0.318 - ETA: 3:12 - loss: 0.316 - ETA: 3:10 - loss: 0.314 - ETA: 3:08 - loss: 0.312 - ETA: 3:04 - loss: 0.317 - ETA: 3:03 - loss: 0.324 - ETA: 3:01 - loss: 0.329 - ETA: 2:58 - loss: 0.336 - ETA: 2:56 - loss: 0.340 - ETA: 2:54 - loss: 0.343 - ETA: 2:52 - loss: 0.345 - ETA: 2:50 - loss: 0.347 - ETA: 2:48 - loss: 0.348 - ETA: 2:46 - loss: 0.349 - ETA: 2:44 - loss: 0.350 - ETA: 2:43 - loss: 0.350 - ETA: 2:41 - loss: 0.350 - ETA: 2:39 - loss: 0.350 - ETA: 2:37 - loss: 0.349 - ETA: 2:35 - loss: 0.349 - ETA: 2:33 - loss: 0.349 - ETA: 2:31 - loss: 0.349 - ETA: 2:29 - loss: 0.348 - ETA: 2:28 - loss: 0.348 - ETA: 2:26 - loss: 0.348 - ETA: 2:24 - loss: 0.348 - ETA: 2:22 - loss: 0.348 - ETA: 2:20 - loss: 0.348 - ETA: 2:18 - loss: 0.348 - ETA: 2:16 - loss: 0.349 - ETA: 2:15 - loss: 0.349 - ETA: 2:13 - loss: 0.350 - ETA: 2:11 - loss: 0.350 - ETA: 2:09 - loss: 0.351 - ETA: 2:07 - loss: 0.352 - ETA: 2:05 - loss: 0.352 - ETA: 2:04 - loss: 0.353 - ETA: 2:02 - loss: 0.353 - ETA: 2:00 - loss: 0.354 - ETA: 1:58 - loss: 0.354 - ETA: 1:56 - loss: 0.355 - ETA: 1:54 - loss: 0.355 - ETA: 1:53 - loss: 0.355 - ETA: 1:51 - loss: 0.356 - ETA: 1:49 - loss: 0.356 - ETA: 1:47 - loss: 0.356 - ETA: 1:45 - loss: 0.356 - ETA: 1:43 - loss: 0.356 - ETA: 1:41 - loss: 0.356 - ETA: 1:40 - loss: 0.356 - ETA: 1:38 - loss: 0.357 - ETA: 1:36 - loss: 0.357 - ETA: 1:34 - loss: 0.358 - ETA: 1:32 - loss: 0.358 - ETA: 1:30 - loss: 0.358 - ETA: 1:29 - loss: 0.358 - ETA: 1:27 - loss: 0.359 - ETA: 1:25 - loss: 0.359 - ETA: 1:23 - loss: 0.359 - ETA: 1:21 - loss: 0.359 - ETA: 1:20 - loss: 0.359 - ETA: 1:18 - loss: 0.359 - ETA: 1:16 - loss: 0.359 - ETA: 1:14 - loss: 0.360 - ETA: 1:12 - loss: 0.360 - ETA: 1:11 - loss: 0.360 - ETA: 1:09 - loss: 0.360 - ETA: 1:07 - loss: 0.360 - ETA: 1:05 - loss: 0.360 - ETA: 1:03 - loss: 0.360 - ETA: 1:01 - loss: 0.361 - ETA: 1:00 - loss: 0.361 - ETA: 58s - loss: 0.361 - ETA: 56s - loss: 0.36 - ETA: 54s - loss: 0.36 - ETA: 52s - loss: 0.36 - ETA: 51s - loss: 0.36 - ETA: 49s - loss: 0.36 - ETA: 47s - loss: 0.36 - ETA: 45s - loss: 0.36 - ETA: 43s - loss: 0.36 - ETA: 41s - loss: 0.36 - ETA: 40s - loss: 0.36 - ETA: 38s - loss: 0.36 - ETA: 36s - loss: 0.36 - ETA: 34s - loss: 0.36 - ETA: 32s - loss: 0.36 - ETA: 31s - loss: 0.36 - ETA: 29s - loss: 0.36 - ETA: 27s - loss: 0.36 - ETA: 25s - loss: 0.36 - ETA: 23s - loss: 0.36 - ETA: 21s - loss: 0.36 - ETA: 20s - loss: 0.36 - ETA: 18s - loss: 0.36 - ETA: 16s - loss: 0.36 - ETA: 14s - loss: 0.36 - ETA: 12s - loss: 0.36 - ETA: 10s - loss: 0.36 - ETA: 9s - loss: 0.3606 - ETA: 7s - loss: 0.360 - ETA: 5s - loss: 0.360 - ETA: 3s - loss: 0.360 - ETA: 1s - loss: 0.360 - ETA: 0s - loss: 0.360 - 208s 2s/step - loss: 0.3599 - val_loss: 0.1714\n",
      "\n",
      "Epoch 00013: val_loss did not improve from 0.08220\n",
      "Epoch 14/30\n",
      "111/111 [==============================] - ETA: 3:20 - loss: 0.334 - ETA: 3:13 - loss: 0.297 - ETA: 3:16 - loss: 0.271 - ETA: 3:14 - loss: 0.252 - ETA: 3:11 - loss: 0.238 - ETA: 3:10 - loss: 0.243 - ETA: 3:08 - loss: 0.257 - ETA: 3:06 - loss: 0.285 - ETA: 3:04 - loss: 0.303 - ETA: 3:03 - loss: 0.316 - ETA: 3:01 - loss: 0.325 - ETA: 2:59 - loss: 0.330 - ETA: 2:58 - loss: 0.335 - ETA: 2:57 - loss: 0.338 - ETA: 2:54 - loss: 0.343 - ETA: 2:53 - loss: 0.348 - ETA: 2:51 - loss: 0.355 - ETA: 2:49 - loss: 0.360 - ETA: 2:47 - loss: 0.364 - ETA: 2:45 - loss: 0.368 - ETA: 2:44 - loss: 0.370 - ETA: 2:42 - loss: 0.372 - ETA: 2:40 - loss: 0.375 - ETA: 2:39 - loss: 0.378 - ETA: 2:37 - loss: 0.379 - ETA: 2:35 - loss: 0.381 - ETA: 2:34 - loss: 0.382 - ETA: 2:32 - loss: 0.383 - ETA: 2:30 - loss: 0.384 - ETA: 2:28 - loss: 0.384 - ETA: 2:26 - loss: 0.384 - ETA: 2:25 - loss: 0.385 - ETA: 2:23 - loss: 0.384 - ETA: 2:21 - loss: 0.384 - ETA: 2:19 - loss: 0.383 - ETA: 2:17 - loss: 0.383 - ETA: 2:15 - loss: 0.382 - ETA: 2:13 - loss: 0.381 - ETA: 2:11 - loss: 0.381 - ETA: 2:10 - loss: 0.380 - ETA: 2:08 - loss: 0.380 - ETA: 2:06 - loss: 0.379 - ETA: 2:04 - loss: 0.379 - ETA: 2:02 - loss: 0.378 - ETA: 2:00 - loss: 0.377 - ETA: 1:58 - loss: 0.377 - ETA: 1:57 - loss: 0.376 - ETA: 1:55 - loss: 0.376 - ETA: 1:53 - loss: 0.375 - ETA: 1:51 - loss: 0.375 - ETA: 1:49 - loss: 0.375 - ETA: 1:47 - loss: 0.374 - ETA: 1:45 - loss: 0.374 - ETA: 1:44 - loss: 0.373 - ETA: 1:42 - loss: 0.373 - ETA: 1:40 - loss: 0.373 - ETA: 1:38 - loss: 0.372 - ETA: 1:36 - loss: 0.372 - ETA: 1:34 - loss: 0.372 - ETA: 1:33 - loss: 0.372 - ETA: 1:31 - loss: 0.372 - ETA: 1:29 - loss: 0.371 - ETA: 1:27 - loss: 0.371 - ETA: 1:25 - loss: 0.371 - ETA: 1:23 - loss: 0.371 - ETA: 1:21 - loss: 0.370 - ETA: 1:20 - loss: 0.370 - ETA: 1:18 - loss: 0.370 - ETA: 1:16 - loss: 0.370 - ETA: 1:14 - loss: 0.370 - ETA: 1:12 - loss: 0.369 - ETA: 1:11 - loss: 0.369 - ETA: 1:09 - loss: 0.369 - ETA: 1:07 - loss: 0.368 - ETA: 1:05 - loss: 0.368 - ETA: 1:03 - loss: 0.368 - ETA: 1:01 - loss: 0.368 - ETA: 1:00 - loss: 0.367 - ETA: 58s - loss: 0.367 - ETA: 56s - loss: 0.36 - ETA: 54s - loss: 0.36 - ETA: 52s - loss: 0.36 - ETA: 51s - loss: 0.36 - ETA: 49s - loss: 0.36 - ETA: 47s - loss: 0.36 - ETA: 45s - loss: 0.36 - ETA: 43s - loss: 0.36 - ETA: 41s - loss: 0.36 - ETA: 40s - loss: 0.36 - ETA: 38s - loss: 0.36 - ETA: 36s - loss: 0.36 - ETA: 34s - loss: 0.36 - ETA: 32s - loss: 0.36 - ETA: 31s - loss: 0.36 - ETA: 29s - loss: 0.36 - ETA: 27s - loss: 0.36 - ETA: 25s - loss: 0.36 - ETA: 23s - loss: 0.36 - ETA: 21s - loss: 0.36 - ETA: 20s - loss: 0.36 - ETA: 18s - loss: 0.35 - ETA: 16s - loss: 0.35 - ETA: 14s - loss: 0.35 - ETA: 12s - loss: 0.35 - ETA: 10s - loss: 0.35 - ETA: 9s - loss: 0.3577 - ETA: 7s - loss: 0.357 - ETA: 5s - loss: 0.357 - ETA: 3s - loss: 0.356 - ETA: 1s - loss: 0.356 - ETA: 0s - loss: 0.356 - 208s 2s/step - loss: 0.3560 - val_loss: 0.1527\n",
      "\n",
      "Epoch 00014: val_loss did not improve from 0.08220\n",
      "Epoch 15/30\n",
      "111/111 [==============================] - ETA: 3:16 - loss: 0.191 - ETA: 3:24 - loss: 0.438 - ETA: 3:17 - loss: 0.464 - ETA: 3:17 - loss: 0.449 - ETA: 3:15 - loss: 0.438 - ETA: 3:11 - loss: 0.449 - ETA: 3:09 - loss: 0.451 - ETA: 3:07 - loss: 0.448 - ETA: 3:05 - loss: 0.443 - ETA: 3:03 - loss: 0.440 - ETA: 3:02 - loss: 0.436 - ETA: 2:59 - loss: 0.430 - ETA: 2:58 - loss: 0.424 - ETA: 2:56 - loss: 0.420 - ETA: 2:54 - loss: 0.416 - ETA: 2:52 - loss: 0.413 - ETA: 2:50 - loss: 0.409 - ETA: 2:48 - loss: 0.408 - ETA: 2:47 - loss: 0.407 - ETA: 2:44 - loss: 0.406 - ETA: 2:43 - loss: 0.404 - ETA: 2:41 - loss: 0.402 - ETA: 2:39 - loss: 0.400 - ETA: 2:38 - loss: 0.398 - ETA: 2:36 - loss: 0.396 - ETA: 2:34 - loss: 0.395 - ETA: 2:32 - loss: 0.392 - ETA: 2:30 - loss: 0.391 - ETA: 2:28 - loss: 0.389 - ETA: 2:26 - loss: 0.387 - ETA: 2:25 - loss: 0.385 - ETA: 2:23 - loss: 0.384 - ETA: 2:21 - loss: 0.382 - ETA: 2:19 - loss: 0.380 - ETA: 2:18 - loss: 0.379 - ETA: 2:16 - loss: 0.377 - ETA: 2:14 - loss: 0.376 - ETA: 2:12 - loss: 0.374 - ETA: 2:10 - loss: 0.373 - ETA: 2:09 - loss: 0.372 - ETA: 2:07 - loss: 0.371 - ETA: 2:05 - loss: 0.370 - ETA: 2:03 - loss: 0.369 - ETA: 2:02 - loss: 0.368 - ETA: 2:00 - loss: 0.367 - ETA: 1:58 - loss: 0.366 - ETA: 1:56 - loss: 0.365 - ETA: 1:54 - loss: 0.364 - ETA: 1:53 - loss: 0.363 - ETA: 1:51 - loss: 0.362 - ETA: 1:49 - loss: 0.362 - ETA: 1:47 - loss: 0.361 - ETA: 1:45 - loss: 0.360 - ETA: 1:43 - loss: 0.359 - ETA: 1:42 - loss: 0.359 - ETA: 1:40 - loss: 0.358 - ETA: 1:38 - loss: 0.358 - ETA: 1:36 - loss: 0.357 - ETA: 1:34 - loss: 0.356 - ETA: 1:33 - loss: 0.356 - ETA: 1:31 - loss: 0.355 - ETA: 1:29 - loss: 0.354 - ETA: 1:27 - loss: 0.354 - ETA: 1:25 - loss: 0.353 - ETA: 1:23 - loss: 0.353 - ETA: 1:22 - loss: 0.352 - ETA: 1:20 - loss: 0.352 - ETA: 1:18 - loss: 0.351 - ETA: 1:16 - loss: 0.351 - ETA: 1:14 - loss: 0.350 - ETA: 1:12 - loss: 0.350 - ETA: 1:11 - loss: 0.349 - ETA: 1:09 - loss: 0.349 - ETA: 1:07 - loss: 0.348 - ETA: 1:05 - loss: 0.348 - ETA: 1:03 - loss: 0.348 - ETA: 1:01 - loss: 0.347 - ETA: 1:00 - loss: 0.347 - ETA: 58s - loss: 0.347 - ETA: 56s - loss: 0.34 - ETA: 54s - loss: 0.34 - ETA: 52s - loss: 0.34 - ETA: 51s - loss: 0.34 - ETA: 49s - loss: 0.34 - ETA: 47s - loss: 0.34 - ETA: 45s - loss: 0.34 - ETA: 43s - loss: 0.34 - ETA: 41s - loss: 0.34 - ETA: 40s - loss: 0.34 - ETA: 38s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 34s - loss: 0.34 - ETA: 32s - loss: 0.34 - ETA: 30s - loss: 0.34 - ETA: 29s - loss: 0.34 - ETA: 27s - loss: 0.34 - ETA: 25s - loss: 0.34 - ETA: 23s - loss: 0.34 - ETA: 21s - loss: 0.34 - ETA: 20s - loss: 0.34 - ETA: 18s - loss: 0.34 - ETA: 16s - loss: 0.34 - ETA: 14s - loss: 0.34 - ETA: 12s - loss: 0.34 - ETA: 10s - loss: 0.34 - ETA: 9s - loss: 0.3423 - ETA: 7s - loss: 0.342 - ETA: 5s - loss: 0.342 - ETA: 3s - loss: 0.342 - ETA: 1s - loss: 0.342 - ETA: 0s - loss: 0.342 - 208s 2s/step - loss: 0.3423 - val_loss: 0.1563\n",
      "\n",
      "Epoch 00015: val_loss did not improve from 0.08220\n",
      "Epoch 16/30\n",
      "111/111 [==============================] - ETA: 3:27 - loss: 0.046 - ETA: 3:16 - loss: 0.063 - ETA: 3:13 - loss: 0.148 - ETA: 3:10 - loss: 0.187 - ETA: 3:11 - loss: 0.213 - ETA: 3:09 - loss: 0.231 - ETA: 3:07 - loss: 0.241 - ETA: 3:06 - loss: 0.262 - ETA: 3:04 - loss: 0.280 - ETA: 3:03 - loss: 0.294 - ETA: 3:01 - loss: 0.309 - ETA: 2:59 - loss: 0.320 - ETA: 2:57 - loss: 0.329 - ETA: 2:55 - loss: 0.335 - ETA: 2:54 - loss: 0.343 - ETA: 2:52 - loss: 0.349 - ETA: 2:50 - loss: 0.354 - ETA: 2:48 - loss: 0.357 - ETA: 2:47 - loss: 0.360 - ETA: 2:45 - loss: 0.362 - ETA: 2:43 - loss: 0.364 - ETA: 2:41 - loss: 0.365 - ETA: 2:39 - loss: 0.366 - ETA: 2:37 - loss: 0.367 - ETA: 2:36 - loss: 0.367 - ETA: 2:34 - loss: 0.367 - ETA: 2:32 - loss: 0.367 - ETA: 2:30 - loss: 0.366 - ETA: 2:28 - loss: 0.366 - ETA: 2:26 - loss: 0.366 - ETA: 2:25 - loss: 0.365 - ETA: 2:23 - loss: 0.364 - ETA: 2:21 - loss: 0.364 - ETA: 2:19 - loss: 0.363 - ETA: 2:17 - loss: 0.363 - ETA: 2:16 - loss: 0.363 - ETA: 2:14 - loss: 0.363 - ETA: 2:12 - loss: 0.363 - ETA: 2:10 - loss: 0.362 - ETA: 2:08 - loss: 0.363 - ETA: 2:06 - loss: 0.363 - ETA: 2:05 - loss: 0.363 - ETA: 2:03 - loss: 0.363 - ETA: 2:01 - loss: 0.363 - ETA: 1:59 - loss: 0.363 - ETA: 1:57 - loss: 0.362 - ETA: 1:56 - loss: 0.362 - ETA: 1:54 - loss: 0.362 - ETA: 1:52 - loss: 0.361 - ETA: 1:50 - loss: 0.361 - ETA: 1:48 - loss: 0.361 - ETA: 1:46 - loss: 0.360 - ETA: 1:45 - loss: 0.360 - ETA: 1:43 - loss: 0.360 - ETA: 1:41 - loss: 0.359 - ETA: 1:39 - loss: 0.359 - ETA: 1:37 - loss: 0.358 - ETA: 1:36 - loss: 0.358 - ETA: 1:34 - loss: 0.357 - ETA: 1:32 - loss: 0.357 - ETA: 1:30 - loss: 0.357 - ETA: 1:29 - loss: 0.356 - ETA: 1:27 - loss: 0.356 - ETA: 1:25 - loss: 0.356 - ETA: 1:23 - loss: 0.356 - ETA: 1:21 - loss: 0.355 - ETA: 1:19 - loss: 0.355 - ETA: 1:18 - loss: 0.355 - ETA: 1:16 - loss: 0.355 - ETA: 1:14 - loss: 0.354 - ETA: 1:12 - loss: 0.354 - ETA: 1:10 - loss: 0.354 - ETA: 1:09 - loss: 0.354 - ETA: 1:07 - loss: 0.353 - ETA: 1:05 - loss: 0.353 - ETA: 1:03 - loss: 0.353 - ETA: 1:01 - loss: 0.352 - ETA: 59s - loss: 0.352 - ETA: 58s - loss: 0.35 - ETA: 56s - loss: 0.35 - ETA: 54s - loss: 0.35 - ETA: 52s - loss: 0.35 - ETA: 50s - loss: 0.35 - ETA: 49s - loss: 0.35 - ETA: 47s - loss: 0.35 - ETA: 45s - loss: 0.35 - ETA: 43s - loss: 0.35 - ETA: 41s - loss: 0.34 - ETA: 39s - loss: 0.34 - ETA: 38s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 34s - loss: 0.34 - ETA: 32s - loss: 0.34 - ETA: 30s - loss: 0.34 - ETA: 29s - loss: 0.34 - ETA: 27s - loss: 0.34 - ETA: 25s - loss: 0.34 - ETA: 23s - loss: 0.34 - ETA: 21s - loss: 0.34 - ETA: 19s - loss: 0.34 - ETA: 18s - loss: 0.34 - ETA: 16s - loss: 0.34 - ETA: 14s - loss: 0.34 - ETA: 12s - loss: 0.34 - ETA: 10s - loss: 0.34 - ETA: 9s - loss: 0.3446 - ETA: 7s - loss: 0.344 - ETA: 5s - loss: 0.344 - ETA: 3s - loss: 0.343 - ETA: 1s - loss: 0.343 - ETA: 0s - loss: 0.343 - 207s 2s/step - loss: 0.3433 - val_loss: 0.1355\n",
      "\n",
      "Epoch 00016: val_loss did not improve from 0.08220\n",
      "Epoch 17/30\n",
      "111/111 [==============================] - ETA: 3:19 - loss: 0.657 - ETA: 3:18 - loss: 0.613 - ETA: 3:15 - loss: 0.545 - ETA: 3:12 - loss: 0.504 - ETA: 3:09 - loss: 0.478 - ETA: 3:09 - loss: 0.454 - ETA: 3:08 - loss: 0.435 - ETA: 3:06 - loss: 0.417 - ETA: 3:04 - loss: 0.402 - ETA: 3:02 - loss: 0.392 - ETA: 3:02 - loss: 0.381 - ETA: 3:00 - loss: 0.371 - ETA: 2:58 - loss: 0.362 - ETA: 2:57 - loss: 0.355 - ETA: 2:55 - loss: 0.347 - ETA: 2:54 - loss: 0.341 - ETA: 2:52 - loss: 0.336 - ETA: 2:50 - loss: 0.332 - ETA: 2:48 - loss: 0.329 - ETA: 2:46 - loss: 0.327 - ETA: 2:44 - loss: 0.325 - ETA: 2:42 - loss: 0.323 - ETA: 2:40 - loss: 0.322 - ETA: 2:38 - loss: 0.320 - ETA: 2:36 - loss: 0.319 - ETA: 2:35 - loss: 0.319 - ETA: 2:33 - loss: 0.318 - ETA: 2:31 - loss: 0.317 - ETA: 2:29 - loss: 0.318 - ETA: 2:27 - loss: 0.318 - ETA: 2:25 - loss: 0.319 - ETA: 2:23 - loss: 0.320 - ETA: 2:21 - loss: 0.320 - ETA: 2:20 - loss: 0.321 - ETA: 2:18 - loss: 0.322 - ETA: 2:16 - loss: 0.322 - ETA: 2:14 - loss: 0.323 - ETA: 2:12 - loss: 0.323 - ETA: 2:10 - loss: 0.323 - ETA: 2:09 - loss: 0.324 - ETA: 2:07 - loss: 0.324 - ETA: 2:05 - loss: 0.324 - ETA: 2:03 - loss: 0.325 - ETA: 2:01 - loss: 0.325 - ETA: 2:00 - loss: 0.325 - ETA: 1:58 - loss: 0.326 - ETA: 1:56 - loss: 0.326 - ETA: 1:54 - loss: 0.326 - ETA: 1:52 - loss: 0.327 - ETA: 1:50 - loss: 0.327 - ETA: 1:49 - loss: 0.328 - ETA: 1:47 - loss: 0.328 - ETA: 1:45 - loss: 0.328 - ETA: 1:43 - loss: 0.328 - ETA: 1:41 - loss: 0.328 - ETA: 1:39 - loss: 0.328 - ETA: 1:38 - loss: 0.328 - ETA: 1:36 - loss: 0.329 - ETA: 1:34 - loss: 0.329 - ETA: 1:32 - loss: 0.329 - ETA: 1:30 - loss: 0.329 - ETA: 1:28 - loss: 0.329 - ETA: 1:27 - loss: 0.329 - ETA: 1:25 - loss: 0.329 - ETA: 1:23 - loss: 0.329 - ETA: 1:21 - loss: 0.329 - ETA: 1:19 - loss: 0.328 - ETA: 1:18 - loss: 0.328 - ETA: 1:16 - loss: 0.328 - ETA: 1:14 - loss: 0.328 - ETA: 1:12 - loss: 0.328 - ETA: 1:10 - loss: 0.328 - ETA: 1:09 - loss: 0.328 - ETA: 1:07 - loss: 0.328 - ETA: 1:05 - loss: 0.328 - ETA: 1:03 - loss: 0.328 - ETA: 1:01 - loss: 0.328 - ETA: 1:00 - loss: 0.328 - ETA: 58s - loss: 0.328 - ETA: 56s - loss: 0.32 - ETA: 54s - loss: 0.32 - ETA: 52s - loss: 0.32 - ETA: 51s - loss: 0.32 - ETA: 49s - loss: 0.32 - ETA: 47s - loss: 0.32 - ETA: 45s - loss: 0.32 - ETA: 43s - loss: 0.32 - ETA: 41s - loss: 0.32 - ETA: 40s - loss: 0.32 - ETA: 38s - loss: 0.32 - ETA: 36s - loss: 0.32 - ETA: 34s - loss: 0.32 - ETA: 32s - loss: 0.32 - ETA: 30s - loss: 0.32 - ETA: 29s - loss: 0.32 - ETA: 27s - loss: 0.32 - ETA: 25s - loss: 0.32 - ETA: 23s - loss: 0.32 - ETA: 21s - loss: 0.32 - ETA: 20s - loss: 0.32 - ETA: 18s - loss: 0.32 - ETA: 16s - loss: 0.32 - ETA: 14s - loss: 0.32 - ETA: 12s - loss: 0.32 - ETA: 10s - loss: 0.32 - ETA: 9s - loss: 0.3287 - ETA: 7s - loss: 0.328 - ETA: 5s - loss: 0.329 - ETA: 3s - loss: 0.329 - ETA: 1s - loss: 0.329 - ETA: 0s - loss: 0.329 - 208s 2s/step - loss: 0.3296 - val_loss: 0.1458\n",
      "\n",
      "Epoch 00017: val_loss did not improve from 0.08220\n",
      "Epoch 18/30\n",
      "111/111 [==============================] - ETA: 3:16 - loss: 0.507 - ETA: 3:26 - loss: 0.404 - ETA: 3:22 - loss: 0.386 - ETA: 3:19 - loss: 0.371 - ETA: 3:15 - loss: 0.355 - ETA: 3:14 - loss: 0.344 - ETA: 3:12 - loss: 0.331 - ETA: 3:09 - loss: 0.322 - ETA: 3:07 - loss: 0.321 - ETA: 3:04 - loss: 0.319 - ETA: 3:03 - loss: 0.316 - ETA: 3:01 - loss: 0.314 - ETA: 2:59 - loss: 0.311 - ETA: 2:57 - loss: 0.310 - ETA: 2:55 - loss: 0.309 - ETA: 2:53 - loss: 0.307 - ETA: 2:50 - loss: 0.304 - ETA: 2:49 - loss: 0.302 - ETA: 2:47 - loss: 0.301 - ETA: 2:45 - loss: 0.300 - ETA: 2:44 - loss: 0.299 - ETA: 2:42 - loss: 0.298 - ETA: 2:40 - loss: 0.298 - ETA: 2:38 - loss: 0.299 - ETA: 2:36 - loss: 0.299 - ETA: 2:34 - loss: 0.299 - ETA: 2:33 - loss: 0.299 - ETA: 2:31 - loss: 0.300 - ETA: 2:29 - loss: 0.300 - ETA: 2:27 - loss: 0.300 - ETA: 2:26 - loss: 0.300 - ETA: 2:24 - loss: 0.300 - ETA: 2:22 - loss: 0.301 - ETA: 2:20 - loss: 0.301 - ETA: 2:19 - loss: 0.301 - ETA: 2:17 - loss: 0.301 - ETA: 2:15 - loss: 0.301 - ETA: 2:13 - loss: 0.301 - ETA: 2:11 - loss: 0.301 - ETA: 2:09 - loss: 0.301 - ETA: 2:07 - loss: 0.301 - ETA: 2:06 - loss: 0.301 - ETA: 2:04 - loss: 0.301 - ETA: 2:02 - loss: 0.301 - ETA: 2:00 - loss: 0.301 - ETA: 1:58 - loss: 0.301 - ETA: 1:56 - loss: 0.301 - ETA: 1:54 - loss: 0.301 - ETA: 1:52 - loss: 0.301 - ETA: 1:51 - loss: 0.301 - ETA: 1:49 - loss: 0.301 - ETA: 1:47 - loss: 0.301 - ETA: 1:45 - loss: 0.301 - ETA: 1:43 - loss: 0.301 - ETA: 1:41 - loss: 0.301 - ETA: 1:40 - loss: 0.301 - ETA: 1:38 - loss: 0.301 - ETA: 1:36 - loss: 0.301 - ETA: 1:34 - loss: 0.302 - ETA: 1:32 - loss: 0.302 - ETA: 1:30 - loss: 0.302 - ETA: 1:29 - loss: 0.302 - ETA: 1:27 - loss: 0.302 - ETA: 1:25 - loss: 0.302 - ETA: 1:23 - loss: 0.302 - ETA: 1:21 - loss: 0.302 - ETA: 1:19 - loss: 0.302 - ETA: 1:18 - loss: 0.302 - ETA: 1:16 - loss: 0.302 - ETA: 1:14 - loss: 0.302 - ETA: 1:12 - loss: 0.302 - ETA: 1:10 - loss: 0.302 - ETA: 1:09 - loss: 0.302 - ETA: 1:07 - loss: 0.302 - ETA: 1:05 - loss: 0.303 - ETA: 1:03 - loss: 0.303 - ETA: 1:01 - loss: 0.303 - ETA: 59s - loss: 0.303 - ETA: 58s - loss: 0.30 - ETA: 56s - loss: 0.30 - ETA: 54s - loss: 0.30 - ETA: 52s - loss: 0.30 - ETA: 50s - loss: 0.30 - ETA: 49s - loss: 0.30 - ETA: 47s - loss: 0.30 - ETA: 45s - loss: 0.30 - ETA: 43s - loss: 0.30 - ETA: 41s - loss: 0.30 - ETA: 39s - loss: 0.30 - ETA: 38s - loss: 0.30 - ETA: 36s - loss: 0.30 - ETA: 34s - loss: 0.30 - ETA: 32s - loss: 0.30 - ETA: 30s - loss: 0.30 - ETA: 29s - loss: 0.30 - ETA: 27s - loss: 0.30 - ETA: 25s - loss: 0.30 - ETA: 23s - loss: 0.30 - ETA: 21s - loss: 0.30 - ETA: 20s - loss: 0.30 - ETA: 18s - loss: 0.30 - ETA: 16s - loss: 0.30 - ETA: 14s - loss: 0.30 - ETA: 12s - loss: 0.30 - ETA: 10s - loss: 0.30 - ETA: 9s - loss: 0.3022 - ETA: 7s - loss: 0.302 - ETA: 5s - loss: 0.302 - ETA: 3s - loss: 0.302 - ETA: 1s - loss: 0.302 - ETA: 0s - loss: 0.302 - 208s 2s/step - loss: 0.3021 - val_loss: 0.1225\n",
      "\n",
      "Epoch 00018: val_loss did not improve from 0.08220\n",
      "Epoch 19/30\n",
      "111/111 [==============================] - ETA: 3:24 - loss: 0.223 - ETA: 3:19 - loss: 0.247 - ETA: 3:16 - loss: 0.276 - ETA: 3:14 - loss: 0.279 - ETA: 3:11 - loss: 0.276 - ETA: 3:10 - loss: 0.307 - ETA: 3:07 - loss: 0.324 - ETA: 3:07 - loss: 0.337 - ETA: 3:05 - loss: 0.344 - ETA: 3:03 - loss: 0.351 - ETA: 3:01 - loss: 0.355 - ETA: 2:59 - loss: 0.356 - ETA: 2:57 - loss: 0.359 - ETA: 2:55 - loss: 0.360 - ETA: 2:53 - loss: 0.361 - ETA: 2:51 - loss: 0.361 - ETA: 2:50 - loss: 0.362 - ETA: 2:48 - loss: 0.363 - ETA: 2:46 - loss: 0.366 - ETA: 2:44 - loss: 0.368 - ETA: 2:43 - loss: 0.369 - ETA: 2:41 - loss: 0.370 - ETA: 2:39 - loss: 0.371 - ETA: 2:37 - loss: 0.371 - ETA: 2:35 - loss: 0.373 - ETA: 2:34 - loss: 0.373 - ETA: 2:32 - loss: 0.374 - ETA: 2:30 - loss: 0.374 - ETA: 2:28 - loss: 0.374 - ETA: 2:26 - loss: 0.374 - ETA: 2:24 - loss: 0.374 - ETA: 2:23 - loss: 0.374 - ETA: 2:21 - loss: 0.374 - ETA: 2:19 - loss: 0.373 - ETA: 2:17 - loss: 0.373 - ETA: 2:15 - loss: 0.373 - ETA: 2:13 - loss: 0.372 - ETA: 2:12 - loss: 0.372 - ETA: 2:10 - loss: 0.371 - ETA: 2:08 - loss: 0.371 - ETA: 2:06 - loss: 0.371 - ETA: 2:04 - loss: 0.370 - ETA: 2:03 - loss: 0.370 - ETA: 2:01 - loss: 0.370 - ETA: 1:59 - loss: 0.370 - ETA: 1:57 - loss: 0.370 - ETA: 1:55 - loss: 0.370 - ETA: 1:54 - loss: 0.370 - ETA: 1:52 - loss: 0.369 - ETA: 1:50 - loss: 0.369 - ETA: 1:48 - loss: 0.369 - ETA: 1:47 - loss: 0.369 - ETA: 1:45 - loss: 0.369 - ETA: 1:43 - loss: 0.368 - ETA: 1:41 - loss: 0.368 - ETA: 1:39 - loss: 0.368 - ETA: 1:38 - loss: 0.367 - ETA: 1:36 - loss: 0.367 - ETA: 1:34 - loss: 0.367 - ETA: 1:32 - loss: 0.366 - ETA: 1:30 - loss: 0.366 - ETA: 1:29 - loss: 0.366 - ETA: 1:27 - loss: 0.365 - ETA: 1:25 - loss: 0.365 - ETA: 1:23 - loss: 0.364 - ETA: 1:21 - loss: 0.364 - ETA: 1:19 - loss: 0.364 - ETA: 1:18 - loss: 0.364 - ETA: 1:16 - loss: 0.363 - ETA: 1:14 - loss: 0.363 - ETA: 1:12 - loss: 0.363 - ETA: 1:10 - loss: 0.363 - ETA: 1:08 - loss: 0.363 - ETA: 1:07 - loss: 0.363 - ETA: 1:05 - loss: 0.363 - ETA: 1:03 - loss: 0.363 - ETA: 1:01 - loss: 0.362 - ETA: 59s - loss: 0.362 - ETA: 58s - loss: 0.36 - ETA: 56s - loss: 0.36 - ETA: 54s - loss: 0.36 - ETA: 52s - loss: 0.36 - ETA: 50s - loss: 0.36 - ETA: 49s - loss: 0.36 - ETA: 47s - loss: 0.36 - ETA: 45s - loss: 0.36 - ETA: 43s - loss: 0.36 - ETA: 41s - loss: 0.36 - ETA: 39s - loss: 0.36 - ETA: 38s - loss: 0.36 - ETA: 36s - loss: 0.36 - ETA: 34s - loss: 0.36 - ETA: 32s - loss: 0.36 - ETA: 30s - loss: 0.36 - ETA: 29s - loss: 0.36 - ETA: 27s - loss: 0.36 - ETA: 25s - loss: 0.36 - ETA: 23s - loss: 0.36 - ETA: 21s - loss: 0.36 - ETA: 19s - loss: 0.36 - ETA: 18s - loss: 0.35 - ETA: 16s - loss: 0.36 - ETA: 14s - loss: 0.36 - ETA: 12s - loss: 0.36 - ETA: 10s - loss: 0.36 - ETA: 9s - loss: 0.3602 - ETA: 7s - loss: 0.360 - ETA: 5s - loss: 0.360 - ETA: 3s - loss: 0.360 - ETA: 1s - loss: 0.360 - ETA: 0s - loss: 0.360 - 207s 2s/step - loss: 0.3602 - val_loss: 0.1532\n",
      "\n",
      "Epoch 00019: val_loss did not improve from 0.08220\n",
      "Epoch 20/30\n",
      "111/111 [==============================] - ETA: 3:38 - loss: 0.540 - ETA: 3:23 - loss: 0.453 - ETA: 3:22 - loss: 0.633 - ETA: 3:20 - loss: 0.673 - ETA: 3:19 - loss: 0.671 - ETA: 3:16 - loss: 0.664 - ETA: 3:13 - loss: 0.651 - ETA: 3:11 - loss: 0.643 - ETA: 3:08 - loss: 0.631 - ETA: 3:06 - loss: 0.618 - ETA: 3:03 - loss: 0.604 - ETA: 3:02 - loss: 0.592 - ETA: 3:00 - loss: 0.580 - ETA: 2:58 - loss: 0.569 - ETA: 2:55 - loss: 0.558 - ETA: 2:54 - loss: 0.549 - ETA: 2:52 - loss: 0.541 - ETA: 2:50 - loss: 0.533 - ETA: 2:48 - loss: 0.526 - ETA: 2:46 - loss: 0.520 - ETA: 2:44 - loss: 0.514 - ETA: 2:42 - loss: 0.508 - ETA: 2:40 - loss: 0.502 - ETA: 2:39 - loss: 0.497 - ETA: 2:37 - loss: 0.492 - ETA: 2:35 - loss: 0.488 - ETA: 2:33 - loss: 0.483 - ETA: 2:31 - loss: 0.479 - ETA: 2:29 - loss: 0.475 - ETA: 2:28 - loss: 0.471 - ETA: 2:26 - loss: 0.468 - ETA: 2:24 - loss: 0.465 - ETA: 2:22 - loss: 0.462 - ETA: 2:20 - loss: 0.460 - ETA: 2:18 - loss: 0.458 - ETA: 2:16 - loss: 0.457 - ETA: 2:15 - loss: 0.455 - ETA: 2:13 - loss: 0.453 - ETA: 2:11 - loss: 0.451 - ETA: 2:09 - loss: 0.449 - ETA: 2:07 - loss: 0.447 - ETA: 2:05 - loss: 0.445 - ETA: 2:04 - loss: 0.443 - ETA: 2:02 - loss: 0.441 - ETA: 2:00 - loss: 0.439 - ETA: 1:58 - loss: 0.438 - ETA: 1:56 - loss: 0.436 - ETA: 1:54 - loss: 0.435 - ETA: 1:52 - loss: 0.434 - ETA: 1:51 - loss: 0.432 - ETA: 1:49 - loss: 0.431 - ETA: 1:47 - loss: 0.430 - ETA: 1:45 - loss: 0.429 - ETA: 1:43 - loss: 0.427 - ETA: 1:41 - loss: 0.426 - ETA: 1:40 - loss: 0.425 - ETA: 1:38 - loss: 0.424 - ETA: 1:36 - loss: 0.423 - ETA: 1:34 - loss: 0.422 - ETA: 1:32 - loss: 0.421 - ETA: 1:31 - loss: 0.420 - ETA: 1:29 - loss: 0.419 - ETA: 1:27 - loss: 0.418 - ETA: 1:25 - loss: 0.418 - ETA: 1:23 - loss: 0.417 - ETA: 1:21 - loss: 0.416 - ETA: 1:20 - loss: 0.415 - ETA: 1:18 - loss: 0.414 - ETA: 1:16 - loss: 0.413 - ETA: 1:14 - loss: 0.412 - ETA: 1:13 - loss: 0.411 - ETA: 1:11 - loss: 0.410 - ETA: 1:09 - loss: 0.409 - ETA: 1:07 - loss: 0.408 - ETA: 1:05 - loss: 0.407 - ETA: 1:03 - loss: 0.406 - ETA: 1:02 - loss: 0.405 - ETA: 1:00 - loss: 0.404 - ETA: 58s - loss: 0.403 - ETA: 56s - loss: 0.40 - ETA: 54s - loss: 0.40 - ETA: 52s - loss: 0.40 - ETA: 51s - loss: 0.40 - ETA: 49s - loss: 0.39 - ETA: 47s - loss: 0.39 - ETA: 45s - loss: 0.39 - ETA: 43s - loss: 0.39 - ETA: 41s - loss: 0.39 - ETA: 40s - loss: 0.39 - ETA: 38s - loss: 0.39 - ETA: 36s - loss: 0.39 - ETA: 34s - loss: 0.39 - ETA: 32s - loss: 0.39 - ETA: 31s - loss: 0.39 - ETA: 29s - loss: 0.39 - ETA: 27s - loss: 0.38 - ETA: 25s - loss: 0.38 - ETA: 23s - loss: 0.38 - ETA: 21s - loss: 0.38 - ETA: 20s - loss: 0.38 - ETA: 18s - loss: 0.38 - ETA: 16s - loss: 0.38 - ETA: 14s - loss: 0.38 - ETA: 12s - loss: 0.38 - ETA: 10s - loss: 0.38 - ETA: 9s - loss: 0.3831 - ETA: 7s - loss: 0.382 - ETA: 5s - loss: 0.382 - ETA: 3s - loss: 0.381 - ETA: 1s - loss: 0.380 - ETA: 0s - loss: 0.380 - 208s 2s/step - loss: 0.3798 - val_loss: 0.1473\n",
      "\n",
      "Epoch 00020: val_loss did not improve from 0.08220\n",
      "Epoch 21/30\n",
      "111/111 [==============================] - ETA: 3:22 - loss: 0.327 - ETA: 3:13 - loss: 0.281 - ETA: 3:24 - loss: 0.249 - ETA: 3:26 - loss: 0.233 - ETA: 3:23 - loss: 0.245 - ETA: 3:19 - loss: 0.279 - ETA: 3:17 - loss: 0.298 - ETA: 3:14 - loss: 0.307 - ETA: 3:11 - loss: 0.311 - ETA: 3:08 - loss: 0.314 - ETA: 3:05 - loss: 0.316 - ETA: 3:04 - loss: 0.318 - ETA: 3:02 - loss: 0.319 - ETA: 3:00 - loss: 0.320 - ETA: 2:58 - loss: 0.323 - ETA: 2:56 - loss: 0.325 - ETA: 2:54 - loss: 0.326 - ETA: 2:52 - loss: 0.329 - ETA: 2:50 - loss: 0.332 - ETA: 2:48 - loss: 0.333 - ETA: 2:47 - loss: 0.335 - ETA: 2:45 - loss: 0.336 - ETA: 2:43 - loss: 0.338 - ETA: 2:42 - loss: 0.339 - ETA: 2:39 - loss: 0.340 - ETA: 2:38 - loss: 0.342 - ETA: 2:36 - loss: 0.343 - ETA: 2:34 - loss: 0.343 - ETA: 2:32 - loss: 0.344 - ETA: 2:30 - loss: 0.345 - ETA: 2:28 - loss: 0.345 - ETA: 2:26 - loss: 0.345 - ETA: 2:24 - loss: 0.346 - ETA: 2:22 - loss: 0.346 - ETA: 2:20 - loss: 0.346 - ETA: 2:18 - loss: 0.346 - ETA: 2:16 - loss: 0.346 - ETA: 2:15 - loss: 0.347 - ETA: 2:13 - loss: 0.347 - ETA: 2:11 - loss: 0.347 - ETA: 2:09 - loss: 0.348 - ETA: 2:07 - loss: 0.348 - ETA: 2:05 - loss: 0.348 - ETA: 2:03 - loss: 0.348 - ETA: 2:01 - loss: 0.348 - ETA: 2:00 - loss: 0.348 - ETA: 1:58 - loss: 0.347 - ETA: 1:56 - loss: 0.347 - ETA: 1:54 - loss: 0.347 - ETA: 1:52 - loss: 0.347 - ETA: 1:50 - loss: 0.347 - ETA: 1:48 - loss: 0.346 - ETA: 1:46 - loss: 0.346 - ETA: 1:45 - loss: 0.346 - ETA: 1:43 - loss: 0.345 - ETA: 1:41 - loss: 0.345 - ETA: 1:39 - loss: 0.345 - ETA: 1:37 - loss: 0.344 - ETA: 1:35 - loss: 0.344 - ETA: 1:33 - loss: 0.344 - ETA: 1:32 - loss: 0.344 - ETA: 1:30 - loss: 0.344 - ETA: 1:28 - loss: 0.344 - ETA: 1:26 - loss: 0.344 - ETA: 1:24 - loss: 0.344 - ETA: 1:22 - loss: 0.344 - ETA: 1:20 - loss: 0.344 - ETA: 1:19 - loss: 0.343 - ETA: 1:17 - loss: 0.343 - ETA: 1:15 - loss: 0.343 - ETA: 1:13 - loss: 0.343 - ETA: 1:11 - loss: 0.343 - ETA: 1:09 - loss: 0.343 - ETA: 1:08 - loss: 0.343 - ETA: 1:06 - loss: 0.343 - ETA: 1:04 - loss: 0.343 - ETA: 1:02 - loss: 0.343 - ETA: 1:00 - loss: 0.342 - ETA: 58s - loss: 0.342 - ETA: 57s - loss: 0.34 - ETA: 55s - loss: 0.34 - ETA: 53s - loss: 0.34 - ETA: 51s - loss: 0.34 - ETA: 49s - loss: 0.34 - ETA: 47s - loss: 0.34 - ETA: 46s - loss: 0.34 - ETA: 44s - loss: 0.34 - ETA: 42s - loss: 0.34 - ETA: 40s - loss: 0.34 - ETA: 38s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 35s - loss: 0.34 - ETA: 33s - loss: 0.34 - ETA: 31s - loss: 0.34 - ETA: 29s - loss: 0.34 - ETA: 27s - loss: 0.34 - ETA: 25s - loss: 0.34 - ETA: 23s - loss: 0.34 - ETA: 22s - loss: 0.34 - ETA: 20s - loss: 0.34 - ETA: 18s - loss: 0.34 - ETA: 16s - loss: 0.34 - ETA: 14s - loss: 0.34 - ETA: 12s - loss: 0.34 - ETA: 11s - loss: 0.34 - ETA: 9s - loss: 0.3402 - ETA: 7s - loss: 0.340 - ETA: 5s - loss: 0.340 - ETA: 3s - loss: 0.339 - ETA: 1s - loss: 0.339 - ETA: 0s - loss: 0.339 - 210s 2s/step - loss: 0.3396 - val_loss: 0.1330\n",
      "\n",
      "Epoch 00021: val_loss did not improve from 0.08220\n",
      "Epoch 22/30\n",
      "111/111 [==============================] - ETA: 3:21 - loss: 0.096 - ETA: 3:19 - loss: 0.173 - ETA: 3:18 - loss: 0.347 - ETA: 3:20 - loss: 0.399 - ETA: 3:16 - loss: 0.425 - ETA: 3:14 - loss: 0.433 - ETA: 3:11 - loss: 0.435 - ETA: 3:09 - loss: 0.430 - ETA: 3:07 - loss: 0.434 - ETA: 3:04 - loss: 0.435 - ETA: 3:03 - loss: 0.434 - ETA: 3:02 - loss: 0.433 - ETA: 2:59 - loss: 0.430 - ETA: 2:57 - loss: 0.427 - ETA: 2:56 - loss: 0.425 - ETA: 2:53 - loss: 0.422 - ETA: 2:52 - loss: 0.419 - ETA: 2:50 - loss: 0.416 - ETA: 2:48 - loss: 0.413 - ETA: 2:46 - loss: 0.409 - ETA: 2:45 - loss: 0.406 - ETA: 2:43 - loss: 0.403 - ETA: 2:41 - loss: 0.400 - ETA: 2:39 - loss: 0.396 - ETA: 2:37 - loss: 0.393 - ETA: 2:35 - loss: 0.389 - ETA: 2:33 - loss: 0.386 - ETA: 2:32 - loss: 0.384 - ETA: 2:30 - loss: 0.382 - ETA: 2:28 - loss: 0.380 - ETA: 2:26 - loss: 0.379 - ETA: 2:24 - loss: 0.378 - ETA: 2:22 - loss: 0.377 - ETA: 2:21 - loss: 0.376 - ETA: 2:19 - loss: 0.375 - ETA: 2:17 - loss: 0.374 - ETA: 2:15 - loss: 0.373 - ETA: 2:13 - loss: 0.372 - ETA: 2:12 - loss: 0.371 - ETA: 2:11 - loss: 0.371 - ETA: 2:10 - loss: 0.371 - ETA: 2:09 - loss: 0.371 - ETA: 2:07 - loss: 0.370 - ETA: 2:05 - loss: 0.370 - ETA: 2:03 - loss: 0.369 - ETA: 2:01 - loss: 0.369 - ETA: 1:59 - loss: 0.368 - ETA: 1:57 - loss: 0.368 - ETA: 1:56 - loss: 0.367 - ETA: 1:54 - loss: 0.367 - ETA: 1:52 - loss: 0.367 - ETA: 1:50 - loss: 0.366 - ETA: 1:48 - loss: 0.366 - ETA: 1:46 - loss: 0.366 - ETA: 1:44 - loss: 0.366 - ETA: 1:42 - loss: 0.365 - ETA: 1:41 - loss: 0.365 - ETA: 1:39 - loss: 0.365 - ETA: 1:37 - loss: 0.364 - ETA: 1:35 - loss: 0.364 - ETA: 1:33 - loss: 0.364 - ETA: 1:31 - loss: 0.363 - ETA: 1:29 - loss: 0.363 - ETA: 1:27 - loss: 0.362 - ETA: 1:25 - loss: 0.362 - ETA: 1:24 - loss: 0.362 - ETA: 1:22 - loss: 0.361 - ETA: 1:20 - loss: 0.361 - ETA: 1:18 - loss: 0.361 - ETA: 1:16 - loss: 0.360 - ETA: 1:14 - loss: 0.360 - ETA: 1:12 - loss: 0.359 - ETA: 1:10 - loss: 0.359 - ETA: 1:08 - loss: 0.359 - ETA: 1:07 - loss: 0.358 - ETA: 1:05 - loss: 0.358 - ETA: 1:03 - loss: 0.357 - ETA: 1:01 - loss: 0.357 - ETA: 59s - loss: 0.357 - ETA: 57s - loss: 0.35 - ETA: 55s - loss: 0.35 - ETA: 53s - loss: 0.35 - ETA: 52s - loss: 0.35 - ETA: 50s - loss: 0.35 - ETA: 48s - loss: 0.35 - ETA: 46s - loss: 0.35 - ETA: 44s - loss: 0.35 - ETA: 42s - loss: 0.35 - ETA: 40s - loss: 0.35 - ETA: 39s - loss: 0.35 - ETA: 37s - loss: 0.35 - ETA: 35s - loss: 0.35 - ETA: 33s - loss: 0.35 - ETA: 31s - loss: 0.35 - ETA: 29s - loss: 0.35 - ETA: 27s - loss: 0.35 - ETA: 25s - loss: 0.35 - ETA: 24s - loss: 0.35 - ETA: 22s - loss: 0.35 - ETA: 20s - loss: 0.35 - ETA: 18s - loss: 0.34 - ETA: 16s - loss: 0.34 - ETA: 14s - loss: 0.34 - ETA: 13s - loss: 0.34 - ETA: 11s - loss: 0.34 - ETA: 9s - loss: 0.3478 - ETA: 7s - loss: 0.347 - ETA: 5s - loss: 0.347 - ETA: 3s - loss: 0.346 - ETA: 1s - loss: 0.346 - ETA: 0s - loss: 0.346 - 212s 2s/step - loss: 0.3457 - val_loss: 0.1432\n",
      "\n",
      "Epoch 00022: val_loss did not improve from 0.08220\n",
      "Epoch 23/30\n",
      "111/111 [==============================] - ETA: 3:23 - loss: 0.126 - ETA: 3:19 - loss: 0.146 - ETA: 3:16 - loss: 0.179 - ETA: 3:16 - loss: 0.189 - ETA: 3:14 - loss: 0.194 - ETA: 3:13 - loss: 0.200 - ETA: 3:11 - loss: 0.212 - ETA: 3:08 - loss: 0.220 - ETA: 3:06 - loss: 0.224 - ETA: 3:04 - loss: 0.228 - ETA: 3:03 - loss: 0.231 - ETA: 3:01 - loss: 0.233 - ETA: 2:59 - loss: 0.235 - ETA: 2:57 - loss: 0.237 - ETA: 2:55 - loss: 0.239 - ETA: 2:53 - loss: 0.242 - ETA: 2:52 - loss: 0.243 - ETA: 2:50 - loss: 0.245 - ETA: 2:48 - loss: 0.248 - ETA: 2:46 - loss: 0.250 - ETA: 2:44 - loss: 0.252 - ETA: 2:43 - loss: 0.253 - ETA: 2:41 - loss: 0.254 - ETA: 2:39 - loss: 0.254 - ETA: 2:37 - loss: 0.255 - ETA: 2:35 - loss: 0.255 - ETA: 2:34 - loss: 0.255 - ETA: 2:32 - loss: 0.256 - ETA: 2:30 - loss: 0.257 - ETA: 2:28 - loss: 0.258 - ETA: 2:26 - loss: 0.258 - ETA: 2:25 - loss: 0.259 - ETA: 2:23 - loss: 0.259 - ETA: 2:21 - loss: 0.260 - ETA: 2:19 - loss: 0.260 - ETA: 2:17 - loss: 0.260 - ETA: 2:15 - loss: 0.260 - ETA: 2:13 - loss: 0.260 - ETA: 2:12 - loss: 0.261 - ETA: 2:10 - loss: 0.261 - ETA: 2:08 - loss: 0.262 - ETA: 2:06 - loss: 0.262 - ETA: 2:04 - loss: 0.263 - ETA: 2:02 - loss: 0.264 - ETA: 2:01 - loss: 0.265 - ETA: 1:59 - loss: 0.265 - ETA: 1:57 - loss: 0.266 - ETA: 1:55 - loss: 0.266 - ETA: 1:53 - loss: 0.267 - ETA: 1:51 - loss: 0.267 - ETA: 1:49 - loss: 0.267 - ETA: 1:48 - loss: 0.268 - ETA: 1:46 - loss: 0.268 - ETA: 1:44 - loss: 0.268 - ETA: 1:42 - loss: 0.269 - ETA: 1:41 - loss: 0.269 - ETA: 1:39 - loss: 0.269 - ETA: 1:37 - loss: 0.270 - ETA: 1:35 - loss: 0.270 - ETA: 1:33 - loss: 0.270 - ETA: 1:31 - loss: 0.271 - ETA: 1:30 - loss: 0.271 - ETA: 1:28 - loss: 0.271 - ETA: 1:26 - loss: 0.271 - ETA: 1:24 - loss: 0.271 - ETA: 1:22 - loss: 0.271 - ETA: 1:20 - loss: 0.271 - ETA: 1:19 - loss: 0.272 - ETA: 1:17 - loss: 0.272 - ETA: 1:15 - loss: 0.272 - ETA: 1:13 - loss: 0.272 - ETA: 1:11 - loss: 0.272 - ETA: 1:09 - loss: 0.272 - ETA: 1:07 - loss: 0.272 - ETA: 1:06 - loss: 0.272 - ETA: 1:04 - loss: 0.272 - ETA: 1:02 - loss: 0.272 - ETA: 1:00 - loss: 0.272 - ETA: 58s - loss: 0.272 - ETA: 56s - loss: 0.27 - ETA: 55s - loss: 0.27 - ETA: 53s - loss: 0.27 - ETA: 51s - loss: 0.27 - ETA: 49s - loss: 0.27 - ETA: 47s - loss: 0.27 - ETA: 45s - loss: 0.27 - ETA: 44s - loss: 0.27 - ETA: 42s - loss: 0.27 - ETA: 40s - loss: 0.27 - ETA: 38s - loss: 0.27 - ETA: 36s - loss: 0.27 - ETA: 34s - loss: 0.27 - ETA: 33s - loss: 0.27 - ETA: 31s - loss: 0.27 - ETA: 29s - loss: 0.27 - ETA: 27s - loss: 0.27 - ETA: 25s - loss: 0.27 - ETA: 23s - loss: 0.27 - ETA: 22s - loss: 0.27 - ETA: 20s - loss: 0.27 - ETA: 18s - loss: 0.27 - ETA: 16s - loss: 0.27 - ETA: 14s - loss: 0.27 - ETA: 12s - loss: 0.27 - ETA: 11s - loss: 0.27 - ETA: 9s - loss: 0.2775 - ETA: 7s - loss: 0.277 - ETA: 5s - loss: 0.277 - ETA: 3s - loss: 0.278 - ETA: 1s - loss: 0.278 - ETA: 0s - loss: 0.278 - 209s 2s/step - loss: 0.2785 - val_loss: 0.1213\n",
      "\n",
      "Epoch 00023: val_loss did not improve from 0.08220\n",
      "Epoch 24/30\n",
      "111/111 [==============================] - ETA: 3:26 - loss: 0.128 - ETA: 3:14 - loss: 0.200 - ETA: 3:13 - loss: 0.226 - ETA: 3:13 - loss: 0.232 - ETA: 3:17 - loss: 0.232 - ETA: 3:21 - loss: 0.228 - ETA: 3:19 - loss: 0.231 - ETA: 3:17 - loss: 0.231 - ETA: 3:16 - loss: 0.237 - ETA: 3:13 - loss: 0.243 - ETA: 3:10 - loss: 0.247 - ETA: 3:08 - loss: 0.251 - ETA: 3:05 - loss: 0.254 - ETA: 3:03 - loss: 0.255 - ETA: 3:01 - loss: 0.256 - ETA: 2:59 - loss: 0.257 - ETA: 2:57 - loss: 0.258 - ETA: 2:54 - loss: 0.261 - ETA: 2:52 - loss: 0.263 - ETA: 2:50 - loss: 0.266 - ETA: 2:48 - loss: 0.268 - ETA: 2:46 - loss: 0.270 - ETA: 2:44 - loss: 0.271 - ETA: 2:42 - loss: 0.272 - ETA: 2:40 - loss: 0.273 - ETA: 2:38 - loss: 0.274 - ETA: 2:36 - loss: 0.275 - ETA: 2:35 - loss: 0.275 - ETA: 2:33 - loss: 0.276 - ETA: 2:31 - loss: 0.276 - ETA: 2:29 - loss: 0.276 - ETA: 2:27 - loss: 0.276 - ETA: 2:25 - loss: 0.276 - ETA: 2:23 - loss: 0.276 - ETA: 2:21 - loss: 0.276 - ETA: 2:19 - loss: 0.277 - ETA: 2:17 - loss: 0.276 - ETA: 2:15 - loss: 0.277 - ETA: 2:14 - loss: 0.277 - ETA: 2:12 - loss: 0.277 - ETA: 2:10 - loss: 0.278 - ETA: 2:08 - loss: 0.278 - ETA: 2:06 - loss: 0.279 - ETA: 2:04 - loss: 0.279 - ETA: 2:02 - loss: 0.280 - ETA: 2:00 - loss: 0.280 - ETA: 1:58 - loss: 0.281 - ETA: 1:57 - loss: 0.281 - ETA: 1:55 - loss: 0.281 - ETA: 1:53 - loss: 0.282 - ETA: 1:51 - loss: 0.282 - ETA: 1:49 - loss: 0.282 - ETA: 1:47 - loss: 0.283 - ETA: 1:45 - loss: 0.283 - ETA: 1:44 - loss: 0.283 - ETA: 1:42 - loss: 0.283 - ETA: 1:40 - loss: 0.283 - ETA: 1:38 - loss: 0.283 - ETA: 1:36 - loss: 0.284 - ETA: 1:34 - loss: 0.284 - ETA: 1:32 - loss: 0.284 - ETA: 1:30 - loss: 0.284 - ETA: 1:29 - loss: 0.284 - ETA: 1:27 - loss: 0.284 - ETA: 1:25 - loss: 0.285 - ETA: 1:23 - loss: 0.285 - ETA: 1:21 - loss: 0.285 - ETA: 1:19 - loss: 0.286 - ETA: 1:17 - loss: 0.286 - ETA: 1:16 - loss: 0.286 - ETA: 1:14 - loss: 0.287 - ETA: 1:12 - loss: 0.287 - ETA: 1:10 - loss: 0.287 - ETA: 1:08 - loss: 0.288 - ETA: 1:06 - loss: 0.288 - ETA: 1:05 - loss: 0.288 - ETA: 1:03 - loss: 0.288 - ETA: 1:01 - loss: 0.288 - ETA: 59s - loss: 0.289 - ETA: 57s - loss: 0.28 - ETA: 55s - loss: 0.28 - ETA: 53s - loss: 0.28 - ETA: 51s - loss: 0.28 - ETA: 50s - loss: 0.28 - ETA: 48s - loss: 0.29 - ETA: 46s - loss: 0.29 - ETA: 44s - loss: 0.29 - ETA: 42s - loss: 0.29 - ETA: 40s - loss: 0.29 - ETA: 38s - loss: 0.29 - ETA: 37s - loss: 0.29 - ETA: 35s - loss: 0.29 - ETA: 33s - loss: 0.29 - ETA: 31s - loss: 0.29 - ETA: 29s - loss: 0.29 - ETA: 27s - loss: 0.29 - ETA: 25s - loss: 0.29 - ETA: 24s - loss: 0.29 - ETA: 22s - loss: 0.29 - ETA: 20s - loss: 0.29 - ETA: 18s - loss: 0.29 - ETA: 16s - loss: 0.29 - ETA: 14s - loss: 0.29 - ETA: 12s - loss: 0.29 - ETA: 11s - loss: 0.29 - ETA: 9s - loss: 0.2931 - ETA: 7s - loss: 0.293 - ETA: 5s - loss: 0.293 - ETA: 3s - loss: 0.293 - ETA: 1s - loss: 0.293 - ETA: 0s - loss: 0.293 - 211s 2s/step - loss: 0.2934 - val_loss: 0.1261\n",
      "\n",
      "Epoch 00024: val_loss did not improve from 0.08220\n",
      "Epoch 25/30\n",
      "111/111 [==============================] - ETA: 3:19 - loss: 0.132 - ETA: 3:27 - loss: 0.184 - ETA: 3:18 - loss: 0.227 - ETA: 3:18 - loss: 0.242 - ETA: 3:15 - loss: 0.247 - ETA: 3:13 - loss: 0.250 - ETA: 3:11 - loss: 0.248 - ETA: 3:09 - loss: 0.254 - ETA: 3:08 - loss: 0.256 - ETA: 3:05 - loss: 0.268 - ETA: 3:05 - loss: 0.278 - ETA: 3:02 - loss: 0.287 - ETA: 3:00 - loss: 0.294 - ETA: 2:58 - loss: 0.299 - ETA: 2:57 - loss: 0.302 - ETA: 2:55 - loss: 0.305 - ETA: 2:53 - loss: 0.308 - ETA: 2:51 - loss: 0.311 - ETA: 2:49 - loss: 0.312 - ETA: 2:47 - loss: 0.314 - ETA: 2:46 - loss: 0.315 - ETA: 2:44 - loss: 0.315 - ETA: 2:43 - loss: 0.315 - ETA: 2:41 - loss: 0.316 - ETA: 2:39 - loss: 0.316 - ETA: 2:38 - loss: 0.316 - ETA: 2:36 - loss: 0.316 - ETA: 2:34 - loss: 0.315 - ETA: 2:32 - loss: 0.315 - ETA: 2:30 - loss: 0.315 - ETA: 2:28 - loss: 0.314 - ETA: 2:26 - loss: 0.313 - ETA: 2:24 - loss: 0.313 - ETA: 2:22 - loss: 0.312 - ETA: 2:20 - loss: 0.311 - ETA: 2:19 - loss: 0.311 - ETA: 2:17 - loss: 0.310 - ETA: 2:15 - loss: 0.310 - ETA: 2:13 - loss: 0.309 - ETA: 2:11 - loss: 0.308 - ETA: 2:09 - loss: 0.308 - ETA: 2:07 - loss: 0.307 - ETA: 2:05 - loss: 0.306 - ETA: 2:03 - loss: 0.305 - ETA: 2:02 - loss: 0.304 - ETA: 2:00 - loss: 0.304 - ETA: 1:58 - loss: 0.304 - ETA: 1:56 - loss: 0.304 - ETA: 1:54 - loss: 0.304 - ETA: 1:52 - loss: 0.304 - ETA: 1:50 - loss: 0.304 - ETA: 1:49 - loss: 0.304 - ETA: 1:47 - loss: 0.305 - ETA: 1:45 - loss: 0.305 - ETA: 1:43 - loss: 0.305 - ETA: 1:41 - loss: 0.306 - ETA: 1:39 - loss: 0.306 - ETA: 1:37 - loss: 0.307 - ETA: 1:35 - loss: 0.307 - ETA: 1:34 - loss: 0.308 - ETA: 1:32 - loss: 0.308 - ETA: 1:30 - loss: 0.309 - ETA: 1:28 - loss: 0.309 - ETA: 1:26 - loss: 0.310 - ETA: 1:24 - loss: 0.311 - ETA: 1:22 - loss: 0.312 - ETA: 1:20 - loss: 0.313 - ETA: 1:19 - loss: 0.313 - ETA: 1:17 - loss: 0.314 - ETA: 1:15 - loss: 0.315 - ETA: 1:13 - loss: 0.316 - ETA: 1:11 - loss: 0.316 - ETA: 1:09 - loss: 0.317 - ETA: 1:08 - loss: 0.318 - ETA: 1:06 - loss: 0.318 - ETA: 1:04 - loss: 0.319 - ETA: 1:02 - loss: 0.319 - ETA: 1:00 - loss: 0.320 - ETA: 58s - loss: 0.320 - ETA: 56s - loss: 0.32 - ETA: 55s - loss: 0.32 - ETA: 53s - loss: 0.32 - ETA: 51s - loss: 0.32 - ETA: 49s - loss: 0.32 - ETA: 47s - loss: 0.32 - ETA: 45s - loss: 0.32 - ETA: 44s - loss: 0.32 - ETA: 42s - loss: 0.32 - ETA: 40s - loss: 0.32 - ETA: 38s - loss: 0.32 - ETA: 36s - loss: 0.32 - ETA: 34s - loss: 0.32 - ETA: 33s - loss: 0.32 - ETA: 31s - loss: 0.32 - ETA: 29s - loss: 0.32 - ETA: 27s - loss: 0.32 - ETA: 25s - loss: 0.32 - ETA: 23s - loss: 0.32 - ETA: 22s - loss: 0.32 - ETA: 20s - loss: 0.32 - ETA: 18s - loss: 0.32 - ETA: 16s - loss: 0.32 - ETA: 14s - loss: 0.32 - ETA: 12s - loss: 0.32 - ETA: 11s - loss: 0.32 - ETA: 9s - loss: 0.3259 - ETA: 7s - loss: 0.326 - ETA: 5s - loss: 0.326 - ETA: 3s - loss: 0.326 - ETA: 1s - loss: 0.326 - ETA: 0s - loss: 0.326 - 209s 2s/step - loss: 0.3266 - val_loss: 0.1262\n",
      "\n",
      "Epoch 00025: val_loss did not improve from 0.08220\n",
      "Epoch 26/30\n",
      "111/111 [==============================] - ETA: 3:19 - loss: 0.443 - ETA: 3:19 - loss: 0.361 - ETA: 3:15 - loss: 0.371 - ETA: 3:15 - loss: 0.386 - ETA: 3:14 - loss: 0.387 - ETA: 3:13 - loss: 0.381 - ETA: 3:11 - loss: 0.373 - ETA: 3:09 - loss: 0.364 - ETA: 3:07 - loss: 0.356 - ETA: 3:04 - loss: 0.347 - ETA: 3:03 - loss: 0.340 - ETA: 3:01 - loss: 0.333 - ETA: 2:58 - loss: 0.328 - ETA: 2:57 - loss: 0.323 - ETA: 2:55 - loss: 0.318 - ETA: 2:53 - loss: 0.313 - ETA: 2:51 - loss: 0.309 - ETA: 2:49 - loss: 0.306 - ETA: 2:47 - loss: 0.302 - ETA: 2:45 - loss: 0.299 - ETA: 2:43 - loss: 0.297 - ETA: 2:42 - loss: 0.295 - ETA: 2:40 - loss: 0.293 - ETA: 2:38 - loss: 0.292 - ETA: 2:36 - loss: 0.292 - ETA: 2:35 - loss: 0.291 - ETA: 2:33 - loss: 0.291 - ETA: 2:31 - loss: 0.290 - ETA: 2:29 - loss: 0.290 - ETA: 2:27 - loss: 0.289 - ETA: 2:26 - loss: 0.289 - ETA: 2:24 - loss: 0.289 - ETA: 2:22 - loss: 0.289 - ETA: 2:20 - loss: 0.289 - ETA: 2:18 - loss: 0.288 - ETA: 2:16 - loss: 0.288 - ETA: 2:15 - loss: 0.288 - ETA: 2:13 - loss: 0.287 - ETA: 2:11 - loss: 0.287 - ETA: 2:09 - loss: 0.287 - ETA: 2:08 - loss: 0.287 - ETA: 2:06 - loss: 0.287 - ETA: 2:04 - loss: 0.287 - ETA: 2:02 - loss: 0.287 - ETA: 2:01 - loss: 0.287 - ETA: 1:59 - loss: 0.287 - ETA: 1:57 - loss: 0.286 - ETA: 1:55 - loss: 0.286 - ETA: 1:53 - loss: 0.286 - ETA: 1:51 - loss: 0.286 - ETA: 1:49 - loss: 0.285 - ETA: 1:48 - loss: 0.285 - ETA: 1:46 - loss: 0.285 - ETA: 1:44 - loss: 0.285 - ETA: 1:42 - loss: 0.284 - ETA: 1:40 - loss: 0.284 - ETA: 1:38 - loss: 0.284 - ETA: 1:36 - loss: 0.284 - ETA: 1:35 - loss: 0.283 - ETA: 1:33 - loss: 0.283 - ETA: 1:31 - loss: 0.283 - ETA: 1:29 - loss: 0.283 - ETA: 1:27 - loss: 0.283 - ETA: 1:25 - loss: 0.283 - ETA: 1:24 - loss: 0.282 - ETA: 1:22 - loss: 0.282 - ETA: 1:20 - loss: 0.282 - ETA: 1:18 - loss: 0.282 - ETA: 1:16 - loss: 0.282 - ETA: 1:14 - loss: 0.282 - ETA: 1:13 - loss: 0.282 - ETA: 1:11 - loss: 0.282 - ETA: 1:09 - loss: 0.282 - ETA: 1:07 - loss: 0.282 - ETA: 1:05 - loss: 0.282 - ETA: 1:04 - loss: 0.282 - ETA: 1:02 - loss: 0.282 - ETA: 1:00 - loss: 0.282 - ETA: 58s - loss: 0.282 - ETA: 56s - loss: 0.28 - ETA: 54s - loss: 0.28 - ETA: 52s - loss: 0.28 - ETA: 51s - loss: 0.28 - ETA: 49s - loss: 0.28 - ETA: 47s - loss: 0.28 - ETA: 45s - loss: 0.28 - ETA: 43s - loss: 0.28 - ETA: 42s - loss: 0.28 - ETA: 40s - loss: 0.28 - ETA: 38s - loss: 0.28 - ETA: 36s - loss: 0.28 - ETA: 34s - loss: 0.28 - ETA: 32s - loss: 0.28 - ETA: 31s - loss: 0.28 - ETA: 29s - loss: 0.28 - ETA: 27s - loss: 0.28 - ETA: 25s - loss: 0.28 - ETA: 23s - loss: 0.28 - ETA: 21s - loss: 0.28 - ETA: 20s - loss: 0.28 - ETA: 18s - loss: 0.28 - ETA: 16s - loss: 0.28 - ETA: 14s - loss: 0.28 - ETA: 12s - loss: 0.28 - ETA: 10s - loss: 0.28 - ETA: 9s - loss: 0.2867 - ETA: 7s - loss: 0.286 - ETA: 5s - loss: 0.287 - ETA: 3s - loss: 0.287 - ETA: 1s - loss: 0.287 - ETA: 0s - loss: 0.287 - 209s 2s/step - loss: 0.2878 - val_loss: 0.1201\n",
      "\n",
      "Epoch 00026: val_loss did not improve from 0.08220\n",
      "Epoch 27/30\n",
      "111/111 [==============================] - ETA: 3:23 - loss: 0.271 - ETA: 3:16 - loss: 0.258 - ETA: 3:17 - loss: 0.260 - ETA: 3:14 - loss: 0.263 - ETA: 3:12 - loss: 0.278 - ETA: 3:10 - loss: 0.283 - ETA: 3:07 - loss: 0.286 - ETA: 3:06 - loss: 0.311 - ETA: 3:04 - loss: 0.325 - ETA: 3:03 - loss: 0.335 - ETA: 3:01 - loss: 0.344 - ETA: 2:59 - loss: 0.349 - ETA: 2:57 - loss: 0.354 - ETA: 2:56 - loss: 0.357 - ETA: 2:54 - loss: 0.358 - ETA: 2:52 - loss: 0.361 - ETA: 2:50 - loss: 0.364 - ETA: 2:48 - loss: 0.366 - ETA: 2:47 - loss: 0.367 - ETA: 2:45 - loss: 0.368 - ETA: 2:43 - loss: 0.368 - ETA: 2:41 - loss: 0.368 - ETA: 2:39 - loss: 0.368 - ETA: 2:38 - loss: 0.367 - ETA: 2:36 - loss: 0.366 - ETA: 2:34 - loss: 0.365 - ETA: 2:32 - loss: 0.364 - ETA: 2:30 - loss: 0.363 - ETA: 2:28 - loss: 0.362 - ETA: 2:27 - loss: 0.361 - ETA: 2:25 - loss: 0.359 - ETA: 2:23 - loss: 0.359 - ETA: 2:21 - loss: 0.358 - ETA: 2:19 - loss: 0.357 - ETA: 2:17 - loss: 0.356 - ETA: 2:16 - loss: 0.355 - ETA: 2:14 - loss: 0.355 - ETA: 2:12 - loss: 0.354 - ETA: 2:10 - loss: 0.353 - ETA: 2:08 - loss: 0.352 - ETA: 2:07 - loss: 0.351 - ETA: 2:05 - loss: 0.350 - ETA: 2:03 - loss: 0.349 - ETA: 2:01 - loss: 0.349 - ETA: 1:59 - loss: 0.348 - ETA: 1:57 - loss: 0.347 - ETA: 1:56 - loss: 0.346 - ETA: 1:54 - loss: 0.345 - ETA: 1:52 - loss: 0.345 - ETA: 1:50 - loss: 0.344 - ETA: 1:48 - loss: 0.343 - ETA: 1:47 - loss: 0.342 - ETA: 1:45 - loss: 0.341 - ETA: 1:43 - loss: 0.341 - ETA: 1:41 - loss: 0.340 - ETA: 1:39 - loss: 0.339 - ETA: 1:38 - loss: 0.339 - ETA: 1:36 - loss: 0.338 - ETA: 1:34 - loss: 0.338 - ETA: 1:32 - loss: 0.337 - ETA: 1:31 - loss: 0.337 - ETA: 1:29 - loss: 0.337 - ETA: 1:27 - loss: 0.337 - ETA: 1:25 - loss: 0.336 - ETA: 1:23 - loss: 0.336 - ETA: 1:21 - loss: 0.336 - ETA: 1:20 - loss: 0.335 - ETA: 1:18 - loss: 0.335 - ETA: 1:16 - loss: 0.335 - ETA: 1:14 - loss: 0.335 - ETA: 1:12 - loss: 0.334 - ETA: 1:11 - loss: 0.334 - ETA: 1:09 - loss: 0.334 - ETA: 1:07 - loss: 0.334 - ETA: 1:05 - loss: 0.333 - ETA: 1:03 - loss: 0.333 - ETA: 1:01 - loss: 0.333 - ETA: 1:00 - loss: 0.333 - ETA: 58s - loss: 0.333 - ETA: 56s - loss: 0.33 - ETA: 54s - loss: 0.33 - ETA: 52s - loss: 0.33 - ETA: 50s - loss: 0.33 - ETA: 49s - loss: 0.33 - ETA: 47s - loss: 0.33 - ETA: 45s - loss: 0.33 - ETA: 43s - loss: 0.33 - ETA: 41s - loss: 0.33 - ETA: 40s - loss: 0.33 - ETA: 38s - loss: 0.33 - ETA: 36s - loss: 0.33 - ETA: 34s - loss: 0.33 - ETA: 32s - loss: 0.33 - ETA: 30s - loss: 0.33 - ETA: 29s - loss: 0.33 - ETA: 27s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 23s - loss: 0.32 - ETA: 21s - loss: 0.32 - ETA: 20s - loss: 0.32 - ETA: 18s - loss: 0.32 - ETA: 16s - loss: 0.32 - ETA: 14s - loss: 0.32 - ETA: 12s - loss: 0.32 - ETA: 10s - loss: 0.32 - ETA: 9s - loss: 0.3282 - ETA: 7s - loss: 0.328 - ETA: 5s - loss: 0.327 - ETA: 3s - loss: 0.327 - ETA: 1s - loss: 0.327 - ETA: 0s - loss: 0.327 - 208s 2s/step - loss: 0.3270 - val_loss: 0.1484\n",
      "\n",
      "Epoch 00027: val_loss did not improve from 0.08220\n",
      "Epoch 28/30\n",
      "111/111 [==============================] - ETA: 3:21 - loss: 0.331 - ETA: 3:20 - loss: 0.377 - ETA: 3:16 - loss: 0.358 - ETA: 3:15 - loss: 0.350 - ETA: 3:11 - loss: 0.335 - ETA: 3:09 - loss: 0.327 - ETA: 3:08 - loss: 0.325 - ETA: 3:06 - loss: 0.320 - ETA: 3:05 - loss: 0.317 - ETA: 3:04 - loss: 0.314 - ETA: 3:03 - loss: 0.310 - ETA: 3:01 - loss: 0.306 - ETA: 3:00 - loss: 0.305 - ETA: 2:58 - loss: 0.304 - ETA: 2:56 - loss: 0.303 - ETA: 2:54 - loss: 0.302 - ETA: 2:52 - loss: 0.301 - ETA: 2:51 - loss: 0.300 - ETA: 2:48 - loss: 0.299 - ETA: 2:47 - loss: 0.299 - ETA: 2:45 - loss: 0.298 - ETA: 2:43 - loss: 0.298 - ETA: 2:41 - loss: 0.298 - ETA: 2:39 - loss: 0.297 - ETA: 2:37 - loss: 0.296 - ETA: 2:35 - loss: 0.296 - ETA: 2:33 - loss: 0.296 - ETA: 2:32 - loss: 0.296 - ETA: 2:30 - loss: 0.296 - ETA: 2:28 - loss: 0.295 - ETA: 2:26 - loss: 0.295 - ETA: 2:24 - loss: 0.296 - ETA: 2:22 - loss: 0.296 - ETA: 2:20 - loss: 0.296 - ETA: 2:19 - loss: 0.296 - ETA: 2:17 - loss: 0.296 - ETA: 2:15 - loss: 0.297 - ETA: 2:13 - loss: 0.297 - ETA: 2:11 - loss: 0.298 - ETA: 2:09 - loss: 0.298 - ETA: 2:07 - loss: 0.299 - ETA: 2:06 - loss: 0.299 - ETA: 2:04 - loss: 0.299 - ETA: 2:02 - loss: 0.300 - ETA: 2:00 - loss: 0.300 - ETA: 1:58 - loss: 0.301 - ETA: 1:56 - loss: 0.301 - ETA: 1:54 - loss: 0.302 - ETA: 1:53 - loss: 0.302 - ETA: 1:51 - loss: 0.303 - ETA: 1:49 - loss: 0.303 - ETA: 1:47 - loss: 0.304 - ETA: 1:45 - loss: 0.304 - ETA: 1:44 - loss: 0.304 - ETA: 1:42 - loss: 0.305 - ETA: 1:40 - loss: 0.305 - ETA: 1:38 - loss: 0.305 - ETA: 1:36 - loss: 0.306 - ETA: 1:34 - loss: 0.306 - ETA: 1:33 - loss: 0.306 - ETA: 1:31 - loss: 0.306 - ETA: 1:29 - loss: 0.307 - ETA: 1:27 - loss: 0.307 - ETA: 1:25 - loss: 0.307 - ETA: 1:23 - loss: 0.307 - ETA: 1:22 - loss: 0.308 - ETA: 1:20 - loss: 0.308 - ETA: 1:18 - loss: 0.308 - ETA: 1:16 - loss: 0.308 - ETA: 1:14 - loss: 0.308 - ETA: 1:12 - loss: 0.308 - ETA: 1:11 - loss: 0.308 - ETA: 1:09 - loss: 0.308 - ETA: 1:07 - loss: 0.308 - ETA: 1:05 - loss: 0.308 - ETA: 1:03 - loss: 0.308 - ETA: 1:02 - loss: 0.308 - ETA: 1:00 - loss: 0.308 - ETA: 58s - loss: 0.308 - ETA: 56s - loss: 0.30 - ETA: 54s - loss: 0.30 - ETA: 53s - loss: 0.30 - ETA: 51s - loss: 0.30 - ETA: 49s - loss: 0.30 - ETA: 47s - loss: 0.30 - ETA: 45s - loss: 0.30 - ETA: 43s - loss: 0.30 - ETA: 42s - loss: 0.30 - ETA: 40s - loss: 0.30 - ETA: 38s - loss: 0.30 - ETA: 36s - loss: 0.30 - ETA: 34s - loss: 0.30 - ETA: 32s - loss: 0.30 - ETA: 31s - loss: 0.30 - ETA: 29s - loss: 0.30 - ETA: 27s - loss: 0.30 - ETA: 25s - loss: 0.30 - ETA: 23s - loss: 0.30 - ETA: 21s - loss: 0.30 - ETA: 20s - loss: 0.30 - ETA: 18s - loss: 0.30 - ETA: 16s - loss: 0.30 - ETA: 14s - loss: 0.30 - ETA: 12s - loss: 0.30 - ETA: 10s - loss: 0.30 - ETA: 9s - loss: 0.3068 - ETA: 7s - loss: 0.306 - ETA: 5s - loss: 0.306 - ETA: 3s - loss: 0.306 - ETA: 1s - loss: 0.306 - ETA: 0s - loss: 0.306 - 208s 2s/step - loss: 0.3069 - val_loss: 0.1306\n",
      "\n",
      "Epoch 00028: val_loss did not improve from 0.08220\n",
      "Epoch 29/30\n",
      "111/111 [==============================] - ETA: 3:29 - loss: 0.055 - ETA: 3:20 - loss: 0.103 - ETA: 3:13 - loss: 0.167 - ETA: 3:15 - loss: 0.197 - ETA: 3:16 - loss: 0.209 - ETA: 3:14 - loss: 0.211 - ETA: 3:12 - loss: 0.213 - ETA: 3:10 - loss: 0.213 - ETA: 3:07 - loss: 0.212 - ETA: 3:06 - loss: 0.214 - ETA: 3:04 - loss: 0.216 - ETA: 3:02 - loss: 0.216 - ETA: 2:59 - loss: 0.220 - ETA: 2:57 - loss: 0.223 - ETA: 2:55 - loss: 0.225 - ETA: 2:53 - loss: 0.227 - ETA: 2:51 - loss: 0.229 - ETA: 2:49 - loss: 0.231 - ETA: 2:47 - loss: 0.233 - ETA: 2:46 - loss: 0.235 - ETA: 2:44 - loss: 0.237 - ETA: 2:42 - loss: 0.238 - ETA: 2:40 - loss: 0.239 - ETA: 2:38 - loss: 0.240 - ETA: 2:36 - loss: 0.242 - ETA: 2:34 - loss: 0.243 - ETA: 2:33 - loss: 0.244 - ETA: 2:31 - loss: 0.245 - ETA: 2:29 - loss: 0.246 - ETA: 2:28 - loss: 0.248 - ETA: 2:26 - loss: 0.249 - ETA: 2:25 - loss: 0.250 - ETA: 2:23 - loss: 0.252 - ETA: 2:21 - loss: 0.254 - ETA: 2:19 - loss: 0.255 - ETA: 2:17 - loss: 0.256 - ETA: 2:15 - loss: 0.258 - ETA: 2:13 - loss: 0.260 - ETA: 2:11 - loss: 0.261 - ETA: 2:10 - loss: 0.262 - ETA: 2:08 - loss: 0.263 - ETA: 2:06 - loss: 0.265 - ETA: 2:04 - loss: 0.266 - ETA: 2:02 - loss: 0.267 - ETA: 2:00 - loss: 0.268 - ETA: 1:59 - loss: 0.269 - ETA: 1:57 - loss: 0.270 - ETA: 1:55 - loss: 0.271 - ETA: 1:53 - loss: 0.271 - ETA: 1:51 - loss: 0.272 - ETA: 1:49 - loss: 0.273 - ETA: 1:47 - loss: 0.274 - ETA: 1:46 - loss: 0.275 - ETA: 1:44 - loss: 0.276 - ETA: 1:42 - loss: 0.277 - ETA: 1:40 - loss: 0.278 - ETA: 1:38 - loss: 0.279 - ETA: 1:36 - loss: 0.280 - ETA: 1:35 - loss: 0.281 - ETA: 1:33 - loss: 0.282 - ETA: 1:31 - loss: 0.283 - ETA: 1:29 - loss: 0.284 - ETA: 1:27 - loss: 0.285 - ETA: 1:25 - loss: 0.286 - ETA: 1:24 - loss: 0.287 - ETA: 1:22 - loss: 0.287 - ETA: 1:20 - loss: 0.288 - ETA: 1:18 - loss: 0.289 - ETA: 1:16 - loss: 0.290 - ETA: 1:14 - loss: 0.290 - ETA: 1:13 - loss: 0.291 - ETA: 1:11 - loss: 0.291 - ETA: 1:09 - loss: 0.292 - ETA: 1:07 - loss: 0.293 - ETA: 1:05 - loss: 0.293 - ETA: 1:03 - loss: 0.294 - ETA: 1:02 - loss: 0.295 - ETA: 1:00 - loss: 0.296 - ETA: 58s - loss: 0.296 - ETA: 56s - loss: 0.29 - ETA: 54s - loss: 0.29 - ETA: 52s - loss: 0.29 - ETA: 51s - loss: 0.29 - ETA: 49s - loss: 0.30 - ETA: 47s - loss: 0.30 - ETA: 45s - loss: 0.30 - ETA: 43s - loss: 0.30 - ETA: 41s - loss: 0.30 - ETA: 40s - loss: 0.30 - ETA: 38s - loss: 0.30 - ETA: 36s - loss: 0.30 - ETA: 34s - loss: 0.30 - ETA: 32s - loss: 0.30 - ETA: 31s - loss: 0.30 - ETA: 29s - loss: 0.30 - ETA: 27s - loss: 0.30 - ETA: 25s - loss: 0.30 - ETA: 23s - loss: 0.30 - ETA: 21s - loss: 0.30 - ETA: 20s - loss: 0.30 - ETA: 18s - loss: 0.30 - ETA: 16s - loss: 0.30 - ETA: 14s - loss: 0.30 - ETA: 12s - loss: 0.30 - ETA: 10s - loss: 0.31 - ETA: 9s - loss: 0.3104 - ETA: 7s - loss: 0.310 - ETA: 5s - loss: 0.311 - ETA: 3s - loss: 0.311 - ETA: 1s - loss: 0.311 - ETA: 0s - loss: 0.311 - 208s 2s/step - loss: 0.3120 - val_loss: 0.1234\n",
      "\n",
      "Epoch 00029: val_loss did not improve from 0.08220\n",
      "Epoch 30/30\n",
      "111/111 [==============================] - ETA: 3:32 - loss: 0.424 - ETA: 3:17 - loss: 0.567 - ETA: 3:16 - loss: 0.593 - ETA: 3:11 - loss: 0.584 - ETA: 3:11 - loss: 0.561 - ETA: 3:09 - loss: 0.536 - ETA: 3:08 - loss: 0.511 - ETA: 3:06 - loss: 0.493 - ETA: 3:04 - loss: 0.480 - ETA: 3:03 - loss: 0.468 - ETA: 3:01 - loss: 0.458 - ETA: 3:00 - loss: 0.447 - ETA: 2:58 - loss: 0.438 - ETA: 2:56 - loss: 0.432 - ETA: 2:54 - loss: 0.427 - ETA: 2:52 - loss: 0.422 - ETA: 2:50 - loss: 0.418 - ETA: 2:49 - loss: 0.414 - ETA: 2:47 - loss: 0.410 - ETA: 2:45 - loss: 0.407 - ETA: 2:43 - loss: 0.404 - ETA: 2:41 - loss: 0.400 - ETA: 2:40 - loss: 0.397 - ETA: 2:38 - loss: 0.393 - ETA: 2:36 - loss: 0.390 - ETA: 2:34 - loss: 0.387 - ETA: 2:32 - loss: 0.383 - ETA: 2:31 - loss: 0.381 - ETA: 2:29 - loss: 0.379 - ETA: 2:27 - loss: 0.376 - ETA: 2:25 - loss: 0.374 - ETA: 2:23 - loss: 0.372 - ETA: 2:21 - loss: 0.370 - ETA: 2:19 - loss: 0.369 - ETA: 2:18 - loss: 0.368 - ETA: 2:16 - loss: 0.367 - ETA: 2:14 - loss: 0.365 - ETA: 2:12 - loss: 0.364 - ETA: 2:11 - loss: 0.363 - ETA: 2:09 - loss: 0.363 - ETA: 2:07 - loss: 0.362 - ETA: 2:05 - loss: 0.361 - ETA: 2:03 - loss: 0.360 - ETA: 2:01 - loss: 0.359 - ETA: 2:00 - loss: 0.359 - ETA: 1:58 - loss: 0.358 - ETA: 1:56 - loss: 0.357 - ETA: 1:55 - loss: 0.357 - ETA: 1:53 - loss: 0.356 - ETA: 1:51 - loss: 0.355 - ETA: 1:49 - loss: 0.355 - ETA: 1:47 - loss: 0.354 - ETA: 1:46 - loss: 0.354 - ETA: 1:44 - loss: 0.353 - ETA: 1:42 - loss: 0.352 - ETA: 1:40 - loss: 0.352 - ETA: 1:38 - loss: 0.351 - ETA: 1:36 - loss: 0.350 - ETA: 1:35 - loss: 0.350 - ETA: 1:33 - loss: 0.349 - ETA: 1:31 - loss: 0.348 - ETA: 1:29 - loss: 0.347 - ETA: 1:27 - loss: 0.347 - ETA: 1:25 - loss: 0.346 - ETA: 1:24 - loss: 0.346 - ETA: 1:22 - loss: 0.345 - ETA: 1:20 - loss: 0.344 - ETA: 1:18 - loss: 0.344 - ETA: 1:16 - loss: 0.343 - ETA: 1:14 - loss: 0.343 - ETA: 1:13 - loss: 0.342 - ETA: 1:11 - loss: 0.342 - ETA: 1:09 - loss: 0.342 - ETA: 1:07 - loss: 0.342 - ETA: 1:05 - loss: 0.341 - ETA: 1:03 - loss: 0.341 - ETA: 1:02 - loss: 0.341 - ETA: 1:00 - loss: 0.341 - ETA: 58s - loss: 0.340 - ETA: 56s - loss: 0.34 - ETA: 54s - loss: 0.34 - ETA: 52s - loss: 0.34 - ETA: 51s - loss: 0.33 - ETA: 49s - loss: 0.33 - ETA: 47s - loss: 0.33 - ETA: 45s - loss: 0.33 - ETA: 43s - loss: 0.33 - ETA: 42s - loss: 0.33 - ETA: 40s - loss: 0.33 - ETA: 38s - loss: 0.33 - ETA: 36s - loss: 0.33 - ETA: 34s - loss: 0.33 - ETA: 32s - loss: 0.33 - ETA: 31s - loss: 0.33 - ETA: 29s - loss: 0.33 - ETA: 27s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 20s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 9s - loss: 0.3335 - ETA: 7s - loss: 0.333 - ETA: 5s - loss: 0.333 - ETA: 3s - loss: 0.332 - ETA: 1s - loss: 0.332 - ETA: 0s - loss: 0.332 - 209s 2s/step - loss: 0.3323 - val_loss: 0.1227\n",
      "\n",
      "Epoch 00030: val_loss did not improve from 0.08220\n",
      "4/4 [==============================] - ETA: 6s - loss: 0.065 - ETA: 4s - loss: 0.101 - ETA: 2s - loss: 0.117 - ETA: 0s - loss: 0.109 - 7s 2s/step - loss: 0.1097\n",
      "Val Score:  0.1097303181886673\n",
      "====================================================================================\n",
      "\n",
      "\n",
      "Computation time :  193.273 min\n"
     ]
    }
   ],
   "source": [
    "t0 = time()\n",
    "n_folds = 2\n",
    "epochs = 30\n",
    "batch_size = 8\n",
    "\n",
    "\n",
    "#save the model history in a list after fitting so that we can plot later\n",
    "model_history = [] \n",
    "\n",
    "for i in range(n_folds):\n",
    "    new_model = efn.EfficientNetB1(weights=None,include_top=False)\n",
    "\n",
    "    input_channel = 4\n",
    "    config = new_model.get_config()\n",
    "    config[\"layers\"][0][\"config\"][\"batch_input_shape\"] = (None, 240, 240, input_channel)\n",
    "    modify_name = config[\"layers\"][1][\"config\"][\"name\"]\n",
    "    custom_model = Model.from_config(config)\n",
    "    \n",
    "    model = create_hybrid_transfer(trainAttrX.shape[1],new_model,custom_model,modify_name, input_channel, weight = False)\n",
    "    opt = Adam(lr=1e-3, decay=1e-3 / 200)\n",
    "    model.compile(loss=\"mean_squared_error\", optimizer=opt)\n",
    "\n",
    "    t_x, val_x, t_y, val_y = custom_shuffle_split(trainAttrX,train_dataset,trainY,test_size = 0.1)    \n",
    "    model_history.append(fit_and_evaluate(t_x, val_x, t_y, val_y, epochs, batch_size,model,es,cp))\n",
    "    print(\"=======\"*12, end=\"\\n\\n\\n\")\n",
    "\n",
    "print(\"Computation time : \", round((time() - t0)/60,3), \"min\")"
   ]
  },
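  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The loop above rebuilds EfficientNetB1 from its serialized config so the first layer accepts the 4-channel CT stacks instead of the usual 3-channel RGB input. Below is a minimal standalone sketch of that config surgery (names and shapes mirror the cell above; with `weights=None` there are no pretrained RGB kernels to reconcile, which is why the channel count can be changed freely):\n",
    "\n",
    "```python\n",
    "import efficientnet.tfkeras as efn\n",
    "from tensorflow.keras.models import Model\n",
    "\n",
    "base = efn.EfficientNetB1(weights=None, include_top=False)\n",
    "\n",
    "# Patch the serialized config so the input layer expects 4 channels.\n",
    "config = base.get_config()\n",
    "config[\"layers\"][0][\"config\"][\"batch_input_shape\"] = (None, 240, 240, 4)\n",
    "\n",
    "# Rebuild the graph from the patched config (importing efficientnet.tfkeras\n",
    "# registers its custom objects such as FixedDropout and swish).\n",
    "custom = Model.from_config(config)\n",
    "print(custom.input_shape)  # (None, 240, 240, 4)\n",
    "```"
   ]
  },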
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "TensorShape([None, 240, 240, 4])"
      ]
     },
     "execution_count": 19,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import tensorflow as tf\n",
    "tf.TensorShape([None, 240, 240, input_channel])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(1093, 6) (140, 240, 240, 4)\n"
     ]
    }
   ],
   "source": [
    "print(trainAttrX.shape,trainImagesX.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(1093, 6) (1093, 240, 240, 4)\n"
     ]
    }
   ],
   "source": [
    "print(trainAttrX.shape,train_dataset.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>First_Percent</th>\n",
       "      <th>Delta_week</th>\n",
       "      <th>First_FVC_scaled</th>\n",
       "      <th>Age_scaled</th>\n",
       "      <th>Sex_le</th>\n",
       "      <th>SmokingStatus_le</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>0.582536</td>\n",
       "      <td>0.067669</td>\n",
       "      <td>-0.631784</td>\n",
       "      <td>1.684379</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.582536</td>\n",
       "      <td>0.082707</td>\n",
       "      <td>-0.631784</td>\n",
       "      <td>1.684379</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.582536</td>\n",
       "      <td>0.097744</td>\n",
       "      <td>-0.631784</td>\n",
       "      <td>1.684379</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>0.582536</td>\n",
       "      <td>0.112782</td>\n",
       "      <td>-0.631784</td>\n",
       "      <td>1.684379</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>0.582536</td>\n",
       "      <td>0.157895</td>\n",
       "      <td>-0.631784</td>\n",
       "      <td>1.684379</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "   First_Percent  Delta_week  First_FVC_scaled  Age_scaled  Sex_le  \\\n",
       "0       0.582536    0.067669         -0.631784    1.684379       1   \n",
       "1       0.582536    0.082707         -0.631784    1.684379       1   \n",
       "2       0.582536    0.097744         -0.631784    1.684379       1   \n",
       "3       0.582536    0.112782         -0.631784    1.684379       1   \n",
       "4       0.582536    0.157895         -0.631784    1.684379       1   \n",
       "\n",
       "   SmokingStatus_le  \n",
       "0                 1  \n",
       "1                 1  \n",
       "2                 1  \n",
       "3                 1  \n",
       "4                 1  "
      ]
     },
     "execution_count": 22,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "trainAttrX.head(5)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXUAAAEICAYAAACgQWTXAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAA5bUlEQVR4nO3dd3xUVfrH8c+TSYM0WkISAiQiiPQSQBAVO6iAXRFFsICrrKuu/ecq6669u6vu2hY7dkVFsaFYEAi9CVICJCQhvZI65/fHGSBASCZhwjCT5/165ZWZO3fuPDcD37lz7jnnijEGpZRS/iHA2wUopZTyHA11pZTyIxrqSinlRzTUlVLKj2ioK6WUH9FQV0opP6KhrtQRRkQmi8jP3q5D+SYNddXsRCRVRE7zdh1NISKjRMQpIiX7/Qz3dm1K1SXQ2wUo5QN2GGMSvF2EUu7QI3XlNSISIiJPi8gO18/TIhLieqyDiHwuIgUikiciP4lIgOuxO0QkXUSKRWS9iJxax7aHiUimiDhqLTtPRFa6bg8VkRQRKRKRLBF5son78IOIPCQii1zb+lRE2tV6fJyIrHHtxw8icmytxzqLyEciki0iuSLy7/22/biI5IvIFhEZU2v5ZBHZ7Nr/LSIysSm1K/+koa686f+A44ABQH9gKHCP67G/AmlANNARuBswInIMMB0YYoyJAM4EUvffsDFmIVAKnFJr8WXA267bzwDPGGMigW7Ae4ewH5OAq4A4oBp4FkBEegDvADe59mMO8JmIBLs+bD4HtgKJQCdgVq1tDgPWAx2AR4FXxApzbX+Ma/9HAMsPoXblZzTUlTdNBO43xuw0xmQDfweucD1WhQ3JrsaYKmPMT8ZOVFQDhAC9RCTIGJNqjNl0kO2/A0wAEJEI4CzXst3bP1pEOhhjSowxv9VTZ7zrSLv2T1itx98wxqw2xpQCfwMudoX2JcAXxphvjDFVwONAK2wQDwXigduMMaXGmHJjTO2To1uNMS8ZY2qA11x/i46ux5xAHxFpZYzJMMasqad21cJoqCtvisceqe621bUM4DFgI/C1q6nhTgBjzEbske8MYKeIzBKReOr2NnC+q0nnfGCpMWb3610N9AB+F5HFInJOPXXuMMa02e+ntNbj2/fbhyDsEfY++2eMcbrW7QR0xgZ39UFeM7PW88pcN8Ndr3sJcB2QISJfiEjPempXLYyGuvKmHUDXWve7uJZhjCk2xvzVGHMUMA64ZXfbuTHmbWPMSNdzDfBIXRs3xqzFhuoY9m16wRjzhzFmAhDjev4H+x19N0bn/fahCsjZf/9ERFzrpmPDvYuINLqzgjFmrjHmdOzR++/AS02sW/khDXV1uASJSGitn0BsU8g9IhItIh2Ae4E3AUTkHBE52hWEhdhmF6eIHCMip7iOvsuBXdjmiIN5G/gLcCLw/u6FInK5iES7jp4LXIvr2059LheRXiLSGrgf+MDVbPIecLaInCoiQdjzBBXAr8AiIAN4WETCXH+T4xt6IRHpKCLjXR9AFUDJIdSt/JCGujpc5mADePfPDOCfQAqwElgFLHUtA+gOfIsNrQXA88aYedj29IexR8KZ2CPtu+p53XeAk4DvjTE5tZaPBtaISAn2pOmlxphdB9lGfB391C+o9fgbwExXPaHAjQDGmPXA5cC/XPWOBcYaYypdoT8WOBrYhj0pfEk9+7FbAHAL9ltAnmvf/uTG81QLIXqRDKWaTkR+AN40xrzs7VqUAj1SV0opv6KhrpRSfkSbX5RSyo/okbpSSvkRr03o1aFDB5OYmOitl1dKKZ+0ZMmSHGNM9MEe91qoJyYmkpKS4q2XV0opnyQiW+t7XJtflFLKj2ioK6WUH3Er1EVktGve6o27J1ba7/GuIvKdiKx0zRmtFxRQSikvaLBN3TWF6HPA6dihzItFZLZrsqTdHgdeN8a8JiKnAA+xdwpVpdQRoKqqirS0NMrLy71dinJDaGgoCQkJBAUFNep57pwoHQpsNMZsBhCRWcB4oHao98LORwEwD/ikUVUopZpdWloaERERJCYmYudJU0cqYwy5ubmkpaWRlJTUqOe60/zSiX3ni05zLattBXa+aoDzgAgRab//hkRkqusSYinZ2dmNKlQpdWjKy8tp3769BroPEBHat2/fpG9VnjpReitwkogsw84al46dKnUfxpgXjTHJxpjk6OiDdrNUSjUTDXTf0dT3yp1QT2ffiwAkuJbtYYzZYYw53xgzEHvdSYwxBU2qqCFbF8C3M0CnN1BKqQO4E+qLge4ikiQiwcClwOzaK4i98vvubd0FvOrZMmvJWA4/PwW78pvtJZRSnpebm8uAAQMYMGAAsbGxdOrUac/9ysrKep+bkpLCjTfe2OBrjBgxwiO1/vDDD0RFRe2p77TTTqt3/cTERHJycg5YPmPGDB5//PEDls+fP59BgwYRGBjIBx984JGad2vwRKkxplpEpgNzAQfwqjFmjYjcD6QYY2YDo4CHRMQA84EbPFplLduqougCmKJ0pHW75noZpZSHtW/fnuXLlwM27MLDw7n11lv3PF5dXU1gYN2RlJycTHJycoOv8euvv3qkVoATTjiBzz//3GPbq61Lly7MnDmzzsA/VG61qRtj5hhjehhjuhljHnAtu9cV6BhjPjDGdHetc40xpsLjlbosL2wFQGnO9gbWVEod6SZPnsx1113HsGHDuP3221m0aBHDhw9n4MCBjBgxgvXr1wP2yPmcc+y1wWfMmMFVV13FqFGjOOqoo3j22Wf3bC88PHzP+qNGjeLCCy+kZ8+eTJw4kd0z0s6ZM4eePXsyePBgbrzxxj3bdcc777xD37596dOnD3fccUed6zzwwAP06NGDkSNH7ql/f4mJifTr14+AAM+P//Ta3C9NFRFtr+NbmLWV8D5eLkYpH/X3z9awdkeRR7fZKz6S+8b2bvTz0tLS+PXXX3E4HBQVFfHTTz8RGBjIt99+y913382HH354wHN+//135s2bR3FxMccccwx/+tOfDujPvWzZMtasWUN8fDzHH388v/zyC8nJyUybNo358+eTlJTEhAkTDlrXTz/9xIABAwC46KKLmDJlCnfccQdLliyhbdu2nHHGGXzyySece+65e56zZMkSZs2axfLly6murmbQoEEMHjy40X+TQ+Fzod4+rjNOI+zKTfN2KUopD7joootwOBwAFBYWcuWVV/LHH38gIlRVVdX5nLPPPpuQkBBCQkKIiYkhKyuLhIR9B7IPHTp0z7IBAwaQmppKeHg4Rx111J6+3xMmTODFF1+s8zX2b3759NNPGTVqFLt77k2cOJH58+fvE+o//fQT5513Hq1btwZg3LhxTfiLHBqfC/X4dpFkE0VNQXrDKyul6tSUI+rmEhYWtuf23/72N04++WQ+/vhjUlNTGTVqVJ3PCQkJ2XPb4XBQXV3dpHX8kc9N6NU+LJidtMNRkuHtUpRSHlZYWEinTnZs48yZMz2+/WOOOYbNmzeTmpoKwLvvvuv2c4cOHcqPP/5ITk4ONTU1vPPOO5x00kn
7rHPiiSfyySefsGvXLoqLi/nss888Wb5bfC7URYTCoGhalWd5uxSllIfdfvvt3HXXXQwcOLBZjqxbtWrF888/z+jRoxk8eDARERFERUW59dy4uDgefvhhTj75ZPr378/gwYMZP378PusMGjSISy65hP79+zNmzBiGDBlS57YWL15MQkIC77//PtOmTaN3b899c/LaNUqTk5NNUy+S8c3jV3Bc6fdE3KdNMEq5a926dRx77LHeLsPrSkpKCA8PxxjDDTfcQPfu3bn55pu9XVad6nrPRGSJMeag/Tt97kgdoCY8lghTApVl3i5FKeVjXnrpJQYMGEDv3r0pLCxk2rRp3i7Jo3zuRClAQGQnyITyvDRCY3t4uxyllA+5+eabj9gjc0/wySP1Vu1tN6XcjFTvFqKUUkcYnwz1yI52AFLxznqvv6qUUi2OT4Z6h3g7cKA8TwcgKaVUbT4Z6h3bt6PQhOEs1N4vSilVm0+GeqAjgNyAdgSWZnq7FKWUm3Tq3b2efPJJevXqRb9+/Tj11FPZutVzTck+2fsFoCgohnAdgKSUz9Cpd/caOHAgKSkptG7dmhdeeIHbb7+9UaNb6+OTR+oA5a06ElV14CejUsp3tNSpd08++eQ9k34dd9xxpKV57vygzx6pO8NjaVeQT3VVJYFBwd4uRynf8uWdkLnKs9uM7QtjHm7001r61LuvvPIKY8aMcf8P1gCfDXVHmwQc6YaMzO3Ede7m7XKUUk3UkqfeffPNN0lJSeHHH3+sd73G8NlQb93BXgs7LyNVQ12pxmrCEXVzaalT73777bc88MAD/Pjjj/vUeqh8tk09yjUAqSRbByAp5S9aytS7y5YtY9q0acyePZuYmJgm709dfDbUO8QnAlCRp33VlfIXLWXq3dtuu42SkhIuuugiBgwY4NErJPnk1LsAGEPF36P5LfoiTrrhBc8VppSf0ql3LZ1690glQl5Ae4J0AJJSqhF06t0jWElwNGEVOgBJKeU+nXr3CFbROpY21Tl4qwlJKV+j/1d8R1PfK7dCXURGi8h6EdkoInfW8XgXEZknIstEZKWInNWkahrJGRFHR/LIK6k4HC+nlE8LDQ0lNzdXg90HGGPIzc0lNDS00c9tsPlFRBzAc8DpQBqwWERmG2PW1lrtHuA9Y8wLItILmAMkNrqaRgpqk0CoVLE5K5P2Ec3+ckr5tISEBNLS0sjOzvZ2KcoNoaGhBwyococ7bepDgY3GmM0AIjILGA/UDnUDRLpuRwE7Gl1JE4TtHoCUmQpHJx6Ol1TKZwUFBe0ZSan8lzvNL52A7bXup7mW1TYDuFxE0rBH6X+ua0MiMlVEUkQkxRNHC21iEwEdgKSUUrt56kTpBGCmMSYBOAt4Q0QO2LYx5kVjTLIxJnn3/AmHIiLaHqlX5esAJKWUAvdCPR3oXOt+gmtZbVcD7wEYYxYAoUAHTxRYH4mIxYlA0WFp7VFKqSOeO6G+GOguIkkiEgxcCszeb51twKkAInIsNtSb/2yMI4hCR1uCd2lfdaWUAjdC3RhTDUwH5gLrsL1c1ojI/SKye8KCvwLXisgK4B1gsjlM/aZKg2MI1wFISikFuDmi1BgzB3sCtPaye2vdXgsc79nS3FPZOpb2pZsoq6ymdbBPD5BVSqlD5tMjSgGIjCdW8kjP3+XtSpRSyut8PtSD2ybQRkrZkZPn7VKUUsrrfD7Uw13dGgsyta+6Ukr5fKhHxNgrIO3K2eblSpRSyvt8PtQdUXZwa1WBDkBSSimfD3Ui4wCQ4gwvF6KUUt7n+6EeHEZZQDghZXoFJKWU8v1QB8pCYoisyqaqxuntUpRSyqv8ItSrwuPoKHlkFpZ7uxSllPIqvwh1iYwjVvJJL9ABSEqpls0vQj2kbWeiKWBHbrG3S1FKKa/yi1APj+6MQwwF2WneLkUppbzKL0I9qK29jt+unO0NrKmUUv7NL0KdyHgAqnUAklKqhfOPUI+woe4o0QFISqmWzT9CvXU7qiWY0PIsDtO1OZRS6ojkH6EuQlloDNEml5ySSm9Xo5RSXuMfoQ7UhMfZi2VoX3WlVAvmN6EeEBlPR/L1CkhKqRbNb0I9tH1n4iSP9PxSb5eilFJe4zehHty2EyFSRX7uTm+XopRSXuM3oS6uvuo6AEkp1ZL5TagTaa+A5CzSAUhKqZbLj0LdXgFJByAppVoyt0JdREaLyHoR2Sgid9bx+FMistz1s0FECjxeaUPCO+IkgDbVORSXVx32l1dKqSNBYEMriIgDeA44HUgDFovIbGPM2t3rGGNurrX+n4GBzVBr/RxBVIa2J7Ykjx0F5RwTG3TYS1BKKW9z50h9KLDRGLPZGFMJzALG17P+BOAdTxTXWDXh8a4BSGXeeHmllPI6d0K9E1C7S0maa9kBRKQrkAR8f5DHp4pIioikZGdnN7bWBjmi4ukoOgBJKdVyefpE6aXAB8aYmroeNMa8aIxJNsYkR0dHe/ilIaRdAnGSR5pOFaCUaqHcCfV0oHOt+wmuZXW5FC81vYDtqx4lpWTn5nmrBKWU8ip3Qn0x0F1EkkQkGBvcs/dfSUR6Am2BBZ4tsRFcA5Aq8vSydkqplqnBUDfGVAPTgbnAOuA9Y8waEblfRMbVWvVSYJbx5oTmrlA3RTu8VoJSSnlTg10aAYwxc4A5+y27d7/7MzxXVhO5roAUuiuLymonwYH+M7ZKKaXc4V+p5xpV2pF8Mgr1ZKlSquXxr1APDqM6OJJYydVujUqpFsm/Qh1wRsQTK/narVEp1SL5XagHRtlRpTs01JVSLZDfhXpAVDzxATqqVCnVMvldqBPZifYUkJlf7O1KlFLqsPO/UI+IIwDDrnztq66Uann8L9RdV0CS4kycTu+Ng1JKKW/ww1C3fdU7OHPILqnwcjFKKXV4+WGo2yN1O6+6nixVSrUs/hfqrdridITYUNceMEqpFsb/Ql0EIuKIlXw9UldKtTj+F+pAQFQnOjm0r7pSquXxy1AnIo5OAXqkrpRqefwz1CPj6WBy2ZGvF6BWSrUsfhvqQaaK0oKd3q5EKaUOK78NdYDwymwKd1V5uRillDp8/DPUXVdA0m6NSqmWxj9DPbJWqOvJUqVUC+KfoR7eESMBriN1PVmqlGo5/DPUHYEQFkNCQAGpuRrqSqmWwz9DHZDIeLq1KiJla563S1FKqcPGb0OdyHgSAvJZu6OI4nLtAaOUahn8OtSjanJwGli6rcDb1Sil1GHhVqiLyGgRWS8iG0XkzoOsc7GIrBWRNSLytmfLbIKIOIIqiwgPqGDxFm2CUUq1DIENrSAiDuA54HQgDVgsIrONMWtrrdMduAs43hiTLyIxzVWw21zzqo/sWMWiVA11pVTL4M6R+lBgozFmszGmEpgFjN9vnWuB54wx+QDGGO+Pz3ddAen46AqWby+gorrGywUppVTzcyfUOwHba91Pcy2rrQfQQ0R+EZHfRGR0XRsSkakikiIiKdnZ2U2r2F2uUaX9o8qorHayKq2weV9PKaWOAJ46URoIdAdGAROAl0Skzf4rGWNeNMYkG2OSo6OjPfTSB+E6Uu
8WWgSgTTBKqRbBnVBPBzrXup/gWlZbGjDbGFNljNkCbMCGvPcEh0FYNGHFWzk6JlxPliqlWgR3Qn0x0F1EkkQkGLgUmL3fOp9gj9IRkQ7Y5pjNniuziWL7QuYKhiS2I2VrPjVO4+2KlFKqWTUY6saYamA6MBdYB7xnjFkjIveLyDjXanOBXBFZC8wDbjPG5DZX0W6L7Qc7f2dY1zCKy6tZn1ns7YqUUqpZNdilEcAYMweYs9+ye2vdNsAtrp8jR1w/cFYxPCIHgMWpefSKj/RyUUop1Xz8d0QpQGx/ADqWbiA+KlRPliql/J5/h3q7oyAoDDJWMiSpHYu35GG/VCillH/y71APCIDYPpC5kiGJ7dhZXMG2PJ2KVynlv/w71MGeLM1cxdDENgAs1K6NSik/5v+hHtcPKks42pFNm9ZB2l9dKeXX/D/UY/sBEJC1kuSu7VisJ0uVUn7M/0M95lgICITMlQxNaktqbhk7i8u9XZVSSjUL/w/1wBCIPtb2gElsB8DiLfleLkoppZqH/4c62Hb1zJX0iY+kVZBDm2CUUn6rZYR6bD8ozSZoVzYDu7RhkZ4sVUr5qZYR6nH2ZOnuJph1mUUU6cWolVJ+qGWEesc+9nfmCoYltcMYWLJV29WVUv6nZYR6aCS0TYKMlQzs0pbAANH+6kopv9QyQh32nCxtFeygT6coPVmqlPJLLSfUY/tBfiqUFzI0qR0rthdSXqUXo1ZK+ZeWE+pxdhpeMlcxJLEdlTVOVurFqJVSfqblhHrs3h4wyV3bAmgTjFLK77ScUI/oCOEdIXMlbcOC6dExXPurK6X8TssJdbBH6xkrARiS2I6lejFqpZSfaVmhHtcPctZDVTlDk9pRXFHNuowib1ellFIe07JCPbYfOKshe93eyb20XV0p5UdaVqjXmi4gvk0rOrVppaGulPIrLSvU2yRCcARk2nb1oUntWLQlXy9GrZTyGy0r1AMCILbvPidLc0oqSM3Vi1ErpfyDW6EuIqNFZL2IbBSRO+t4fLKIZIvIctfPNZ4v1UPi+kHWanDWMDTJ9ldftCXXy0UppZRnNBjqIuIAngPGAL2ACSLSq45V3zXGDHD9vOzhOj0nth9UlUHuJrpFh9MuLJhFeiUkpZSfcOdIfSiw0Riz2RhTCcwCxjdvWc1o98nSzJWICMld2+rJUqWU33An1DsB22vdT3Mt298FIrJSRD4Qkc51bUhEpopIioikZGdnN6FcD4juCY5gyFgB2JOl2/LKyCrSi1ErpXyfp06UfgYkGmP6Ad8Ar9W1kjHmRWNMsjEmOTo62kMv3UiOIIg5dp8eMIBOGaCU8gvuhHo6UPvIO8G1bA9jTK4xpsJ192VgsGfKaya7pwswhl5xkYQF68WolVL+wZ1QXwx0F5EkEQkGLgVm115BROJq3R0HrPNcic0grj/syoOiHQQ6AhjUta0eqSul/EKDoW6MqQamA3OxYf2eMWaNiNwvIuNcq90oImtEZAVwIzC5uQr2iNi9J0sBhndrz++ZxbyXsr2eJyml1JEv0J2VjDFzgDn7Lbu31u27gLs8W1oz6tgbENsEc8wYpoxIYsGmXG7/YCWV1U4uP66rtytUSqkmaVkjSncLCYf23fYcqbcKdvDSpGROOzaGez5ZzSs/b/FygUop1TQtM9Rhn7nVAUKDHDw/cTBj+sTyj8/X8ty8jV4sTimlmqblhnpcPyjcBmV7T5AGBwbwrwkDGT8gnsfmrufJbzboZF9KKZ/iVpu6X9pzsnQVHHXSnsWBjgCevHgAIYEBPPvdH1RU13Dn6J6IiJcKVUop97XcUI/rb39nrtwn1AEcAcLD5/cjODCA//64mYoqJ/eN7aXBrpQ64rXcUA/rABHx+7Sr1xYQIPxjfB9CAh288vMWKqqdPHBuHwICNNiVUkeulhvqYNvVM+sOdQAR4Z6zjyU0KIDn5m2iorqGRy/oR6Cj5Z6KUEod2Vp2qMf2gz++hqpdENSqzlVEhNvO7ElooIMnvtlARbWTpy8ZQJAGu1LqCNSyQz2uHxgnZK2FhPqnq/nzqd0JDgzgoS9/Jzo8hBnjeh+mIpVSyn0t+3BzTw+YFW6tPu2kbkwa3pXXFqSyYntB89WllFJN1LJDvU0XCI066MnSutx65jHERIRw10erqK5xNmNxSinVeC071EXs0Xo9J0v3FxkaxIyxvVmbUcTMX1ObrzallGqClh3qYEM9aw3UVLv9lNF9YjmlZwxPfrOB9IJdzVicUko1joZ6XD+oLofcP9x+iojw93G9MQbu+3S1TiWglDpiaKjvPlnaiHZ1gM7tWnPz6d35dt1O5q7JaobClFKq8TTUO/SAwNBGtavvNuX4JI6Ni2TG7DUUl1c1Q3FKKdU4GuqOQIjpBRnudWusLcgRwIPn9SGruJwnvt7QDMUppVTjaKjD3ukCnDWNfurALm254jjbd31lWoHna1NKqUbQUAc4+jQoL4RFLzbp6beeeQzR4SHc/bH2XVdKeZeGOkDPc6D7GfDd/ZCf2uinR4YGcd/Y3qxOL+K1BVs9X59SSrlJQx3sIKSznwQJgM9ugiZ0UTyrbywnHxPNE1+vZ4f2XVdKeYmG+m5tOsNpM2DzPFj+dqOfLiLcP74PTmO4b/Yaz9enlFJu0FCvLflq6DIc5t4NxY3ve965XWtuPq0H36zNYu6azGYoUCml6qehXltAAIz7l51f/cvbmrSJq0Ym0TM2gvs+XUNJhftTDyillCe4FeoiMlpE1ovIRhG5s571LhARIyLJnivxMOvQHUbdAWs/hXWfNfrpQY4AHjy/L91LFrPq5etwlpc0Q5FKKVW3BkNdRBzAc8AYoBcwQUR61bFeBPAXYKGnizzsRtwIsX3hi7/CrvxGP31QzufMDH6U4dnvs/bRU3n9+xUUlFU2Q6FKKbUvd47UhwIbjTGbjTGVwCxgfB3r/QN4BCj3YH3e4QiCcf+G0hz4+m/uP88Y+OFhmD0dOepElgx8gGOcGxn0wyRGP/gxt76/guXbC3QCMKVUs3En1DsB22vdT3Mt20NEBgGdjTFf1LchEZkqIikikpKdnd3oYg+r+AEwYjosewM2/9Dw+jVVMHs6/PAQ9L+MgInvM3j8dIImzqJXUAafhj3I4lVrOPe5Xxj77595d/E2dlU2fgSrUkrV55BPlIpIAPAk8NeG1jXGvGiMSTbGJEdHRx/qSze/UXdBu6Pgs79AZenB16sogXcuhWVvwom3w7nP26N9gO6nE3DFR3Q0Ocxr9whPntGGymond3y4iqEPfsuM2WvYuFPb3ZVSniENNQWIyHBghjHmTNf9uwCMMQ+57kcBm4DdyRQL5AHjjDEpB9tucnKySUk56MNHjtSfYebZMHw6nPnAgY8XZ8HbF0HmajjnSRg8ue7tpC2BN8+HoNaYSZ+yuKQDb/62lS9XZ1BVY2gfFkx0RAgxkaHERITs/dlzP5SYyBBCgxzNurtKqSObiCwxxhy0M4o7oR4IbABOBdKBxcBlxpg6R9iIyA/ArfUFOvhQqIMdZbr0Nbj6W0gYvHd59gZ46wLb9n7RT
OhxZv3byVwNb5xrb1/xMcT2Jbu4gk+Xp7M5p5SdRRVkF5eTVVRBTkkF1c4D35u2rYPo3jGCnrER9OgYwTGu31Gtgjy1t0qpI9ghh7prI2cBTwMO4FVjzAMicj+QYoyZvd+6P+BvoV5eCM8dB63awNQfITAYtv1mm1zEARPfg06DG9wMADkb4fXxUFkMEz+EzkPqXM3pNOSVVbKzqIKdRWVUpq8iIu0Haoqz+NA5im9yo/fpBx8XFbpPyPeMjaBXXCQBAeKBP4BS6kjhkVBvDj4V6gDrv7QhPupuiOkJH14LUQlw+Qe23b0xCrbBa+OgZCdc9i4knXDgOqU5sOl72Pid/V260y53BENNJSbpRPL6XsPKVsNYv7OUDZnF/J5ZzMbsEiqr7UyRp/fqyHOXDSI4UMeYKeUvNNQ96YOr7KAkZw0kJMOEdyGsfdO2VZwJr58L+Vvg4jeg28mwfRFs+s4GecZyu16rdtDtFDj6VPvbEQxLX7fTBBel2w+UYX+CAZdBSDjVNU625pXx5aoMHv96A2f06shzEwcR5NBgV8ofaKh7Ukk2/PdEG+jn/ReCWx/a9kpz7cnTrNUQ2Mo2yYgDOg+FbqfaII8bYKcv2F9NFaybDQueh/QUCImCwZNg6FRo0wWAmb9sYcZnaxndO5Z/XTZQg10pP6Ch7mk11fYSeJ5SXghf32On/e12Khx1EoRGNW4b2xfDb8/bbxEAx46F466HzkN59ZdU7v98LWP6xPLsBA12pXydhnpLUrAdFr8ES2baD4vuZ8I5T/Lyykr++cU6zuobyzOX+k6wO52GP3aWsCg1j0Vb8hDg/vG9adM62NulKeU1GuotUWUpLH7Fjm4VB5xxPy+Xncg/56zn7H5xPHPJAAKPwGCvrnGyZkcRi1PzWLglj8WpeRSUVQEQGxlKXlklR3UI442rhxEdEeLlapXyDg31lixvix0Nu+VHSDyBWbF/5c4fyhjbP56nLu7vdrBnFpbz9qJtfJCyndLKGloHO1w/gXtvhwTSOshBWEggrYIdtApyEOgQgh0BBAYIgY4AghxCYEAAQYEBBLmWBQisyyhi4ZY8lm7Np9Q1dUJShzCGJLZlaFJ7hiW1I6FtK37dlMs1r6UQFxXKm9cMI75Nq+b86yl1RNJQb+mMsfPXzL0HaipY0PU6Jq5J5pz+CTxZT7AbY/htcx5v/JbK3DVZOI3hpB7RdG3XmrLKGtdPNaWVNeyqrKG0str+rqimrLKmzoFT9ekZG8HQpHb2J7EdMZGhda6XkprHlP8tJrJVEG9fO4yu7cMa/SfZLTWnlLT8XYzs3qHJ21DqcNNQV1ZRBnxxC6yfQ1ZEb67ImcSx/Yfx5MUDcNQaoFRSUc3HS9N447etbMgqIapVEJcM6czEYV0aFaDVNU6qnYbKGifVNYbqGidVTtfvGkO100lVtf2d1CGsUe3kq9IKmfTqQoIDA3jrmmEcHRPRqD9FZbWTF+dv4tnvN1JZ7eTj60cwsEvbRm3jsMpaA6m/wNBr7fV0VYumoa72MgbWfARzbqdmVwH/qhpHWp/reeTiZLbklPDGgq18uDSdkopq+nSKZNJxiYztH0+r4CNvvpn1mcVc/spCapyGN64eSu9493oMLdmax10frWJDVgln941jcWoecVGhfHz98Ufm6NuaanhhBOSstxPMjTroNWpUC6Ghrg5Umgtf3Qmr3uN3Z2eeav1n5hYkEOwI4Ox+cVwxvCsDO7dBjvCjwi05pUx86TdKKqqZedVQBtVztF1UXsWjX/3OWwu3ERcZyj/O7cOpx3bk42Vp3PzuCh69oB8XD+l8yDUt2ZrPp8vT6d7RTtPQMzaCsJDGdYE1xrA9bxfL0woIXPIqZ217jI1BPTi6aoMdH9H/0kOuU/kuDXV1cBvmUvLBdFpV5vBrt5s59tzb6RBRd1u2V9VUwY+P2CaIM/65z6RqafllXP7yQnYWV/DKlUMY3m3fEb7GGL5ancl9s9eQU1LBlOOTuOX0HnuC1hjDhf9ZQGpOKd/fOuqQJkbLL63kzKfnk11Swe7/ViKQ2D6MXnGR9IqPpFdcJMfGRdIxMmTPh2ZeaSUr0gpYsb2A5dvt7/yyKiIo48eQm0kPTuT84tv4JuZZEktWwKRPIHFkk+tUvk1DXdWvvBA+/hOs/wL6Xgxjnzn0kbKelLcFPrzGjpoNjYKKYjuw6uT/21PnzqJyJr68kG15ZfznisGcfEwMADsKdnHvp2v4dl0WveIiefiCvvRLaGO3a4ydjiFnPasTLmHs8wuZPCKR+8b2blKZxhhueHsp36zN4uPrj6dtWDBrdxSxLqOItTuKWJtRxLa8sj3rtwsLpntMOBmF5XuWi0CPmAj6d46if+c2nJH+PB1W/heZ+gN3/BrAV0t+Z2HHRwgtz4FrvrXX01Utjoa6apjTCT89AfMegNg+cMmb0DbR21XByvfh85vtaNtxz9i5b765D5b8D9omwbh/7ZkMLa+0kkmvLmR9ZjFPXzKQ7OJyHpu7nhpjuOX0Hlx1fNLenj5bF8B398O2X+39zsfxUMSdvLx8F3NuPIFjYht34hXgwyVp3Pr+Mt7svZTjI3bCOU/Z2TxrKS6v4vfM4j1hvz6rmNjIUPp3bkP/hDb0TYgifHdTTX4q/HsI9LkQznuBovIqznhyPj1CcnjN+X9IcBhc8x2Eac+dlkZDXbnvj2/gw6sBgQtftXPPNFZaCsx/HLJ/h4ETIflqaN2ucduoKIY5t8GKd6DzcXDBS3vmswFgy08w+892MrTBk+H0+yE0iqLyKqb8bzFLttqLhZ/YI5oHzu1D53aubx4ZK+H7f8AfX0N4RzjxNgiJhM9vwhkUxrVlN1AWP5y3rx3WqPMJ2/PKmPLMxzwd8h/6VK60C4+7HkY/1Lj9ru39ybD+K7hxKUTGA/D971lcNTOFB4eUc9m66yG2H1w5G4L8uL9+WZ6d1TR+gLcrqV9JNhRnQFy/Zn8pDXXVOHmbYdblsHMtnPo3GHmLe93oUn+B+Y/B5nnQqi3E9IatP9uJygZMsCHnTnNB+hLb3JKfai8NeOJtdc+1U1kGPzwIC56D8Fh7ZHzMaEorqnn0q98Z1LUt4/rH23DO3QTf/9P2/AltAyNvshOfBbu6aO5cB+9egTN3E49WXUzfi+/j7P7xbv25amqcPPfsQ0wu+DfhwULAmEcgcxUs+q+dfbPXOLe2s49tC+HVM+CkO+Hku/Z56JZ3lzN7xQ6+P6uQLt/+CXqNhwv/V/ekbx5QWe3k1V+2cErPGHp0bPw3mEN78TJ45XTbpXPcv2DQFYf39d2VtwVeG2tnTT3rMRhyTbO+nIa6arzKUnskvPpDOznYuS9ASB3/oY2xUwXPfxy2LYCwaBjxZ0i+yq6/c50N3ZXvQU0F9BhtLwuYOPLADwqnE3591h5Jh8fao/OuIxquNW2JveD3zrXQ9yIY/fDeJonCdHuCddmbEBhiP1hG/Nle7GR/FcU4P51OwNpPmB8wlOSbZtE6soFplcvy+ON/0+ie/TU5bQfQ4YqZ0C4Jqivhf6Mh5w+Y+gO079bw
ftT+O7xymq39xqV7P3hcCsoqOe3J+cREhPDZ4KU4vr0XRt4Mp81w/zXcVFFdww1vLeXbdTuJCAnkv5MGM6LbYWruMQY+ud5+W4vrBxkr4KzHbV/9I0nuJph5DlTvgrj+9iL1I2+GU+9rtjEFGuqqaYyxMz9+/TdofzRc+tbeI22nEzZ8aY/MdyyDyE5w/E32SKqupoCSnXYumsUvQVmubTYYPh16n2fbnYsz4eNp9j9Er/H2ZG2rRgwGqq6En5+0Hy6hkXD6P2zIL3oJMPZD5oS/QnhMg/u8dc4TxC96kNJWcbSZPAti+9a97qZ5VH10HZRkM6fDZMZd/yhS+xtF/lY7TXObzvYyiEFu9ipa9YFtAhv/vG2+qsNXqzO57s0l3Hp6d6aXPW/PMYx95uDXx22C8qoa/vTmEuatz+aW03vw+codbMkp5YmLBzDOzW8xh2TJTDvFxUl32G+L70+2/+bOeABGTG/+13dH9np7sRtnFUyaDdE9Yc6t9v3odwmM+/cB51U8QUNdHZot8+H9KVBTCec+b3/PfwJ2rrEnU0feAv0nuPePt2qXPWpf8JwdTBMRZ4+ul79lv2qPeQQGTWr6EU7WWnvUnr7EnlztP8EO1qndHu+Gp199nQlb7yU6cBcBY5+yFyCpvQ/f/h0WvsC2gATuCbiRZ26eQtuwOvZ//VfwziUweAqMfbrhF67aZU+OtmprL5tYT5PKDW8v5Zs1WXw+/Th6fHcNbJoHE9+v/zxIeZG9EMu2BfbHWWOb2PbrHlleVcO0N5bw44ZsHjivDxOHdaVwVxVTX09h4ZY8/u+sY7nmhKTmG8ewYzm8cgYkHg8TP4AAh/3g/uhaWPsJnHwPnHRb87y2u7LWwuvjALHnNWKOtcuNsZ0Ovv8HJJ0El7zR+Km0G6Chrg5dYRq8e7k9KgfocIw98u1zQdPmlnc6bbPNgn/bo/PYvnDBqxDd49BrddbAhq/st4voY5q0iczCci564hNeav0CPcuX2yPg0Y9AzgYbLNm/syj6QiZtP5sXrzqBE3tEH3xj39wLvzwD578E/S6u/4V/esL2yrnyM0g6sd5Vc0oqOOOp+XRu15oPp/Qm8LWz7beDq7+Gjr3sSkUZrgD/zfb0yVoDxmln7ozrD6XZULjd9rA54x8QGc+uyhqmvpHCzxtzePj8vlwyZO8HYkV1Dbe8t4IvVmYw5fhE/nZ2L8+Pwi3LgxdPsu/jtJ/2vbJYTTV8egOsnGX//Z3yN+9Mm5Cx0l5nODDEvld1nSta/o49wIjuaT9sIz337UZDXXlGVTn8+i/7D/jYcZ47MVecZXvHOJo+6Kc5vPDDJh7/ag3zBv1Ml7X/hfbd7cnb1u1ZNeRBxn4ZyuQRicwY10C/9poqexItYyVMnXfwD5qSnfDsQHt0N+Ftt2qcvWIHN76zjLvP6snU/iHw8mk2sJNOtCGen2pXDGoNCUPsOYoux0GnZAgJt9+Ofnkafn4aHEFUjryVq39P5uctRTx6QT8uSj5whK3TaXhgzjpe+XkLZ/WN5cmLBxAa5KFpJJxOex3gTd/DlC/rvii70wlf3GybZ467Ac584PAGe/pSeOM8CA63R+j1nS/Z9D28O8k2CV7+4d6j+UOkoa5UE1RU1zD66Z8QYO5ZJQR9Nh2STqLw1Ec447+riQgN4vM/j3Qv0Ip2wH9GQlgMXPvdASc/Adt+vOxNuH4hdDjarRqNMUx9YwnzN2Tz5V9O4KjqTfC/s+0RZNfh0GW4DfHYfvV/aOZtpnrOXQRu/IpNzjgyjr+fkWfW/63i5Z82888v1jE0sR0vTUomqrUHPpTnP26bLcY8BsOmHnw9Y+w0Fwv/Y8+XnPVEs/X+2cf2xfbyk63a2CN0d8ZyZKyEty6yTWuXvlX3ReYbSUNdqSaa9/tOpsxczF1jejLthCSMCNPfWcbc1Zl8csPx9OnUiLbSjd/BmxfYeVvOfWHfo8ustfCf42HoNBjzcKNq3FlUzmlP/kiPjhG8N204ATUVNtQbcfRaUlHNlP8tInL79zzTZhbhpdvtt7EzH7Qneg/isxU7+Ot7K+javjUzrxpKp3rmt989n81vm3NZsDmX1emFnDuwE386qZttwtn8gz0C7n0+XPByw/UbA9/9HX5+CgZMtF0eA5px4rmtC2w4h3WwgV7P3+UABdvte5+/xb73fS88pFIaCvUj7/I3Sh0hTu4Zw6k9Y3j2uz/IKqnkk+XpfLEyg5tP79G4QAd7AvOk220XvWVv7vvY1/fYQVAn3d7oGmMiQ7l3bG9Stubz+oJU28umEYFeXF7FpFcWsnRbAedefBXhN6XAKffYgWj/HmJ7OFWV1/ncsf3jee2qoWQWlXP+87+wLqNon8fTC3bxwZI0/vreCkY+Mo8TH5vH7R+u5Kc/sgkLCeSxueu55vUUCrO2wgdX2yausc+4V7+I7TY46m57ov3Da2xTV3PY8pMN5YiOMGVO4wIdXD2g5tomsA+vts2YzXgwrUfqStUjNaeUM56az4ij27MkNZ+ecRHMmjp8nzno3easgTfOtT1QrvnOTsnwx7fw1gVw5kMw/Pom1WiMYcrMxSzcnMfcm06kS3v35u4pKq9i0iuLWJ1eyL8mDGRM37i9DxZsh7l3w7rZdkqGU+6BXufWeWJ8fWYxV766iNKKav5yWnc2ZBWzYHMu2/N2AdC2dRDHHdWe4d3aM/yo9hwdEw7Am79t5aHPV/JuyD/pFbAdx7QfmnZy+5dn7AnpnufY0cVRCfbbyqHaPQ5j1uXQtqvtthjRsenbqyqHT66DNR/b8RTH/alJm/FI84uIjAaeARzAy8aYh/d7/DrgBqAGKAGmGmPW1rdNDXXlKx6b+zvPzdtEeEggX/7lhL3TDjRFyU7bvh4cbtvXXx1tu4lev/CQ+jTvKNjFGU/Np2+nKN6+dhgAlTVOKqqdVFbv/W1v11Be5eSBL9ayNqOIf182iDN7x9a94U3fw1d32Wkf2nS1g7cGXn7AeISMwl1MfnUx67OKiWoVxLCkdjbEu7WnR0zEQXvJ7HzvJmLW/o+bqm9kyNhruGxol6Z1lVz4InxZq5tjRJztynrAT1cb+o5gO2aiMM2e8yhKd/3ssAO/dt+uqbCjoyd9CuH19HJyl9MJvzwFAyc1eXuHHOoi4gA2AKcDacBiYELt0BaRSGNMkev2OOB6Y8zo+raroa58RVllNTe8tZSLkzvvezTbVKk/2x4xUZ2hYCtc8hYce84hb/bthdu4++NVBDmEqpqGD9aCHQE8P3EQp/Vq4OjT6YT1c2z7dXoKtO5gjzKHXLPP6NzyqhrS8neR1CHMvW8yqz+CD6ZQPuhapuVczI8bsjl/YCceOK9v0y7MkrkKMlfbuWIKttm/bcE2G9DO6n3XdQTbD9PaAoIgMs4OpouMt7/bdLFt4I0ZDNfMPBHqw4EZxpgzXffvAjDG1DlbkYhMACYZY8bUt10NddWi7e6T3nUkTP7cI93yjDG8+dtW0gvKCQ4MIMT1E1zrd7DDsed2Yvswt5tqXC8AW3+xXSA3fgPBEZA82XYtjGzkh132Bnj
pZIjpBZO/wBkQxL++38jT322gR0wEL1w+iKOiwxu3zYNx1tjJtvaE/TaoLKkV3vEQmWCnuTgcvWgOkSdC/UJgtDHmGtf9K4Bhxpjp+613A3ALEAycYoz5o45tTQWmAnTp0mXw1q1bG7k7SvkJpxNSXoEeZzZ6xOsRIXOVDfc1H0FAoO3VM+Ive7tjGmPn6i/ZCSWZrt9ZdkqIkp32w6GqzA4wiuq0Z7M/bsjmplnLqKoxPHZhP898M/Izhy3Ua61/GXCmMebK+rarR+pK+YG8LXZk8LI3obrCjg4uL7DBXV1HrxlHiJ32ODIeTruvzknbdhTs4vq3lrJ8ewFXj0zizjE9CXIc+UfQh4s3ml8CgHxjTL19vjTUlfIjJdl2MFB6ih1kFR4DEbE2wMM7um7H2KmP3Whqqqx28uCcdcz8NZXkrm3535QhRIQeWaOOvaWhUHdn4o7FQHcRSQLSgUuBy2qvICLdazW3nA0c0PSilPJj4dF2cjAPCQ4MYMa43gzq2pYf12fvvSKUalCDfyljTLWITAfmYrs0vmqMWSMi9wMpxpjZwHQROQ2oAvKBeptelFLKHeP6xx+eqX79iFsff8aYOcCc/ZbdW+v2Xzxcl1JKqSbQsw9KKeVHNNSVUsqPaKgrpZQf0VBXSik/oqGulFJ+RENdKaX8iIa6Ukr5Ea9dJENEsoGmzujVAcjxYDlHAn/bJ3/bH/C/ffK3/QH/26e69qerMeagk7F7LdQPhYik1Df3gS/yt33yt/0B/9snf9sf8L99asr+aPOLUkr5EQ11pZTyI74a6i96u4Bm4G/75G/7A/63T/62P+B/+9To/fHJNnWllFJ189UjdaWUUnXQUFdKKT/ic6EuIqNFZL2IbBSRO71dz6ESkVQRWSUiy0XEJ6/vJyKvishOEVlda1k7EflGRP5w/W7rzRob4yD7M0NE0l3v03IROcubNTaWiHQWkXkislZE1ojIX1zLffJ9qmd/fPZ9EpFQEVkkIitc+/R31/IkEVnoyrx3RSS43u34Upu6iDiADcDpQBr2UnsTjDFrvVrYIRCRVCDZGOOzAyZE5ESgBHjdGNPHtexRIM8Y87Drw7etMeYOb9bproPszwygxBjzuDdrayoRiQPijDFLRSQCWAKcC0zGB9+nevbnYnz0fRIRAcKMMSUiEgT8DPwFuAX4yBgzS0T+A6wwxrxwsO342pH6UGCjMWazMaYSmAWM93JNLZ4xZj6Qt9/i8cBrrtuvYf/D+YSD7I9PM8ZkGGOWum4XA+uATvjo+1TP/vgsY5W47ga5fgxwCvCBa3mD75GvhXonYHut+2n4+BuJfdO+FpElIjLV28V4UEdjTIbrdibQ0ZvFeMh0EVnpap7xiWaKuohIIjAQWIgfvE/77Q/48PskIg4RWQ7sBL4BNgEFxphq1yoNZp6vhbo/GmmMGQSMAW5wffX3K8a28flOO1/dXgC6AQOADOAJr1bTRCISDnwI3GSMKar9mC++T3Xsj0+/T8aYGmPMACAB2zLRs7Hb8LVQTwc617qf4Frms4wx6a7fO4GPsW+kP8hytXvubv/c6eV6DokxJsv1H84JvIQPvk+udtoPgbeMMR+5Fvvs+1TX/vjD+wRgjCkA5gHDgTYiEuh6qMHM87VQXwx0d50NDgYuBWZ7uaYmE5Ew10keRCQMOANYXf+zfMZs4ErX7SuBT71YyyHbHXwu5+Fj75PrJNwrwDpjzJO1HvLJ9+lg++PL75OIRItIG9ftVtgOIeuw4X6ha7UG3yOf6v0C4Oqi9DTgAF41xjzg3YqaTkSOwh6dAwQCb/vi/ojIO8Ao7DShWcB9wCfAe0AX7BTLFxtjfOLk40H2ZxT2K70BUoFptdqij3giMhL4CVgFOF2L78a2Q/vc+1TP/kzAR98nEemHPRHqwB5wv2eMud+VE7OAdsAy4HJjTMVBt+Nroa6UUurgfK35RSmlVD001JVSyo9oqCullB/RUFdKKT+ioa6UUn5EQ10ppfyIhrpSSvmR/wffN0PjxrmkoAAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "import matplotlib.pyplot as plt \n",
    "\n",
    "plt.title('Loss vs Epochs')\n",
    "plt.plot(model_history[0].history['loss'], label='Training Fold 1')\n",
    "plt.plot(model_history[1].history['loss'], label='Training Fold 2')\n",
    "plt.legend()\n",
    "plt.show()"
   ]
  },
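  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The plot above only shows the training curves. Each history object also carries the `val_loss` values visible in the logs, so the validation curves can be overlaid the same way (a small sketch reusing the `model_history` list from the training loop):\n",
    "\n",
    "```python\n",
    "plt.title('Loss vs Epochs')\n",
    "for i, h in enumerate(model_history):\n",
    "    plt.plot(h.history['loss'], label=f'Training Fold {i + 1}')\n",
    "    plt.plot(h.history['val_loss'], '--', label=f'Validation Fold {i + 1}')\n",
    "plt.xlabel('Epoch')\n",
    "plt.ylabel('MSE loss')\n",
    "plt.legend()\n",
    "plt.show()\n",
    "```"
   ]
  },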
  {
   "cell_type": "code",
   "execution_count": 36,
   "metadata": {},
   "outputs": [],
   "source": [
    "#Load model\n",
    "from keras.models import load_model\n",
    "from efficientnet.tfkeras import EfficientNetB1\n",
    "model = None\n",
    "model = load_model('clean_notebooks/cnn_injection_transfer.h5')"
   ]
  },
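  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "With the custom EfficientNet objects imported, the reloaded checkpoint behaves like the in-memory model. A quick sanity check could look like this (a sketch; it assumes the attribute matrix comes first and the image tensor second, the input order used at training time):\n",
    "\n",
    "```python\n",
    "preds = model.predict([trainAttrX.values[:8], train_dataset[:8]], batch_size=8)\n",
    "print(preds.shape)  # one scaled FVC prediction per row, e.g. (8, 1)\n",
    "```"
   ]
  },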
  {
   "cell_type": "code",
   "execution_count": 37,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model: \"model_1\"\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "input_1 (InputLayer)            [(None, 240, 240, 4) 0                                            \n",
      "__________________________________________________________________________________________________\n",
      "stem_conv (Conv2D)              (None, 120, 120, 32) 1152        input_1[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "stem_bn (BatchNormalization)    (None, 120, 120, 32) 128         stem_conv[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "stem_activation (Activation)    (None, 120, 120, 32) 0           stem_bn[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "block1a_dwconv (DepthwiseConv2D (None, 120, 120, 32) 288         stem_activation[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "block1a_bn (BatchNormalization) (None, 120, 120, 32) 128         block1a_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block1a_activation (Activation) (None, 120, 120, 32) 0           block1a_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block1a_se_squeeze (GlobalAvera (None, 32)           0           block1a_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block1a_se_reshape (Reshape)    (None, 1, 1, 32)     0           block1a_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block1a_se_reduce (Conv2D)      (None, 1, 1, 8)      264         block1a_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block1a_se_expand (Conv2D)      (None, 1, 1, 32)     288         block1a_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block1a_se_excite (Multiply)    (None, 120, 120, 32) 0           block1a_activation[0][0]         \n",
      "                                                                 block1a_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block1a_project_conv (Conv2D)   (None, 120, 120, 16) 512         block1a_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block1a_project_bn (BatchNormal (None, 120, 120, 16) 64          block1a_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block1b_dwconv (DepthwiseConv2D (None, 120, 120, 16) 144         block1a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block1b_bn (BatchNormalization) (None, 120, 120, 16) 64          block1b_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block1b_activation (Activation) (None, 120, 120, 16) 0           block1b_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block1b_se_squeeze (GlobalAvera (None, 16)           0           block1b_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block1b_se_reshape (Reshape)    (None, 1, 1, 16)     0           block1b_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block1b_se_reduce (Conv2D)      (None, 1, 1, 4)      68          block1b_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block1b_se_expand (Conv2D)      (None, 1, 1, 16)     80          block1b_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block1b_se_excite (Multiply)    (None, 120, 120, 16) 0           block1b_activation[0][0]         \n",
      "                                                                 block1b_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block1b_project_conv (Conv2D)   (None, 120, 120, 16) 256         block1b_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block1b_project_bn (BatchNormal (None, 120, 120, 16) 64          block1b_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block1b_drop (FixedDropout)     (None, 120, 120, 16) 0           block1b_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block1b_add (Add)               (None, 120, 120, 16) 0           block1b_drop[0][0]               \n",
      "                                                                 block1a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2a_expand_conv (Conv2D)    (None, 120, 120, 96) 1536        block1b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block2a_expand_bn (BatchNormali (None, 120, 120, 96) 384         block2a_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block2a_expand_activation (Acti (None, 120, 120, 96) 0           block2a_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2a_dwconv (DepthwiseConv2D (None, 60, 60, 96)   864         block2a_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block2a_bn (BatchNormalization) (None, 60, 60, 96)   384         block2a_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block2a_activation (Activation) (None, 60, 60, 96)   0           block2a_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block2a_se_squeeze (GlobalAvera (None, 96)           0           block2a_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2a_se_reshape (Reshape)    (None, 1, 1, 96)     0           block2a_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2a_se_reduce (Conv2D)      (None, 1, 1, 4)      388         block2a_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2a_se_expand (Conv2D)      (None, 1, 1, 96)     480         block2a_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2a_se_excite (Multiply)    (None, 60, 60, 96)   0           block2a_activation[0][0]         \n",
      "                                                                 block2a_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2a_project_conv (Conv2D)   (None, 60, 60, 24)   2304        block2a_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2a_project_bn (BatchNormal (None, 60, 60, 24)   96          block2a_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block2b_expand_conv (Conv2D)    (None, 60, 60, 144)  3456        block2a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2b_expand_bn (BatchNormali (None, 60, 60, 144)  576         block2b_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block2b_expand_activation (Acti (None, 60, 60, 144)  0           block2b_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2b_dwconv (DepthwiseConv2D (None, 60, 60, 144)  1296        block2b_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block2b_bn (BatchNormalization) (None, 60, 60, 144)  576         block2b_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block2b_activation (Activation) (None, 60, 60, 144)  0           block2b_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block2b_se_squeeze (GlobalAvera (None, 144)          0           block2b_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2b_se_reshape (Reshape)    (None, 1, 1, 144)    0           block2b_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2b_se_reduce (Conv2D)      (None, 1, 1, 6)      870         block2b_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2b_se_expand (Conv2D)      (None, 1, 1, 144)    1008        block2b_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2b_se_excite (Multiply)    (None, 60, 60, 144)  0           block2b_activation[0][0]         \n",
      "                                                                 block2b_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2b_project_conv (Conv2D)   (None, 60, 60, 24)   3456        block2b_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2b_project_bn (BatchNormal (None, 60, 60, 24)   96          block2b_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block2b_drop (FixedDropout)     (None, 60, 60, 24)   0           block2b_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2b_add (Add)               (None, 60, 60, 24)   0           block2b_drop[0][0]               \n",
      "                                                                 block2a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2c_expand_conv (Conv2D)    (None, 60, 60, 144)  3456        block2b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block2c_expand_bn (BatchNormali (None, 60, 60, 144)  576         block2c_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block2c_expand_activation (Acti (None, 60, 60, 144)  0           block2c_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2c_dwconv (DepthwiseConv2D (None, 60, 60, 144)  1296        block2c_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block2c_bn (BatchNormalization) (None, 60, 60, 144)  576         block2c_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block2c_activation (Activation) (None, 60, 60, 144)  0           block2c_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block2c_se_squeeze (GlobalAvera (None, 144)          0           block2c_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2c_se_reshape (Reshape)    (None, 1, 1, 144)    0           block2c_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2c_se_reduce (Conv2D)      (None, 1, 1, 6)      870         block2c_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2c_se_expand (Conv2D)      (None, 1, 1, 144)    1008        block2c_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2c_se_excite (Multiply)    (None, 60, 60, 144)  0           block2c_activation[0][0]         \n",
      "                                                                 block2c_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2c_project_conv (Conv2D)   (None, 60, 60, 24)   3456        block2c_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block2c_project_bn (BatchNormal (None, 60, 60, 24)   96          block2c_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block2c_drop (FixedDropout)     (None, 60, 60, 24)   0           block2c_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block2c_add (Add)               (None, 60, 60, 24)   0           block2c_drop[0][0]               \n",
      "                                                                 block2b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block3a_expand_conv (Conv2D)    (None, 60, 60, 144)  3456        block2c_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block3a_expand_bn (BatchNormali (None, 60, 60, 144)  576         block3a_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block3a_expand_activation (Acti (None, 60, 60, 144)  0           block3a_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3a_dwconv (DepthwiseConv2D (None, 30, 30, 144)  3600        block3a_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block3a_bn (BatchNormalization) (None, 30, 30, 144)  576         block3a_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block3a_activation (Activation) (None, 30, 30, 144)  0           block3a_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block3a_se_squeeze (GlobalAvera (None, 144)          0           block3a_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3a_se_reshape (Reshape)    (None, 1, 1, 144)    0           block3a_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3a_se_reduce (Conv2D)      (None, 1, 1, 6)      870         block3a_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3a_se_expand (Conv2D)      (None, 1, 1, 144)    1008        block3a_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3a_se_excite (Multiply)    (None, 30, 30, 144)  0           block3a_activation[0][0]         \n",
      "                                                                 block3a_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3a_project_conv (Conv2D)   (None, 30, 30, 40)   5760        block3a_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3a_project_bn (BatchNormal (None, 30, 30, 40)   160         block3a_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block3b_expand_conv (Conv2D)    (None, 30, 30, 240)  9600        block3a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3b_expand_bn (BatchNormali (None, 30, 30, 240)  960         block3b_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block3b_expand_activation (Acti (None, 30, 30, 240)  0           block3b_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3b_dwconv (DepthwiseConv2D (None, 30, 30, 240)  6000        block3b_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block3b_bn (BatchNormalization) (None, 30, 30, 240)  960         block3b_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block3b_activation (Activation) (None, 30, 30, 240)  0           block3b_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block3b_se_squeeze (GlobalAvera (None, 240)          0           block3b_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3b_se_reshape (Reshape)    (None, 1, 1, 240)    0           block3b_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3b_se_reduce (Conv2D)      (None, 1, 1, 10)     2410        block3b_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3b_se_expand (Conv2D)      (None, 1, 1, 240)    2640        block3b_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3b_se_excite (Multiply)    (None, 30, 30, 240)  0           block3b_activation[0][0]         \n",
      "                                                                 block3b_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3b_project_conv (Conv2D)   (None, 30, 30, 40)   9600        block3b_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3b_project_bn (BatchNormal (None, 30, 30, 40)   160         block3b_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block3b_drop (FixedDropout)     (None, 30, 30, 40)   0           block3b_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3b_add (Add)               (None, 30, 30, 40)   0           block3b_drop[0][0]               \n",
      "                                                                 block3a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3c_expand_conv (Conv2D)    (None, 30, 30, 240)  9600        block3b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block3c_expand_bn (BatchNormali (None, 30, 30, 240)  960         block3c_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block3c_expand_activation (Acti (None, 30, 30, 240)  0           block3c_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3c_dwconv (DepthwiseConv2D (None, 30, 30, 240)  6000        block3c_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block3c_bn (BatchNormalization) (None, 30, 30, 240)  960         block3c_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block3c_activation (Activation) (None, 30, 30, 240)  0           block3c_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block3c_se_squeeze (GlobalAvera (None, 240)          0           block3c_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3c_se_reshape (Reshape)    (None, 1, 1, 240)    0           block3c_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3c_se_reduce (Conv2D)      (None, 1, 1, 10)     2410        block3c_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3c_se_expand (Conv2D)      (None, 1, 1, 240)    2640        block3c_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3c_se_excite (Multiply)    (None, 30, 30, 240)  0           block3c_activation[0][0]         \n",
      "                                                                 block3c_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3c_project_conv (Conv2D)   (None, 30, 30, 40)   9600        block3c_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block3c_project_bn (BatchNormal (None, 30, 30, 40)   160         block3c_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block3c_drop (FixedDropout)     (None, 30, 30, 40)   0           block3c_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block3c_add (Add)               (None, 30, 30, 40)   0           block3c_drop[0][0]               \n",
      "                                                                 block3b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block4a_expand_conv (Conv2D)    (None, 30, 30, 240)  9600        block3c_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block4a_expand_bn (BatchNormali (None, 30, 30, 240)  960         block4a_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block4a_expand_activation (Acti (None, 30, 30, 240)  0           block4a_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4a_dwconv (DepthwiseConv2D (None, 15, 15, 240)  2160        block4a_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block4a_bn (BatchNormalization) (None, 15, 15, 240)  960         block4a_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block4a_activation (Activation) (None, 15, 15, 240)  0           block4a_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block4a_se_squeeze (GlobalAvera (None, 240)          0           block4a_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4a_se_reshape (Reshape)    (None, 1, 1, 240)    0           block4a_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4a_se_reduce (Conv2D)      (None, 1, 1, 10)     2410        block4a_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4a_se_expand (Conv2D)      (None, 1, 1, 240)    2640        block4a_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4a_se_excite (Multiply)    (None, 15, 15, 240)  0           block4a_activation[0][0]         \n",
      "                                                                 block4a_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4a_project_conv (Conv2D)   (None, 15, 15, 80)   19200       block4a_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4a_project_bn (BatchNormal (None, 15, 15, 80)   320         block4a_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block4b_expand_conv (Conv2D)    (None, 15, 15, 480)  38400       block4a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4b_expand_bn (BatchNormali (None, 15, 15, 480)  1920        block4b_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block4b_expand_activation (Acti (None, 15, 15, 480)  0           block4b_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4b_dwconv (DepthwiseConv2D (None, 15, 15, 480)  4320        block4b_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block4b_bn (BatchNormalization) (None, 15, 15, 480)  1920        block4b_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block4b_activation (Activation) (None, 15, 15, 480)  0           block4b_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block4b_se_squeeze (GlobalAvera (None, 480)          0           block4b_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4b_se_reshape (Reshape)    (None, 1, 1, 480)    0           block4b_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4b_se_reduce (Conv2D)      (None, 1, 1, 20)     9620        block4b_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4b_se_expand (Conv2D)      (None, 1, 1, 480)    10080       block4b_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4b_se_excite (Multiply)    (None, 15, 15, 480)  0           block4b_activation[0][0]         \n",
      "                                                                 block4b_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4b_project_conv (Conv2D)   (None, 15, 15, 80)   38400       block4b_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4b_project_bn (BatchNormal (None, 15, 15, 80)   320         block4b_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block4b_drop (FixedDropout)     (None, 15, 15, 80)   0           block4b_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4b_add (Add)               (None, 15, 15, 80)   0           block4b_drop[0][0]               \n",
      "                                                                 block4a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4c_expand_conv (Conv2D)    (None, 15, 15, 480)  38400       block4b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block4c_expand_bn (BatchNormali (None, 15, 15, 480)  1920        block4c_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block4c_expand_activation (Acti (None, 15, 15, 480)  0           block4c_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4c_dwconv (DepthwiseConv2D (None, 15, 15, 480)  4320        block4c_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block4c_bn (BatchNormalization) (None, 15, 15, 480)  1920        block4c_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block4c_activation (Activation) (None, 15, 15, 480)  0           block4c_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block4c_se_squeeze (GlobalAvera (None, 480)          0           block4c_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4c_se_reshape (Reshape)    (None, 1, 1, 480)    0           block4c_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4c_se_reduce (Conv2D)      (None, 1, 1, 20)     9620        block4c_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4c_se_expand (Conv2D)      (None, 1, 1, 480)    10080       block4c_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4c_se_excite (Multiply)    (None, 15, 15, 480)  0           block4c_activation[0][0]         \n",
      "                                                                 block4c_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4c_project_conv (Conv2D)   (None, 15, 15, 80)   38400       block4c_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4c_project_bn (BatchNormal (None, 15, 15, 80)   320         block4c_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block4c_drop (FixedDropout)     (None, 15, 15, 80)   0           block4c_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4c_add (Add)               (None, 15, 15, 80)   0           block4c_drop[0][0]               \n",
      "                                                                 block4b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block4d_expand_conv (Conv2D)    (None, 15, 15, 480)  38400       block4c_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block4d_expand_bn (BatchNormali (None, 15, 15, 480)  1920        block4d_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block4d_expand_activation (Acti (None, 15, 15, 480)  0           block4d_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4d_dwconv (DepthwiseConv2D (None, 15, 15, 480)  4320        block4d_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block4d_bn (BatchNormalization) (None, 15, 15, 480)  1920        block4d_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block4d_activation (Activation) (None, 15, 15, 480)  0           block4d_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block4d_se_squeeze (GlobalAvera (None, 480)          0           block4d_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4d_se_reshape (Reshape)    (None, 1, 1, 480)    0           block4d_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4d_se_reduce (Conv2D)      (None, 1, 1, 20)     9620        block4d_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4d_se_expand (Conv2D)      (None, 1, 1, 480)    10080       block4d_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4d_se_excite (Multiply)    (None, 15, 15, 480)  0           block4d_activation[0][0]         \n",
      "                                                                 block4d_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4d_project_conv (Conv2D)   (None, 15, 15, 80)   38400       block4d_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block4d_project_bn (BatchNormal (None, 15, 15, 80)   320         block4d_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block4d_drop (FixedDropout)     (None, 15, 15, 80)   0           block4d_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block4d_add (Add)               (None, 15, 15, 80)   0           block4d_drop[0][0]               \n",
      "                                                                 block4c_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block5a_expand_conv (Conv2D)    (None, 15, 15, 480)  38400       block4d_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block5a_expand_bn (BatchNormali (None, 15, 15, 480)  1920        block5a_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block5a_expand_activation (Acti (None, 15, 15, 480)  0           block5a_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5a_dwconv (DepthwiseConv2D (None, 15, 15, 480)  12000       block5a_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block5a_bn (BatchNormalization) (None, 15, 15, 480)  1920        block5a_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block5a_activation (Activation) (None, 15, 15, 480)  0           block5a_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block5a_se_squeeze (GlobalAvera (None, 480)          0           block5a_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5a_se_reshape (Reshape)    (None, 1, 1, 480)    0           block5a_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5a_se_reduce (Conv2D)      (None, 1, 1, 20)     9620        block5a_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5a_se_expand (Conv2D)      (None, 1, 1, 480)    10080       block5a_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5a_se_excite (Multiply)    (None, 15, 15, 480)  0           block5a_activation[0][0]         \n",
      "                                                                 block5a_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5a_project_conv (Conv2D)   (None, 15, 15, 112)  53760       block5a_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5a_project_bn (BatchNormal (None, 15, 15, 112)  448         block5a_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block5b_expand_conv (Conv2D)    (None, 15, 15, 672)  75264       block5a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5b_expand_bn (BatchNormali (None, 15, 15, 672)  2688        block5b_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block5b_expand_activation (Acti (None, 15, 15, 672)  0           block5b_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5b_dwconv (DepthwiseConv2D (None, 15, 15, 672)  16800       block5b_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block5b_bn (BatchNormalization) (None, 15, 15, 672)  2688        block5b_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block5b_activation (Activation) (None, 15, 15, 672)  0           block5b_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block5b_se_squeeze (GlobalAvera (None, 672)          0           block5b_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5b_se_reshape (Reshape)    (None, 1, 1, 672)    0           block5b_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5b_se_reduce (Conv2D)      (None, 1, 1, 28)     18844       block5b_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5b_se_expand (Conv2D)      (None, 1, 1, 672)    19488       block5b_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5b_se_excite (Multiply)    (None, 15, 15, 672)  0           block5b_activation[0][0]         \n",
      "                                                                 block5b_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5b_project_conv (Conv2D)   (None, 15, 15, 112)  75264       block5b_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5b_project_bn (BatchNormal (None, 15, 15, 112)  448         block5b_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block5b_drop (FixedDropout)     (None, 15, 15, 112)  0           block5b_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5b_add (Add)               (None, 15, 15, 112)  0           block5b_drop[0][0]               \n",
      "                                                                 block5a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5c_expand_conv (Conv2D)    (None, 15, 15, 672)  75264       block5b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block5c_expand_bn (BatchNormali (None, 15, 15, 672)  2688        block5c_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block5c_expand_activation (Acti (None, 15, 15, 672)  0           block5c_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5c_dwconv (DepthwiseConv2D (None, 15, 15, 672)  16800       block5c_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block5c_bn (BatchNormalization) (None, 15, 15, 672)  2688        block5c_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block5c_activation (Activation) (None, 15, 15, 672)  0           block5c_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block5c_se_squeeze (GlobalAvera (None, 672)          0           block5c_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5c_se_reshape (Reshape)    (None, 1, 1, 672)    0           block5c_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5c_se_reduce (Conv2D)      (None, 1, 1, 28)     18844       block5c_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5c_se_expand (Conv2D)      (None, 1, 1, 672)    19488       block5c_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5c_se_excite (Multiply)    (None, 15, 15, 672)  0           block5c_activation[0][0]         \n",
      "                                                                 block5c_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5c_project_conv (Conv2D)   (None, 15, 15, 112)  75264       block5c_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5c_project_bn (BatchNormal (None, 15, 15, 112)  448         block5c_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block5c_drop (FixedDropout)     (None, 15, 15, 112)  0           block5c_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5c_add (Add)               (None, 15, 15, 112)  0           block5c_drop[0][0]               \n",
      "                                                                 block5b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block5d_expand_conv (Conv2D)    (None, 15, 15, 672)  75264       block5c_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block5d_expand_bn (BatchNormali (None, 15, 15, 672)  2688        block5d_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block5d_expand_activation (Acti (None, 15, 15, 672)  0           block5d_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5d_dwconv (DepthwiseConv2D (None, 15, 15, 672)  16800       block5d_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block5d_bn (BatchNormalization) (None, 15, 15, 672)  2688        block5d_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block5d_activation (Activation) (None, 15, 15, 672)  0           block5d_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block5d_se_squeeze (GlobalAvera (None, 672)          0           block5d_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5d_se_reshape (Reshape)    (None, 1, 1, 672)    0           block5d_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5d_se_reduce (Conv2D)      (None, 1, 1, 28)     18844       block5d_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5d_se_expand (Conv2D)      (None, 1, 1, 672)    19488       block5d_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5d_se_excite (Multiply)    (None, 15, 15, 672)  0           block5d_activation[0][0]         \n",
      "                                                                 block5d_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5d_project_conv (Conv2D)   (None, 15, 15, 112)  75264       block5d_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block5d_project_bn (BatchNormal (None, 15, 15, 112)  448         block5d_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block5d_drop (FixedDropout)     (None, 15, 15, 112)  0           block5d_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block5d_add (Add)               (None, 15, 15, 112)  0           block5d_drop[0][0]               \n",
      "                                                                 block5c_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block6a_expand_conv (Conv2D)    (None, 15, 15, 672)  75264       block5d_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block6a_expand_bn (BatchNormali (None, 15, 15, 672)  2688        block6a_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block6a_expand_activation (Acti (None, 15, 15, 672)  0           block6a_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6a_dwconv (DepthwiseConv2D (None, 8, 8, 672)    16800       block6a_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block6a_bn (BatchNormalization) (None, 8, 8, 672)    2688        block6a_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block6a_activation (Activation) (None, 8, 8, 672)    0           block6a_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block6a_se_squeeze (GlobalAvera (None, 672)          0           block6a_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6a_se_reshape (Reshape)    (None, 1, 1, 672)    0           block6a_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6a_se_reduce (Conv2D)      (None, 1, 1, 28)     18844       block6a_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6a_se_expand (Conv2D)      (None, 1, 1, 672)    19488       block6a_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6a_se_excite (Multiply)    (None, 8, 8, 672)    0           block6a_activation[0][0]         \n",
      "                                                                 block6a_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6a_project_conv (Conv2D)   (None, 8, 8, 192)    129024      block6a_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6a_project_bn (BatchNormal (None, 8, 8, 192)    768         block6a_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block6b_expand_conv (Conv2D)    (None, 8, 8, 1152)   221184      block6a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6b_expand_bn (BatchNormali (None, 8, 8, 1152)   4608        block6b_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block6b_expand_activation (Acti (None, 8, 8, 1152)   0           block6b_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6b_dwconv (DepthwiseConv2D (None, 8, 8, 1152)   28800       block6b_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block6b_bn (BatchNormalization) (None, 8, 8, 1152)   4608        block6b_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block6b_activation (Activation) (None, 8, 8, 1152)   0           block6b_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block6b_se_squeeze (GlobalAvera (None, 1152)         0           block6b_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6b_se_reshape (Reshape)    (None, 1, 1, 1152)   0           block6b_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6b_se_reduce (Conv2D)      (None, 1, 1, 48)     55344       block6b_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6b_se_expand (Conv2D)      (None, 1, 1, 1152)   56448       block6b_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6b_se_excite (Multiply)    (None, 8, 8, 1152)   0           block6b_activation[0][0]         \n",
      "                                                                 block6b_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6b_project_conv (Conv2D)   (None, 8, 8, 192)    221184      block6b_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6b_project_bn (BatchNormal (None, 8, 8, 192)    768         block6b_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block6b_drop (FixedDropout)     (None, 8, 8, 192)    0           block6b_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6b_add (Add)               (None, 8, 8, 192)    0           block6b_drop[0][0]               \n",
      "                                                                 block6a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6c_expand_conv (Conv2D)    (None, 8, 8, 1152)   221184      block6b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block6c_expand_bn (BatchNormali (None, 8, 8, 1152)   4608        block6c_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block6c_expand_activation (Acti (None, 8, 8, 1152)   0           block6c_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6c_dwconv (DepthwiseConv2D (None, 8, 8, 1152)   28800       block6c_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block6c_bn (BatchNormalization) (None, 8, 8, 1152)   4608        block6c_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block6c_activation (Activation) (None, 8, 8, 1152)   0           block6c_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block6c_se_squeeze (GlobalAvera (None, 1152)         0           block6c_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6c_se_reshape (Reshape)    (None, 1, 1, 1152)   0           block6c_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6c_se_reduce (Conv2D)      (None, 1, 1, 48)     55344       block6c_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6c_se_expand (Conv2D)      (None, 1, 1, 1152)   56448       block6c_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6c_se_excite (Multiply)    (None, 8, 8, 1152)   0           block6c_activation[0][0]         \n",
      "                                                                 block6c_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6c_project_conv (Conv2D)   (None, 8, 8, 192)    221184      block6c_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6c_project_bn (BatchNormal (None, 8, 8, 192)    768         block6c_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block6c_drop (FixedDropout)     (None, 8, 8, 192)    0           block6c_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6c_add (Add)               (None, 8, 8, 192)    0           block6c_drop[0][0]               \n",
      "                                                                 block6b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block6d_expand_conv (Conv2D)    (None, 8, 8, 1152)   221184      block6c_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block6d_expand_bn (BatchNormali (None, 8, 8, 1152)   4608        block6d_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block6d_expand_activation (Acti (None, 8, 8, 1152)   0           block6d_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6d_dwconv (DepthwiseConv2D (None, 8, 8, 1152)   28800       block6d_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block6d_bn (BatchNormalization) (None, 8, 8, 1152)   4608        block6d_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block6d_activation (Activation) (None, 8, 8, 1152)   0           block6d_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block6d_se_squeeze (GlobalAvera (None, 1152)         0           block6d_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6d_se_reshape (Reshape)    (None, 1, 1, 1152)   0           block6d_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6d_se_reduce (Conv2D)      (None, 1, 1, 48)     55344       block6d_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6d_se_expand (Conv2D)      (None, 1, 1, 1152)   56448       block6d_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6d_se_excite (Multiply)    (None, 8, 8, 1152)   0           block6d_activation[0][0]         \n",
      "                                                                 block6d_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6d_project_conv (Conv2D)   (None, 8, 8, 192)    221184      block6d_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6d_project_bn (BatchNormal (None, 8, 8, 192)    768         block6d_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block6d_drop (FixedDropout)     (None, 8, 8, 192)    0           block6d_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6d_add (Add)               (None, 8, 8, 192)    0           block6d_drop[0][0]               \n",
      "                                                                 block6c_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block6e_expand_conv (Conv2D)    (None, 8, 8, 1152)   221184      block6d_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block6e_expand_bn (BatchNormali (None, 8, 8, 1152)   4608        block6e_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block6e_expand_activation (Acti (None, 8, 8, 1152)   0           block6e_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6e_dwconv (DepthwiseConv2D (None, 8, 8, 1152)   28800       block6e_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block6e_bn (BatchNormalization) (None, 8, 8, 1152)   4608        block6e_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block6e_activation (Activation) (None, 8, 8, 1152)   0           block6e_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block6e_se_squeeze (GlobalAvera (None, 1152)         0           block6e_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6e_se_reshape (Reshape)    (None, 1, 1, 1152)   0           block6e_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6e_se_reduce (Conv2D)      (None, 1, 1, 48)     55344       block6e_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6e_se_expand (Conv2D)      (None, 1, 1, 1152)   56448       block6e_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6e_se_excite (Multiply)    (None, 8, 8, 1152)   0           block6e_activation[0][0]         \n",
      "                                                                 block6e_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6e_project_conv (Conv2D)   (None, 8, 8, 192)    221184      block6e_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block6e_project_bn (BatchNormal (None, 8, 8, 192)    768         block6e_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block6e_drop (FixedDropout)     (None, 8, 8, 192)    0           block6e_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block6e_add (Add)               (None, 8, 8, 192)    0           block6e_drop[0][0]               \n",
      "                                                                 block6d_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block7a_expand_conv (Conv2D)    (None, 8, 8, 1152)   221184      block6e_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "block7a_expand_bn (BatchNormali (None, 8, 8, 1152)   4608        block7a_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block7a_expand_activation (Acti (None, 8, 8, 1152)   0           block7a_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block7a_dwconv (DepthwiseConv2D (None, 8, 8, 1152)   10368       block7a_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block7a_bn (BatchNormalization) (None, 8, 8, 1152)   4608        block7a_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block7a_activation (Activation) (None, 8, 8, 1152)   0           block7a_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block7a_se_squeeze (GlobalAvera (None, 1152)         0           block7a_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block7a_se_reshape (Reshape)    (None, 1, 1, 1152)   0           block7a_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block7a_se_reduce (Conv2D)      (None, 1, 1, 48)     55344       block7a_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block7a_se_expand (Conv2D)      (None, 1, 1, 1152)   56448       block7a_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block7a_se_excite (Multiply)    (None, 8, 8, 1152)   0           block7a_activation[0][0]         \n",
      "                                                                 block7a_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block7a_project_conv (Conv2D)   (None, 8, 8, 320)    368640      block7a_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block7a_project_bn (BatchNormal (None, 8, 8, 320)    1280        block7a_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block7b_expand_conv (Conv2D)    (None, 8, 8, 1920)   614400      block7a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block7b_expand_bn (BatchNormali (None, 8, 8, 1920)   7680        block7b_expand_conv[0][0]        \n",
      "__________________________________________________________________________________________________\n",
      "block7b_expand_activation (Acti (None, 8, 8, 1920)   0           block7b_expand_bn[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block7b_dwconv (DepthwiseConv2D (None, 8, 8, 1920)   17280       block7b_expand_activation[0][0]  \n",
      "__________________________________________________________________________________________________\n",
      "block7b_bn (BatchNormalization) (None, 8, 8, 1920)   7680        block7b_dwconv[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "block7b_activation (Activation) (None, 8, 8, 1920)   0           block7b_bn[0][0]                 \n",
      "__________________________________________________________________________________________________\n",
      "block7b_se_squeeze (GlobalAvera (None, 1920)         0           block7b_activation[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block7b_se_reshape (Reshape)    (None, 1, 1, 1920)   0           block7b_se_squeeze[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block7b_se_reduce (Conv2D)      (None, 1, 1, 80)     153680      block7b_se_reshape[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block7b_se_expand (Conv2D)      (None, 1, 1, 1920)   155520      block7b_se_reduce[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block7b_se_excite (Multiply)    (None, 8, 8, 1920)   0           block7b_activation[0][0]         \n",
      "                                                                 block7b_se_expand[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block7b_project_conv (Conv2D)   (None, 8, 8, 320)    614400      block7b_se_excite[0][0]          \n",
      "__________________________________________________________________________________________________\n",
      "block7b_project_bn (BatchNormal (None, 8, 8, 320)    1280        block7b_project_conv[0][0]       \n",
      "__________________________________________________________________________________________________\n",
      "block7b_drop (FixedDropout)     (None, 8, 8, 320)    0           block7b_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "block7b_add (Add)               (None, 8, 8, 320)    0           block7b_drop[0][0]               \n",
      "                                                                 block7a_project_bn[0][0]         \n",
      "__________________________________________________________________________________________________\n",
      "top_conv (Conv2D)               (None, 8, 8, 1280)   409600      block7b_add[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "dense_input (InputLayer)        [(None, 6)]          0                                            \n",
      "__________________________________________________________________________________________________\n",
      "top_bn (BatchNormalization)     (None, 8, 8, 1280)   5120        top_conv[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "dense (Dense)                   (None, 8)            56          dense_input[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "top_activation (Activation)     (None, 8, 8, 1280)   0           top_bn[0][0]                     \n",
      "__________________________________________________________________________________________________\n",
      "dropout (Dropout)               (None, 8)            0           dense[0][0]                      \n",
      "__________________________________________________________________________________________________\n",
      "global_average_pooling2d (Globa (None, 1280)         0           top_activation[0][0]             \n",
      "__________________________________________________________________________________________________\n",
      "dense_1 (Dense)                 (None, 4)            36          dropout[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "dropout_1 (Dropout)             (None, 1280)         0           global_average_pooling2d[0][0]   \n",
      "__________________________________________________________________________________________________\n",
      "concatenate (Concatenate)       (None, 1284)         0           dense_1[0][0]                    \n",
      "                                                                 dropout_1[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "dense_2 (Dense)                 (None, 4)            5140        concatenate[0][0]                \n",
      "__________________________________________________________________________________________________\n",
      "dropout_2 (Dropout)             (None, 4)            0           dense_2[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "dense_3 (Dense)                 (None, 1)            5           dropout_2[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 6,580,757\n",
      "Trainable params: 6,389\n",
      "Non-trainable params: 6,574,368\n",
      "__________________________________________________________________________________________________\n"
     ]
    }
   ],
   "source": [
    "model.summary()"
   ]
  },
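  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The summary above interleaves the two branches of the hybrid network: the EfficientNetB1 image branch (ending in `top_conv`/`top_bn`/`top_activation` followed by global average pooling) and a small MLP on the 6 tabular features, fused by `concatenate` into a 1284-dimensional vector that feeds the regression head. Only 6,389 of the 6,580,757 parameters are trainable. A minimal sketch of how this topology can be wired with the Keras functional API (layer sizes are taken from the summary; activations and dropout rates are assumptions):\n",
    "\n",
    "```python\n",
    "from tensorflow.keras import layers, models\n",
    "from efficientnet.tfkeras import EfficientNetB1\n",
    "\n",
    "# Tabular branch: 6 features -> Dense(8) -> Dropout -> Dense(4)\n",
    "mlp_in = layers.Input(shape=(6,), name='dense_input')\n",
    "x = layers.Dense(8, activation='relu')(mlp_in)   # 6*8+8 = 56 params\n",
    "x = layers.Dropout(0.2)(x)                       # rate is an assumption\n",
    "x = layers.Dense(4, activation='relu')(x)        # 8*4+4 = 36 params\n",
    "\n",
    "# Image branch: EfficientNetB1 backbone -> GAP(1280) -> Dropout\n",
    "cnn = EfficientNetB1(include_top=False, weights=None,\n",
    "                     input_shape=(240, 240, 4))\n",
    "cnn.trainable = False                            # keep the backbone frozen\n",
    "y = layers.GlobalAveragePooling2D()(cnn.output)\n",
    "y = layers.Dropout(0.2)(y)\n",
    "\n",
    "# Fusion head: concat(4 + 1280) -> Dense(4) -> Dropout -> Dense(1)\n",
    "z = layers.Concatenate()([x, y])                 # (None, 1284)\n",
    "z = layers.Dense(4, activation='relu')(z)        # 1284*4+4 = 5140 params\n",
    "z = layers.Dropout(0.2)(z)\n",
    "out = layers.Dense(1, activation='linear')(z)    # 4*1+1 = 5 params\n",
    "\n",
    "model = models.Model(inputs=[mlp_in, cnn.input], outputs=out)\n",
    "```"
   ]
  },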
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# F - Postprocessing : Evaluation"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [],
   "source": [
    "from postprocessing.evaluate import evaluate_hybrid"
   ]
  },
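  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`evaluate_hybrid` lives in the project's `postprocessing` package, so its body is not shown in this notebook. Judging from its arguments and the logged output below, it predicts on the paired tabular/image inputs, maps the scaled FVC values back to millilitres with the fitted scaler `sc`, and reports relative error statistics. A rough sketch under those assumptions (not the project's actual implementation):\n",
    "\n",
    "```python\n",
    "import logging\n",
    "import numpy as np\n",
    "\n",
    "def evaluate_hybrid_sketch(model, df, attrX, images, y, sc):\n",
    "    logging.info('predicting ...')\n",
    "    preds = model.predict([attrX, images]).reshape(-1, 1)\n",
    "    fvc_pred = sc.inverse_transform(preds).reshape(-1)\n",
    "    fvc_true = sc.inverse_transform(np.asarray(y).reshape(-1, 1)).reshape(-1)\n",
    "    # The avg/std line is identical for train and test below, so it is\n",
    "    # presumably computed over the full dataframe.\n",
    "    print(f'avg. FVC: {df.FVC.mean()}, std FVC {df.FVC.std()}')\n",
    "    diff = 100 * (fvc_pred - fvc_true) / fvc_true\n",
    "    print(f'mean difference : {diff.mean():.2f}%, std: {diff.std():.2f}%')\n",
    "    return fvc_pred\n",
    "```"
   ]
  },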
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:predicting ...\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "avg. FVC: 2690.479018721756, std FVC 832.7709592986739\n",
      "mean difference : 158.92%, std: 1050.05%\n"
     ]
    }
   ],
   "source": [
    "preds = evaluate_hybrid(model, df, trainAttrX, train_dataset, trainY, sc)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "35/35 [==============================] - ETA: 8:06 - loss: 0.146 - ETA: 1:28 - loss: 0.107 - ETA: 1:28 - loss: 0.109 - ETA: 1:24 - loss: 0.111 - ETA: 1:21 - loss: 0.111 - ETA: 1:17 - loss: 0.103 - ETA: 1:14 - loss: 0.124 - ETA: 1:11 - loss: 0.118 - ETA: 1:08 - loss: 0.110 - ETA: 1:06 - loss: 0.105 - ETA: 1:03 - loss: 0.101 - ETA: 1:00 - loss: 0.099 - ETA: 57s - loss: 0.099 - ETA: 55s - loss: 0.10 - ETA: 52s - loss: 0.09 - ETA: 49s - loss: 0.10 - ETA: 47s - loss: 0.10 - ETA: 44s - loss: 0.13 - ETA: 41s - loss: 0.15 - ETA: 39s - loss: 0.15 - ETA: 36s - loss: 0.15 - ETA: 34s - loss: 0.15 - ETA: 31s - loss: 0.15 - ETA: 29s - loss: 0.15 - ETA: 26s - loss: 0.14 - ETA: 23s - loss: 0.14 - ETA: 21s - loss: 0.14 - ETA: 18s - loss: 0.14 - ETA: 15s - loss: 0.14 - ETA: 13s - loss: 0.14 - ETA: 10s - loss: 0.14 - ETA: 7s - loss: 0.1402 - ETA: 5s - loss: 0.137 - ETA: 2s - loss: 0.137 - ETA: 0s - loss: 0.136 - 103s 3s/step - loss: 0.1366\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "0.13659389317035675"
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model.evaluate([trainAttrX, train_dataset], trainY)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Test set"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:predicting ...\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "avg. FVC: 2690.479018721756, std FVC 832.7709592986739\n",
      "mean difference : 83.21%, std: 223.93%\n"
     ]
    }
   ],
   "source": [
    "preds = evaluate_hybrid(model, df, testAttrX, test_dataset, testY, sc)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "9/9 [==============================] - ETA: 22s - loss: 0.08 - ETA: 18s - loss: 0.09 - ETA: 15s - loss: 0.11 - ETA: 13s - loss: 0.15 - ETA: 10s - loss: 0.13 - ETA: 7s - loss: 0.1212 - ETA: 5s - loss: 0.116 - ETA: 2s - loss: 0.107 - ETA: 0s - loss: 0.107 - 23s 3s/step - loss: 0.1076\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "0.10756301134824753"
      ]
     },
     "execution_count": 31,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model.evaluate([testAttrX, test_dataset], testY)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# G - Postprocessing : Competition score"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "metadata": {},
   "outputs": [],
   "source": [
    "from postprocessing.dropout_predictions import create_dropout_predict_function\n",
    "import tensorflow as tf\n",
    "import matplotlib.pyplot as plt\n",
    "from postprocessing.evaluate import compute_score"
   ]
  },
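  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The competition score below is obtained with Monte Carlo dropout: dropout stays active at inference time, so repeating the forward pass yields a distribution of predictions per patient, whose spread serves as the uncertainty estimate the metric needs. The call pattern used later, `predict_with_dropout([input_data, 1])`, passes the Keras learning phase (1 = training) so the dropout layers keep firing. A minimal sketch of such a factory, assuming it clones the model with the requested dropout rate (the project's `create_dropout_predict_function` may differ in its details):\n",
    "\n",
    "```python\n",
    "from tensorflow.keras import backend as K\n",
    "from tensorflow.keras.models import Model\n",
    "\n",
    "def create_dropout_predict_function_sketch(model, dropout):\n",
    "    # Rewrite the rate of every (Fixed)Dropout layer in the model config.\n",
    "    conf = model.get_config()\n",
    "    for layer in conf['layers']:\n",
    "        if 'Dropout' in layer['class_name']:\n",
    "            layer['config']['rate'] = dropout\n",
    "    dropout_model = Model.from_config(conf)\n",
    "    dropout_model.set_weights(model.get_weights())\n",
    "    print(dropout_model.inputs)\n",
    "    # The extra learning-phase input lets the caller switch dropout on (1).\n",
    "    return K.function(dropout_model.inputs + [K.learning_phase()],\n",
    "                      dropout_model.outputs)\n",
    "```"
   ]
  },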
  {
   "cell_type": "code",
   "execution_count": 34,
   "metadata": {},
   "outputs": [],
   "source": [
    "#Load model\n",
    "from keras.models import load_model\n",
    "from efficientnet.tfkeras import EfficientNetB1\n",
    "model = None\n",
    "model = load_model('clean_notebooks/cnn_injection_transfer.h5')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Train score"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[<tf.Tensor 'dense_input_2:0' shape=(None, 6) dtype=float32>, <tf.Tensor 'input_1_2:0' shape=(None, 240, 240, 4) dtype=float32>]\n"
     ]
    },
    {
     "ename": "ResourceExhaustedError",
     "evalue": "OOM when allocating tensor with shape[1093,120,120,96] and type float on /job:localhost/replica:0/task:0/device:CPU:0 by allocator cpu\n\t [[{{node block2a_expand_bn_2/FusedBatchNormV3}}]]\nHint: If you want to see a list of allocated tensors when OOM happens, add report_tensor_allocations_upon_oom to RunOptions for current allocation info.\n",
     "output_type": "error",
     "traceback": [
      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[1;31mResourceExhaustedError\u001b[0m                    Traceback (most recent call last)",
      "\u001b[1;32m<ipython-input-39-991bbdb759f6>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m\u001b[0m\n\u001b[0;32m     12\u001b[0m \u001b[0mpredictions\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mnp\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mzeros\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mnum_samples\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mnum_iter\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m     13\u001b[0m \u001b[1;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[1;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mnum_iter\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 14\u001b[1;33m     \u001b[0mpredictions\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m,\u001b[0m\u001b[0mi\u001b[0m\u001b[1;33m]\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mpredict_with_dropout\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0minput_data\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;36m1\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mreshape\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m-\u001b[0m\u001b[1;36m1\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m",
      "\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\keras\\backend.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, inputs)\u001b[0m\n\u001b[0;32m   3955\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   3956\u001b[0m     fetched = self._callable_fn(*array_vals,\n\u001b[1;32m-> 3957\u001b[1;33m                                 run_metadata=self.run_metadata)\n\u001b[0m\u001b[0;32m   3958\u001b[0m     \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_call_fetch_callbacks\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mfetched\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;33m-\u001b[0m\u001b[0mlen\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_fetches\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   3959\u001b[0m     output_structure = nest.pack_sequence_as(\n",
      "\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\client\\session.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, *args, **kwargs)\u001b[0m\n\u001b[0;32m   1480\u001b[0m         ret = tf_session.TF_SessionRunCallable(self._session._session,\n\u001b[0;32m   1481\u001b[0m                                                \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_handle\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0margs\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1482\u001b[1;33m                                                run_metadata_ptr)\n\u001b[0m\u001b[0;32m   1483\u001b[0m         \u001b[1;32mif\u001b[0m \u001b[0mrun_metadata\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m   1484\u001b[0m           \u001b[0mproto_data\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mtf_session\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mTF_GetBuffer\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mrun_metadata_ptr\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;31mResourceExhaustedError\u001b[0m: OOM when allocating tensor with shape[1093,120,120,96] and type float on /job:localhost/replica:0/task:0/device:CPU:0 by allocator cpu\n\t [[{{node block2a_expand_bn_2/FusedBatchNormV3}}]]\nHint: If you want to see a list of allocated tensors when OOM happens, add report_tensor_allocations_upon_oom to RunOptions for current allocation info.\n"
     ]
    }
   ],
   "source": [
    "tf.compat.v1.disable_eager_execution()\n",
    "\n",
    "dropout = 0.3\n",
    "num_iter = 20\n",
    " \n",
    "input_data=[trainAttrX, train_dataset]\n",
    "input_data[1] = np.asarray(input_data[1]).reshape(train_dataset.shape[0],240,240,4)\n",
    "num_samples = input_data[0].shape[0]\n",
    "\n",
    "predict_with_dropout = create_dropout_predict_function(model, dropout)\n",
    "\n",
    "predictions = np.zeros((num_samples, num_iter))\n",
    "for i in range(num_iter):\n",
    "    predictions[:,i] = predict_with_dropout([input_data,1])[0].reshape(-1)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Mémoire insuffisante"
   ]
  },
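  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A likely workaround (not run here) is to feed the dropout-prediction function in batches instead of pushing all training samples through the graph at once. A minimal sketch reusing the variables defined above, with `batch_size` as an assumed value:\n",
    "\n",
    "```python\n",
    "batch_size = 32  # assumption: small enough to fit in CPU memory\n",
    "predictions = np.zeros((num_samples, num_iter))\n",
    "for i in range(num_iter):\n",
    "    for start in range(0, num_samples, batch_size):\n",
    "        stop = min(start + batch_size, num_samples)\n",
    "        batch = [input_data[0][start:stop], input_data[1][start:stop]]\n",
    "        predictions[start:stop, i] = predict_with_dropout([batch, 1])[0].reshape(-1)\n",
    "```"
   ]
  },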
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Test score"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Dropout 0.4"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[<tf.Tensor 'dense_input_3:0' shape=(None, 6) dtype=float32>, <tf.Tensor 'input_1_3:0' shape=(None, 240, 240, 4) dtype=float32>]\n"
     ]
    }
   ],
   "source": [
    "tf.compat.v1.disable_eager_execution()\n",
    "\n",
    "dropout = 0.4\n",
    "num_iter = 20\n",
    " \n",
    "input_data=[testAttrX, test_dataset]\n",
    "input_data[1] = np.asarray(input_data[1]).reshape(test_dataset.shape[0],240,240,4)\n",
    "num_samples = input_data[0].shape[0]\n",
    "\n",
    "predict_with_dropout = create_dropout_predict_function(model, dropout)\n",
    "\n",
    "predictions = np.zeros((num_samples, num_iter))\n",
    "for i in range(num_iter):\n",
    "    predictions[:,i] = predict_with_dropout([input_data,1])[0].reshape(-1)"
   ]
  },
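  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "From the 20 stochastic passes, each sample gets a point prediction (mean over iterations) and an uncertainty (standard deviation), which is presumably what `compute_score` consumes after mapping the scaled values back to millilitres with `sc`. The OSIC competition metric is a clipped Laplace log likelihood; a sketch under that assumption (sigma floored at 70 mL and errors capped at 1000 mL, as in the competition definition):\n",
    "\n",
    "```python\n",
    "import numpy as np\n",
    "\n",
    "def laplace_log_likelihood(fvc_true, fvc_pred, sigma):\n",
    "    # Clip sigma and the absolute error before scoring.\n",
    "    sigma_clipped = np.maximum(sigma, 70)\n",
    "    delta = np.minimum(np.abs(fvc_true - fvc_pred), 1000)\n",
    "    metric = -np.sqrt(2) * delta / sigma_clipped - np.log(np.sqrt(2) * sigma_clipped)\n",
    "    return metric.mean()\n",
    "\n",
    "# Point estimate and uncertainty from the Monte Carlo passes:\n",
    "fvc_mean = predictions.mean(axis=1)\n",
    "fvc_std = predictions.std(axis=1)\n",
    "```"
   ]
  },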
  {
   "cell_type": "code",
   "execution_count": 41,
   "metadata": {},
   "outputs": [
    {
     "data": {
2199
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXAAAAD4CAYAAAD1jb0+AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAP4UlEQVR4nO3df6xfdX3H8edrBcWpsyA3pKFkF5VIyDIKuesgGKN1GASjmJBFYlz/aFKXYIKZmVaXbJpsCSTT6pKFrArSP5w/hj8g6NQOaozJUnYrpRQqoWKNbQq9TFDZH2yF9/74nuLN5d5+v73f7/fefm6fj+Tke87nnO/3vD/wzeuefs4535OqQpLUnt9b7gIkSYtjgEtSowxwSWqUAS5JjTLAJalRZyzlzs4999yanJxcyl1KUvN27979dFVNzG1f0gCfnJxkenp6KXcpSc1L8ov52gceQkmyKsmDSe7tli9MsivJgSRfS/KKURUrServZMbAbwb2z1q+FdhaVW8CngE2jbIwSdKJDRTgSdYC1wFf7JYDbADu6jbZDlw/hvokSQsY9Aj8c8DHgBe75dcDz1bVsW75EHD+fG9MsjnJdJLpmZmZYWqVJM3SN8CTvBs4WlW7F7ODqtpWVVNVNTUx8bKTqJKkRRrkKpSrgPckuRY4C/gD4PPA6iRndEfha4HD4ytTkjRX3yPwqvpEVa2tqkng/cD9VfUBYCdwQ7fZRuDusVUpSXqZYe7E/DjwV0kO0BsTv300JUmSBnFSN/JU1Q+BH3bzTwDrR1+SJGkQS3onpk7O5JbvjPTzDt5y3Ug/T9Ly8sesJKlRBrgkNcoAl6RGGeCS1ChPYmoonmiVlo9H4JLUKANckhplgEtSowxwSWqUAS5JjTLAJalRBrgkNcoAl6RGGeCS1CgDXJIaZYBLUqMMcElqVN8AT3JWkgeSPJTkkSSf7trvTPLzJHu6ad3Yq5UkvWSQXyN8HthQVc8lORP4cZJ/79b9dVXdNb7yJEkL6RvgVVXAc93imd1U4yxKktTfQGPgSVYl2QMcBXZU1a5u1T8k2Ztka5JXLvDezUmmk0zPzMyMpmpJ0mABXlUvVNU6YC2wPskfAZ8ALgb+BDgH+PgC791WVVNVNTUxMTGaqiVJJ3cVSlU9C+wErqmqI9XzPPAlYP0Y6pMkLWCQq1Amkqzu5l8FXA38NMmari3A9cC+8ZUpSZprkKtQ1gDbk6yiF/hfr6p7k9yfZAIIsAf4y/GVKUmaa5CrUPYCl83TvmEsFTVs1A/4laQT8U5MSWqUAS5JjTLAJalRBrgkNcoAl6RGGeCS1CgDXJIaZYBLUqMMcElqlAEuSY0ywCWpUQa4JDXKAJekRhngktQoA1ySGmWAS1KjDHBJatQgz8Q8K8kDSR5K8kiST3ftFybZleRAkq8lecX4y5UkHTfIEfjzwIaquhRYB1yT5ArgVmBrVb0JeAbYNLYqJUkv0zfAq+e5bvHMbipgA3BX176d3pPpJUlLZKAx8CSrkuwBjgI7gJ8Bz1bVsW6TQ8D5Y6lQkjSvgQK8ql6oqnXAWmA9cPGgO0iyOcl0kumZmZnFVSlJepmTugqlqp4FdgJXAquTnNGtWgscXuA926pqqqqmJiYmhqlVkjTLIFehTCRZ3c2/Crga2E8vyG/oNtsI3D2mGiVJ8zij/yasAbYnWUUv8L9eVfcmeRT4apK/Bx4Ebh9jnZKkOfoGeFXtBS6bp/0JeuPhkqRl4J2YktQoA1ySGmWAS1KjDHBJapQBLkmNGuQywhVpcst3lrsESRqKR+CS1CgDXJIaZYBLUqMMcElq1Gl7ElOnh1GfrD54y3Uj/TxpGB6BS1KjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEYZ4JLUqEEeanxBkp1JHk3ySJKbu/ZPJTmcZE83XTv+ciVJxw1yI88x4KNV9ZMkrwV2J9nRrdtaVf84vvIkSQsZ5KHGR4Aj3fxvk+wHzh93YZKkEzupMfAkk/SeUL+ra/pwkr1J7khy9gLv2ZxkOsn0zMzMcNVKkl4ycIAneQ3wDeAjVfUb4DbgjcA6ekfon5nvfVW1raqmqmpqYmJi+IolScCAAZ7kTHrh/eWq+iZAVT1VVS9U1YvAF4D14ytTkjTXIFehBLgd2F9Vn53VvmbWZu8D9o2+PEnSQga5CuUq4IPAw0n2dG2fBG5Msg4o4CDwoTHUJ0lawCBXofwYyDyrvjv6ciRJg/JOTElqlAEuSY0ywCWpUQa4JDXKAJekRvlUeukkjPop9+CT7rV4HoFLUqMMcElqlAEuSY0ywCWpUQa4JDXKAJekRhngktQoA1ySGmWAS1KjvBPzNDKOuwglLR+PwCWpUYM8E/OCJDuTPJrkkSQ3d+3nJNmR5PHu9ezxlytJOm6QI/BjwEer6hLgCuCmJJcAW4D7quoi4L5uWZK0RPoGeFUdqaqfdPO/BfYD5wPvBbZ3m20Hrh9TjZKkeZzUGHiSSeAyYBdwXlUd6VY9CZw32tIkSScycIAneQ3wDeAjVfWb2euqqoBa4H2bk0wnmZ6ZmRmqWEnS7wwU4EnOpBfeX66qb3bNTyVZ061fAxyd771Vta2qpqpqamJiYhQ1S5IY7CqUALcD+6vqs7NW3QNs7OY3AnePvjxJ0kIGuZHnKuCDwMNJ9nRtnwRuAb6eZBPwC+DPx1KhJGlefQO8qn4MZIHV7xhtOZKkQXknpiQ1ygCXpEYZ4JLUKANckhplgEtSo/w9cJ1STsffLB91nw/ect1IP0+nLo/AJalRBrgkNcoAl6RGGeCS1CgDXJIaZYBLUqMMcElqlAEuSY0ywCWpUQa4JDXKAJekRhngktSoQR5qfEeSo0n2zWr7VJLDSfZ007XjLVOSNNcgR+B3AtfM0761qtZ103dHW5YkqZ++AV5VPwJ+tQS1SJJOwjBj4B9OsrcbYjl7oY2SbE4ynWR6ZmZmiN1JkmZbbIDfBrwRWAccAT6z0IZVta2qpqpqamJiYpG7kyTNtagAr6qnquqFqnoR+AKwfrRlSZL6WVSAJ1kza/F9wL6FtpUkjUffZ2Im+QrwNuDcJIeAvwPelmQdUMBB4EPjK1GSNJ++AV5VN87TfPsYapEknQTvxJSkRhngktQoA1ySGmWAS1KjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEYZ4JLUqL630ktqy+SW74z08w7ect1IP0+j4xG4JDXKAJekRhngktQoA1ySGmWAS1KjDHBJapQBLkmN6hvgSe5IcjTJvllt5yTZkeTx7vXs8ZYpSZprkCPwO4Fr5rRtAe6rqouA+7plSdIS6hvgVfUj4Fdzmt8LbO/mtwPXj7YsSVI/ix0DP6+qjnTzTwLnLbRhks1JppNMz8zMLHJ3kqS5hj6JWVUF1AnWb6uqqaqampiYGHZ3kqTOYgP8qSRrALrXo6MrSZI0iMUG+D3Axm5+I3D3aMqRJA1qkMsIvwL8J/DmJIeSbAJuAa5O8jjwZ92yJGkJ9f098Kq6cYFV7xhxLZJOE/5m+Wh4J6YkNcoAl6R
GGeCS1CgDXJIaZYBLUqMMcElqlAEuSY0ywCWpUQa4JDXKAJekRvW9lf5UMepbbyWpdR6BS1KjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEYNdRlhkoPAb4EXgGNVNTWKoiRJ/Y3iOvC3V9XTI/gcSdJJcAhFkho1bIAX8IMku5Nsnm+DJJuTTCeZnpmZGXJ3kqTjhg3wt1TV5cC7gJuSvHXuBlW1raqmqmpqYmJiyN1Jko4bKsCr6nD3ehT4FrB+FEVJkvpbdIAneXWS1x6fB94J7BtVYZKkExvmKpTzgG8lOf45/1pV3xtJVZKkvhYd4FX1BHDpCGuRJJ0ELyOUpEYZ4JLUKANckhplgEtSowxwSWpUMw81lrQ8fKD4qcsjcElqlAEuSY0ywCWpUQa4JDXKk5iSNMc4TtwevOW6kX+mR+CS1CgDXJIaZYBLUqMMcElqlCcxJTXvdL1b1CNwSWqUAS5JjRoqwJNck+SxJAeSbBlVUZKk/oZ5Kv0q4J+BdwGXADcmuWRUhUmSTmyYI/D1wIGqeqKq/hf4KvDe0ZQlSepnmKtQzgd+OWv5EPCnczdKshnY3C0+l+SxIfY5qHOBp5dgP0tppfVppfUHVl6fVlp/YBn7lFuHevsfztc49ssIq2obsG3c+5ktyXRVTS3lPsdtpfVppfUHVl6fVlp/YOX1aZghlMPABbOW13ZtkqQlMEyA/xdwUZILk7wCeD9wz2jKkiT1s+ghlKo6luTDwPeBVcAdVfXIyCobzpIO2SyRldanldYfWHl9Wmn9gRXWp1TVctcgSVoE78SUpEYZ4JLUqCYDPMkdSY4m2Ter7ZwkO5I83r2e3bUnyT91t/vvTXL58lU+vyQXJNmZ5NEkjyS5uWtvsk9JzkryQJKHuv58umu/MMmuru6vdSe/SfLKbvlAt35yWTtwAklWJXkwyb3dctN9SnIwycNJ9iSZ7tqa/N4BJFmd5K4kP02yP8mVLfennyYDHLgTuGZO2xbgvqq6CLivW4berf4XddNm4LYlqvFkHAM+WlWXAFcAN3U/S9Bqn54HNlTVpcA64JokVwC3Alur6k3AM8CmbvtNwDNd+9Zuu1PVzcD+WcsroU9vr6p1s66PbvV7B/B54HtVdTFwKb3/Vy3358SqqskJmAT2zVp+DFjTza8BHuvm/wW4cb7tTtUJuBu4eiX0Cfh94Cf07tJ9Gjija78S+H43/33gym7+jG67LHft8/RlLb0A2ADcC2QF9OkgcO6ctia/d8DrgJ/P/e/can8GmVo9Ap/PeVV1pJt/Ejivm5/vlv/zl7Kwk9H9U/syYBcN96kbatgDHAV2AD8Dnq2qY90ms2t+qT/d+l8Dr1/SggfzOeBjwIvd8utpv08F/CDJ7u5nL6Dd792FwAzwpW6Y64tJXk27/elrJQX4S6r357S56yOTvAb4BvCRqvrN7HWt9amqXqiqdfSOWtcDFy9vRcNJ8m7gaFXtXu5aRuwtVXU5veGEm5K8dfbKxr53ZwCXA7dV1WXA//C74RKguf70tZIC/KkkawC616NdexO3/Cc5k154f7mqvtk1N90ngKp6FthJb3hhdZLjN4/Nrvml/nTrXwf899JW2tdVwHuSHKT3y5sb6I23ttwnqupw93oU+Ba9P7atfu8OAYeqale3fBe9QG+1P32tpAC/B9jYzW+kN458vP0vujPOVwC/nvXPqVNCkgC3A/ur6rOzVjXZpyQTSVZ386+iN56/n16Q39BtNrc/x/t5A3B/d6R0yqiqT1TV2qqapPezEfdX1QdouE9JXp3ktcfngXcC+2j0e1dVTwK/TPLmrukdwKM02p+BLPcg/GIm4CvAEeD/6P3V3URvfPE+4HHgP4Bzum1D78ETPwMeBqaWu/55+vMWev+s2wvs6aZrW+0T8MfAg11/9gF/27W/AXgAOAD8G/DKrv2sbvlAt/4Ny92HPv17G3Bv633qan+omx4B/qZrb/J719W4DpjuvnvfBs5uuT/9Jm+ll6RGraQhFEk6rRjgktQoA1ySGmWAS1KjDHBJapQBLkmNMsAlqVH/D5s3USq2I8LxAAAAAElFTkSuQmCC\n",
Billy Amélie's avatar
Billy Amélie committed
2200
2201
2202
2203
2204
2205
2206
2207
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [