{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# CNN with 4 images collaged into 1 image (1 channel) + MLP"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "https://www.pyimagesearch.com/2019/02/04/keras-multiple-inputs-and-mixed-data/"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import pandas as pd\n",
    "import os\n",
    "import logging\n",
    "logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## A - Preprocessing : Reading Data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "os.chdir('../')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>Patient</th>\n",
       "      <th>Weeks</th>\n",
       "      <th>FVC</th>\n",
       "      <th>Percent</th>\n",
       "      <th>Age</th>\n",
       "      <th>Sex</th>\n",
       "      <th>SmokingStatus</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>-4</td>\n",
       "      <td>2315</td>\n",
       "      <td>58.253649</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>5</td>\n",
       "      <td>2214</td>\n",
       "      <td>55.712129</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>7</td>\n",
       "      <td>2061</td>\n",
       "      <td>51.862104</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>9</td>\n",
       "      <td>2144</td>\n",
       "      <td>53.950679</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>11</td>\n",
       "      <td>2069</td>\n",
       "      <td>52.063412</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                     Patient  Weeks   FVC    Percent  Age   Sex SmokingStatus\n",
       "0  ID00007637202177411956430     -4  2315  58.253649   79  Male     Ex-smoker\n",
       "1  ID00007637202177411956430      5  2214  55.712129   79  Male     Ex-smoker\n",
       "2  ID00007637202177411956430      7  2061  51.862104   79  Male     Ex-smoker\n",
       "3  ID00007637202177411956430      9  2144  53.950679   79  Male     Ex-smoker\n",
       "4  ID00007637202177411956430     11  2069  52.063412   79  Male     Ex-smoker"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from preprocessing.read_load_data import read_data\n",
    "\n",
    "input_directory='../osic-pulmonary-fibrosis-progression'\n",
    "train_df, test_df, sample_df = read_data(input_directory)   \n",
    "train_df.head()"
   ]
  },
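  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A minimal sketch of what `read_data` is assumed to do (the real helper lives in `preprocessing/read_load_data.py`): load the three competition CSVs from the input directory."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# hypothetical sketch of read_data; assumes it simply loads the three CSVs\n",
    "def read_data_sketch(input_directory):\n",
    "    train_df = pd.read_csv(f'{input_directory}/train.csv')\n",
    "    test_df = pd.read_csv(f'{input_directory}/test.csv')\n",
    "    sample_df = pd.read_csv(f'{input_directory}/sample_submission.csv')\n",
    "    return train_df, test_df, sample_df"
   ]
  },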
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## B - Preprocessing : Loading Data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:loading  attributes...\n",
      "INFO:loading images...\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Array shape:  (176, 240, 240)\n",
      "min value:  -0.124956130312047\n",
      "max value:  0.13136708968066269\n"
     ]
    }
   ],
   "source": [
    "from preprocessing.read_load_data import load_images\n",
    "\n",
    "logging.info(\"loading  attributes...\")\n",
    "df = pd.read_csv(f'{input_directory}/train.csv')\n",
    "patients_train_ids= df.Patient.unique().tolist()\n",
    "\n",
    "logging.info(\"loading images...\")\n",
    "images = load_images(input_directory,\n",
    "                    'train',\n",
    "                     patients_train_ids,\n",
    "                     option='collage',\n",
    "                     outputH = 240,\n",
    "                     outputW = 240)\n",
    "\n",
    "print(\"Array shape: \", images.shape)\n",
    "#check value between -1,1\n",
    "print('min value: ', np.amin(images))\n",
    "print('max value: ', np.amax(images))"
   ]
  },
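  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The `'collage'` option is assumed to tile four preprocessed CT slices per patient into a single 240x240, one-channel image, which matches the `(176, 240, 240)` shape above (one collage per patient). A sketch of that tiling step, assuming four slices already resized to a quarter of the output:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# hypothetical sketch of the collage layout (a 2x2 grid); names are assumptions\n",
    "def make_collage_sketch(slices, outputH=240, outputW=240):\n",
    "    h, w = outputH // 2, outputW // 2\n",
    "    collage = np.zeros((outputH, outputW))\n",
    "    for k, s in enumerate(slices[:4]):\n",
    "        r, c = divmod(k, 2)\n",
    "        collage[r*h:(r+1)*h, c*w:(c+1)*w] = s  # s assumed to be (120, 120)\n",
    "    return collage"
   ]
  },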
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## C - Preprocessing : shuffle"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.model_selection import train_test_split\n",
    "\n",
    "split = train_test_split(patients_train_ids, images, test_size=0.2, random_state=42)\n",
    "(trainPatient, testPatient, trainImagesX, testImagesX) = split\n",
    "\n",
    "#split the dataframe like the images\n",
    "df_train = df[df.Patient.isin(trainPatient)].copy()\n",
    "df_test = df[df.Patient.isin(testPatient)].copy()"
   ]
  },
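  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Splitting on patient IDs (rather than on rows) keeps every visit of a patient on the same side of the split, which avoids leakage between train and test. A quick sanity check, not part of the original run:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# no patient should appear in both splits\n",
    "assert set(trainPatient).isdisjoint(testPatient)\n",
    "print(len(trainPatient), 'train patients /', len(testPatient), 'test patients')"
   ]
  },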
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:NumExpr defaulting to 8 threads.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(1093, 8) (280, 8)\n"
     ]
    }
   ],
   "source": [
    "from preprocessing.read_load_data import create_dataframe\n",
    "\n",
    "trainAttrX = create_dataframe(df_train)\n",
    "testAttrX = create_dataframe(df_test)\n",
    "print(trainAttrX.shape, testAttrX.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1093 280\n"
     ]
    }
   ],
   "source": [
    "#set one image per training row\n",
    "\n",
    "indice = 0\n",
    "train_dataset = np.ndarray((len(trainAttrX),240,240))\n",
    "for i,patient in enumerate(trainPatient):\n",
    "    nb_data = len(trainAttrX[trainAttrX.PatientID ==patient])\n",
    "    for ii in range(nb_data):\n",
    "        train_dataset[indice]=(trainImagesX[i])\n",
    "        indice+=1\n",
    "        \n",
    "        \n",
    "indicet = 0        \n",
    "test_dataset = np.ndarray((len(testAttrX),240,240))\n",
    "for i,patient in enumerate(testPatient):\n",
    "    nb_data = len(testAttrX[testAttrX.PatientID ==patient])\n",
    "    for ii in range(nb_data):\n",
    "        test_dataset[indicet] = testImagesX[i]\n",
    "        indicet+=1\n",
    "        \n",
    "        \n",
    "print(len(train_dataset),len(test_dataset))"
   ]
  },
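  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The duplication loop above can also be written with `np.repeat`, repeating each patient's collage once per tabular row. A sketch of the equivalent, assuming `trainPatient` is in the same order as `trainImagesX`:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# counts[i] = number of tabular rows for patient i\n",
    "counts = np.array([len(trainAttrX[trainAttrX.PatientID == p]) for p in trainPatient])\n",
    "train_dataset_alt = np.repeat(trainImagesX, counts, axis=0)\n",
    "assert np.array_equal(train_dataset_alt, train_dataset)"
   ]
  },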
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## D - Preprocessing : Scaling + Encoding"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "from preprocessing.scale_data import scale_variable\n",
    "\n",
    "sc, trainAttrX, testAttrX = scale_variable(trainAttrX, testAttrX,'Target_FVC')\n",
    "sc1, trainAttrX, testAttrX = scale_variable(trainAttrX, testAttrX,'First_FVC')\n",
    "sc2, trainAttrX, testAttrX = scale_variable(trainAttrX, testAttrX,'Age')\n",
    "\n",
    "trainY = trainAttrX.loc[:,'Target_FVC_scaled']\n",
    "testY = testAttrX.loc[:,'Target_FVC_scaled']"
   ]
  },
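  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`scale_variable` returns the fitted scaler together with both dataframes, each gaining a `<name>_scaled` column. A plausible sketch, assuming a `StandardScaler` fitted on the training split only so that no test statistics leak in:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.preprocessing import StandardScaler\n",
    "\n",
    "# hypothetical sketch of scale_variable\n",
    "def scale_variable_sketch(train, test, col):\n",
    "    sc = StandardScaler()\n",
    "    train[col + '_scaled'] = sc.fit_transform(train[[col]])\n",
    "    test[col + '_scaled'] = sc.transform(test[[col]])\n",
    "    return sc, train, test"
   ]
  },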
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "from preprocessing.scale_data import encode_variable\n",
    "\n",
    "trainAttrX, testAttrX = encode_variable(trainAttrX, testAttrX,'Sex')\n",
    "trainAttrX, testAttrX = encode_variable(trainAttrX, testAttrX,'SmokingStatus')\n",
    "\n",
    "for dft in [trainAttrX,testAttrX]:\n",
    "    dft.drop(columns = ['Sex','SmokingStatus','Target_FVC','Target_FVC_scaled',\n",
    "                          'PatientID','First_FVC','Age'], inplace = True)\n",
    "    dft.loc[:,'First_Percent'] = dft.loc[:,'First_Percent']/100\n",
    "    dft.loc[:,'Delta_week'] = dft.loc[:,'Delta_week']/133"
   ]
  },
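  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`encode_variable` is assumed to add an integer-coded `<name>_le` column, consistent with a `LabelEncoder` fitted on the training split. The manual divisions in the cell above put the remaining columns on a comparable scale: `First_Percent` is a percentage, and 133 is presumably the maximum `Delta_week` in the data. A plausible sketch:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.preprocessing import LabelEncoder\n",
    "\n",
    "# hypothetical sketch of encode_variable\n",
    "def encode_variable_sketch(train, test, col):\n",
    "    le = LabelEncoder()\n",
    "    train[col + '_le'] = le.fit_transform(train[col])\n",
    "    test[col + '_le'] = le.transform(test[col])\n",
    "    return train, test"
   ]
  },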
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>First_Percent</th>\n",
       "      <th>Delta_week</th>\n",
       "      <th>First_FVC_scaled</th>\n",
       "      <th>Age_scaled</th>\n",
       "      <th>Sex_le</th>\n",
       "      <th>SmokingStatus_le</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>0.582536</td>\n",
       "      <td>0.067669</td>\n",
       "      <td>-0.631784</td>\n",
       "      <td>1.684379</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.582536</td>\n",
       "      <td>0.082707</td>\n",
       "      <td>-0.631784</td>\n",
       "      <td>1.684379</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.582536</td>\n",
       "      <td>0.097744</td>\n",
       "      <td>-0.631784</td>\n",
       "      <td>1.684379</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>0.582536</td>\n",
       "      <td>0.112782</td>\n",
       "      <td>-0.631784</td>\n",
       "      <td>1.684379</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>0.582536</td>\n",
       "      <td>0.157895</td>\n",
       "      <td>-0.631784</td>\n",
       "      <td>1.684379</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "   First_Percent  Delta_week  First_FVC_scaled  Age_scaled  Sex_le  \\\n",
       "0       0.582536    0.067669         -0.631784    1.684379       1   \n",
       "1       0.582536    0.082707         -0.631784    1.684379       1   \n",
       "2       0.582536    0.097744         -0.631784    1.684379       1   \n",
       "3       0.582536    0.112782         -0.631784    1.684379       1   \n",
       "4       0.582536    0.157895         -0.631784    1.684379       1   \n",
       "\n",
       "   SmokingStatus_le  \n",
       "0                 1  \n",
       "1                 1  \n",
       "2                 1  \n",
       "3                 1  \n",
       "4                 1  "
      ]
     },
     "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "trainAttrX.head(5)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## E - Processing : Create models"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "from processing.models import create_hybrid\n",
    "from keras.optimizers import Adam\n",
    "\n",
    "#model = create_hybrid(trainAttrX.shape[1], shape = (240,240,1))\n",
    "opt = Adam(lr=1e-3, decay=1e-3 / 200)\n",
    "#model.compile(loss=\"mean_squared_error\", optimizer=opt)"
   ]
  },
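  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`create_hybrid` follows the mixed-data pattern from the pyimagesearch article linked at the top: an MLP branch over the six tabular features and a CNN branch over the `(240, 240, 1)` collage, concatenated into a small regression head. A minimal sketch of that shape; the layer sizes here are assumptions, not the project's actual architecture:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from keras.layers import Input, Dense, Conv2D, MaxPooling2D, Flatten, concatenate\n",
    "from keras.models import Model\n",
    "\n",
    "def create_hybrid_sketch(n_attrs, shape=(240, 240, 1)):\n",
    "    # MLP branch over the tabular attributes\n",
    "    mlp_in = Input(shape=(n_attrs,))\n",
    "    x = Dense(8, activation='relu')(mlp_in)\n",
    "    # CNN branch over the single-channel collage\n",
    "    cnn_in = Input(shape=shape)\n",
    "    y = Conv2D(16, 3, activation='relu')(cnn_in)\n",
    "    y = MaxPooling2D(4)(y)\n",
    "    y = Flatten()(y)\n",
    "    y = Dense(8, activation='relu')(y)\n",
    "    # merge both branches and regress the scaled FVC target\n",
    "    z = Dense(4, activation='relu')(concatenate([x, y]))\n",
    "    out = Dense(1, activation='linear')(z)\n",
    "    return Model(inputs=[mlp_in, cnn_in], outputs=out)"
   ]
  },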
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping\n",
    "\n",
    "#set early stopping criteria\n",
    "pat = 5  # number of epochs with no improvement after which training stops\n",
    "es = EarlyStopping(monitor='val_loss', patience=pat, verbose=1)\n",
    "\n",
    "# model checkpoint callback: saves the best model so far to disk\n",
    "cp = ModelCheckpoint('clean_notebooks/cnn_inj_collage_1C.h5', verbose=1, save_best_only=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "def custom_shuffle_split(trainAttrX,train_dataset,trainY,test_size = 0.1 ):\n",
    "    cut = int(len(trainY)*test_size)\n",
    "    arr = list(np.arange(len(trainY)))\n",
    "    np.random.shuffle(arr)\n",
    "    trainidx = arr[cut:]\n",
    "    testidx = arr[:cut]\n",
    "    train_x, train_y = [trainAttrX.iloc[trainidx], train_dataset[trainidx]] , trainY[trainidx]\n",
    "    val_x, val_y = [trainAttrX.iloc[testidx], train_dataset[testidx]] , trainY[testidx]\n",
    "    return train_x, val_x, train_y, val_y"
   ]
  },
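  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`custom_shuffle_split` holds out a random fraction of the training rows and packages the inputs as the two-element list `[attributes, images]` the hybrid model expects, so each call yields a fresh train/validation fold. Illustrative usage (the actual per-fold training loop follows below):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# illustrative call; the training cell below presumably does this once per fold\n",
    "train_x, val_x, train_y, val_y = custom_shuffle_split(trainAttrX, train_dataset, trainY, test_size=0.1)\n",
    "print(len(train_y), 'training rows /', len(val_y), 'validation rows')"
   ]
  },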
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Training on Fold:  1\n",
      "Epoch 1/30\n",
      "111/111 [==============================] - 132s 1s/step - loss: 1.2888 - val_loss: 0.9790\n",
      "\n",
      "Epoch 00001: val_loss improved from inf to 0.97899, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 2/30\n",
      "111/111 [==============================] - 127s 1s/step - loss: 0.8985 - val_loss: 0.8492\n",
      "\n",
      "Epoch 00002: val_loss improved from 0.97899 to 0.84924, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 3/30\n",
      "111/111 [==============================] - 133s 1s/step - loss: 0.7850 - val_loss: 0.6483\n",
      "\n",
      "Epoch 00003: val_loss improved from 0.84924 to 0.64826, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 4/30\n",
      "111/111 [==============================] - 118s 1s/step - loss: 0.6441 - val_loss: 0.6014\n",
      "\n",
      "Epoch 00004: val_loss improved from 0.64826 to 0.60136, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 5/30\n",
      "111/111 [==============================] - 115s 1s/step - loss: 0.5882 - val_loss: 0.4093\n",
      "\n",
      "Epoch 00005: val_loss improved from 0.60136 to 0.40928, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 6/30\n",
      "111/111 [==============================] - 118s 1s/step - loss: 0.4502 - val_loss: 0.3642\n",
      "\n",
      "Epoch 00006: val_loss improved from 0.40928 to 0.36421, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 7/30\n",
      "111/111 [==============================] - 131s 1s/step - loss: 0.4844 - val_loss: 0.3340\n",
      "\n",
      "Epoch 00007: val_loss improved from 0.36421 to 0.33401, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 8/30\n",
      "111/111 [==============================] - 119s 1s/step - loss: 0.4491 - val_loss: 0.3405\n",
      "\n",
      "Epoch 00008: val_loss did not improve from 0.33401\n",
      "Epoch 9/30\n",
      "111/111 [==============================] - 118s 1s/step - loss: 0.3833 - val_loss: 0.3160\n",
      "\n",
      "Epoch 00009: val_loss improved from 0.33401 to 0.31596, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 10/30\n",
      "111/111 [==============================] - ETA: 1:43 - loss: 0.264 - ETA: 1:46 - loss: 0.300 - ETA: 1:47 - loss: 0.325 - ETA: 1:47 - loss: 0.325 - ETA: 1:46 - loss: 0.335 - ETA: 1:45 - loss: 0.345 - ETA: 1:45 - loss: 0.362 - ETA: 1:45 - loss: 0.371 - ETA: 1:45 - loss: 0.376 - ETA: 1:45 - loss: 0.379 - ETA: 1:45 - loss: 0.383 - ETA: 1:45 - loss: 0.384 - ETA: 1:45 - loss: 0.385 - ETA: 1:46 - loss: 0.386 - ETA: 1:46 - loss: 0.385 - ETA: 1:46 - loss: 0.386 - ETA: 1:46 - loss: 0.386 - ETA: 1:46 - loss: 0.386 - ETA: 1:45 - loss: 0.388 - ETA: 1:45 - loss: 0.389 - ETA: 1:45 - loss: 0.391 - ETA: 1:44 - loss: 0.392 - ETA: 1:43 - loss: 0.393 - ETA: 1:42 - loss: 0.393 - ETA: 1:40 - loss: 0.394 - ETA: 1:39 - loss: 0.394 - ETA: 1:38 - loss: 0.393 - ETA: 1:37 - loss: 0.393 - ETA: 1:35 - loss: 0.392 - ETA: 1:34 - loss: 0.392 - ETA: 1:32 - loss: 0.391 - ETA: 1:31 - loss: 0.391 - ETA: 1:29 - loss: 0.390 - ETA: 1:28 - loss: 0.390 - ETA: 1:26 - loss: 0.390 - ETA: 1:25 - loss: 0.391 - ETA: 1:24 - loss: 0.391 - ETA: 1:22 - loss: 0.392 - ETA: 1:21 - loss: 0.392 - ETA: 1:19 - loss: 0.393 - ETA: 1:18 - loss: 0.394 - ETA: 1:17 - loss: 0.395 - ETA: 1:15 - loss: 0.396 - ETA: 1:14 - loss: 0.397 - ETA: 1:13 - loss: 0.398 - ETA: 1:11 - loss: 0.400 - ETA: 1:10 - loss: 0.401 - ETA: 1:09 - loss: 0.401 - ETA: 1:08 - loss: 0.402 - ETA: 1:06 - loss: 0.403 - ETA: 1:05 - loss: 0.403 - ETA: 1:04 - loss: 0.404 - ETA: 1:03 - loss: 0.404 - ETA: 1:01 - loss: 0.405 - ETA: 1:00 - loss: 0.405 - ETA: 59s - loss: 0.405 - ETA: 58s - loss: 0.40 - ETA: 57s - loss: 0.40 - ETA: 56s - loss: 0.40 - ETA: 54s - loss: 0.40 - ETA: 53s - loss: 0.40 - ETA: 52s - loss: 0.40 - ETA: 51s - loss: 0.40 - ETA: 50s - loss: 0.40 - ETA: 49s - loss: 0.40 - ETA: 48s - loss: 0.40 - ETA: 47s - loss: 0.40 - ETA: 46s - loss: 0.40 - ETA: 45s - loss: 0.40 - ETA: 43s - loss: 0.40 - ETA: 42s - loss: 0.40 - ETA: 41s - loss: 0.40 - ETA: 40s - loss: 0.40 - ETA: 39s - loss: 0.41 - ETA: 38s - loss: 0.41 - ETA: 37s - loss: 0.41 - ETA: 36s - loss: 0.41 - ETA: 35s - loss: 0.41 - ETA: 34s - loss: 0.41 - ETA: 33s - loss: 0.41 - ETA: 32s - loss: 0.41 - ETA: 31s - loss: 0.41 - ETA: 29s - loss: 0.41 - ETA: 28s - loss: 0.41 - ETA: 27s - loss: 0.41 - ETA: 26s - loss: 0.41 - ETA: 25s - loss: 0.41 - ETA: 24s - loss: 0.41 - ETA: 23s - loss: 0.41 - ETA: 22s - loss: 0.41 - ETA: 21s - loss: 0.41 - ETA: 20s - loss: 0.41 - ETA: 19s - loss: 0.41 - ETA: 18s - loss: 0.41 - ETA: 17s - loss: 0.41 - ETA: 16s - loss: 0.41 - ETA: 15s - loss: 0.41 - ETA: 14s - loss: 0.41 - ETA: 13s - loss: 0.41 - ETA: 12s - loss: 0.41 - ETA: 10s - loss: 0.41 - ETA: 9s - loss: 0.4141 - ETA: 8s - loss: 0.414 - ETA: 7s - loss: 0.414 - ETA: 6s - loss: 0.414 - ETA: 5s - loss: 0.414 - ETA: 4s - loss: 0.414 - ETA: 3s - loss: 0.414 - ETA: 2s - loss: 0.414 - ETA: 1s - loss: 0.415 - ETA: 0s - loss: 0.415 - 124s 1s/step - loss: 0.4154 - val_loss: 0.3062\n",
      "\n",
      "Epoch 00010: val_loss improved from 0.31596 to 0.30621, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 11/30\n",
      "111/111 [==============================] - ETA: 1:44 - loss: 0.133 - ETA: 1:50 - loss: 0.139 - ETA: 1:46 - loss: 0.180 - ETA: 1:45 - loss: 0.200 - ETA: 1:43 - loss: 0.233 - ETA: 1:41 - loss: 0.251 - ETA: 1:40 - loss: 0.264 - ETA: 1:39 - loss: 0.273 - ETA: 1:38 - loss: 0.293 - ETA: 1:37 - loss: 0.305 - ETA: 1:36 - loss: 0.313 - ETA: 1:35 - loss: 0.322 - ETA: 1:34 - loss: 0.329 - ETA: 1:33 - loss: 0.335 - ETA: 1:32 - loss: 0.342 - ETA: 1:31 - loss: 0.346 - ETA: 1:30 - loss: 0.350 - ETA: 1:29 - loss: 0.353 - ETA: 1:28 - loss: 0.355 - ETA: 1:27 - loss: 0.356 - ETA: 1:26 - loss: 0.357 - ETA: 1:25 - loss: 0.360 - ETA: 1:24 - loss: 0.362 - ETA: 1:23 - loss: 0.363 - ETA: 1:22 - loss: 0.364 - ETA: 1:21 - loss: 0.365 - ETA: 1:20 - loss: 0.365 - ETA: 1:19 - loss: 0.367 - ETA: 1:18 - loss: 0.368 - ETA: 1:17 - loss: 0.369 - ETA: 1:16 - loss: 0.370 - ETA: 1:15 - loss: 0.371 - ETA: 1:14 - loss: 0.372 - ETA: 1:13 - loss: 0.372 - ETA: 1:12 - loss: 0.373 - ETA: 1:11 - loss: 0.373 - ETA: 1:10 - loss: 0.374 - ETA: 1:10 - loss: 0.376 - ETA: 1:09 - loss: 0.377 - ETA: 1:08 - loss: 0.378 - ETA: 1:07 - loss: 0.379 - ETA: 1:06 - loss: 0.380 - ETA: 1:05 - loss: 0.381 - ETA: 1:04 - loss: 0.382 - ETA: 1:03 - loss: 0.383 - ETA: 1:02 - loss: 0.385 - ETA: 1:01 - loss: 0.386 - ETA: 1:00 - loss: 0.386 - ETA: 59s - loss: 0.387 - ETA: 58s - loss: 0.38 - ETA: 57s - loss: 0.38 - ETA: 56s - loss: 0.39 - ETA: 55s - loss: 0.39 - ETA: 54s - loss: 0.39 - ETA: 53s - loss: 0.39 - ETA: 52s - loss: 0.39 - ETA: 51s - loss: 0.39 - ETA: 51s - loss: 0.39 - ETA: 50s - loss: 0.39 - ETA: 49s - loss: 0.39 - ETA: 48s - loss: 0.39 - ETA: 47s - loss: 0.39 - ETA: 46s - loss: 0.39 - ETA: 45s - loss: 0.40 - ETA: 44s - loss: 0.40 - ETA: 44s - loss: 0.40 - ETA: 43s - loss: 0.40 - ETA: 42s - loss: 0.40 - ETA: 41s - loss: 0.40 - ETA: 40s - loss: 0.40 - ETA: 39s - loss: 0.40 - ETA: 38s - loss: 0.40 - ETA: 38s - loss: 0.40 - ETA: 37s - loss: 0.40 - ETA: 36s - loss: 0.40 - ETA: 35s - loss: 0.40 - ETA: 34s - loss: 0.40 - ETA: 33s - loss: 0.40 - ETA: 32s - loss: 0.40 - ETA: 31s - loss: 0.41 - ETA: 30s - loss: 0.41 - ETA: 29s - loss: 0.41 - ETA: 28s - loss: 0.41 - ETA: 27s - loss: 0.41 - ETA: 26s - loss: 0.41 - ETA: 25s - loss: 0.41 - ETA: 24s - loss: 0.41 - ETA: 23s - loss: 0.41 - ETA: 22s - loss: 0.41 - ETA: 21s - loss: 0.41 - ETA: 20s - loss: 0.41 - ETA: 19s - loss: 0.41 - ETA: 18s - loss: 0.41 - ETA: 17s - loss: 0.41 - ETA: 16s - loss: 0.41 - ETA: 15s - loss: 0.41 - ETA: 14s - loss: 0.41 - ETA: 13s - loss: 0.41 - ETA: 12s - loss: 0.41 - ETA: 11s - loss: 0.41 - ETA: 10s - loss: 0.41 - ETA: 8s - loss: 0.4191 - ETA: 7s - loss: 0.419 - ETA: 6s - loss: 0.419 - ETA: 5s - loss: 0.419 - ETA: 4s - loss: 0.419 - ETA: 3s - loss: 0.419 - ETA: 2s - loss: 0.420 - ETA: 1s - loss: 0.420 - ETA: 0s - loss: 0.420 - ETA: 0s - loss: 0.420 - 112s 1s/step - loss: 0.4204 - val_loss: 0.2890\n",
      "\n",
      "Epoch 00011: val_loss improved from 0.30621 to 0.28899, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 12/30\n",
      "111/111 [==============================] - ETA: 1:38 - loss: 0.230 - ETA: 1:47 - loss: 0.285 - ETA: 1:44 - loss: 0.281 - ETA: 1:44 - loss: 0.276 - ETA: 1:43 - loss: 0.269 - ETA: 1:43 - loss: 0.272 - ETA: 1:41 - loss: 0.277 - ETA: 1:40 - loss: 0.283 - ETA: 1:38 - loss: 0.287 - ETA: 1:37 - loss: 0.290 - ETA: 1:36 - loss: 0.291 - ETA: 1:36 - loss: 0.296 - ETA: 1:34 - loss: 0.300 - ETA: 1:33 - loss: 0.303 - ETA: 1:32 - loss: 0.306 - ETA: 1:31 - loss: 0.310 - ETA: 1:30 - loss: 0.313 - ETA: 1:29 - loss: 0.316 - ETA: 1:28 - loss: 0.318 - ETA: 1:27 - loss: 0.321 - ETA: 1:26 - loss: 0.322 - ETA: 1:25 - loss: 0.324 - ETA: 1:24 - loss: 0.326 - ETA: 1:23 - loss: 0.328 - ETA: 1:23 - loss: 0.329 - ETA: 1:21 - loss: 0.331 - ETA: 1:21 - loss: 0.332 - ETA: 1:20 - loss: 0.333 - ETA: 1:19 - loss: 0.334 - ETA: 1:18 - loss: 0.334 - ETA: 1:17 - loss: 0.334 - ETA: 1:16 - loss: 0.336 - ETA: 1:15 - loss: 0.337 - ETA: 1:15 - loss: 0.338 - ETA: 1:14 - loss: 0.340 - ETA: 1:13 - loss: 0.342 - ETA: 1:13 - loss: 0.343 - ETA: 1:12 - loss: 0.345 - ETA: 1:11 - loss: 0.347 - ETA: 1:10 - loss: 0.349 - ETA: 1:09 - loss: 0.351 - ETA: 1:08 - loss: 0.354 - ETA: 1:07 - loss: 0.356 - ETA: 1:06 - loss: 0.358 - ETA: 1:05 - loss: 0.360 - ETA: 1:05 - loss: 0.362 - ETA: 1:04 - loss: 0.363 - ETA: 1:03 - loss: 0.365 - ETA: 1:02 - loss: 0.367 - ETA: 1:01 - loss: 0.368 - ETA: 1:00 - loss: 0.370 - ETA: 59s - loss: 0.371 - ETA: 58s - loss: 0.37 - ETA: 57s - loss: 0.37 - ETA: 56s - loss: 0.37 - ETA: 55s - loss: 0.37 - ETA: 54s - loss: 0.37 - ETA: 53s - loss: 0.37 - ETA: 52s - loss: 0.38 - ETA: 51s - loss: 0.38 - ETA: 50s - loss: 0.38 - ETA: 49s - loss: 0.38 - ETA: 48s - loss: 0.38 - ETA: 47s - loss: 0.38 - ETA: 46s - loss: 0.38 - ETA: 45s - loss: 0.38 - ETA: 44s - loss: 0.38 - ETA: 43s - loss: 0.38 - ETA: 42s - loss: 0.38 - ETA: 41s - loss: 0.39 - ETA: 40s - loss: 0.39 - ETA: 39s - loss: 0.39 - ETA: 38s - loss: 0.39 - ETA: 37s - loss: 0.39 - ETA: 36s - loss: 0.39 - ETA: 35s - loss: 0.39 - ETA: 34s - loss: 0.39 - ETA: 33s - loss: 0.39 - ETA: 32s - loss: 0.39 - ETA: 31s - loss: 0.39 - ETA: 30s - loss: 0.39 - ETA: 29s - loss: 0.39 - ETA: 28s - loss: 0.39 - ETA: 27s - loss: 0.39 - ETA: 26s - loss: 0.39 - ETA: 25s - loss: 0.39 - ETA: 24s - loss: 0.39 - ETA: 23s - loss: 0.39 - ETA: 22s - loss: 0.39 - ETA: 21s - loss: 0.39 - ETA: 20s - loss: 0.39 - ETA: 19s - loss: 0.39 - ETA: 18s - loss: 0.39 - ETA: 17s - loss: 0.39 - ETA: 15s - loss: 0.39 - ETA: 14s - loss: 0.39 - ETA: 13s - loss: 0.40 - ETA: 12s - loss: 0.40 - ETA: 11s - loss: 0.40 - ETA: 10s - loss: 0.40 - ETA: 9s - loss: 0.4006 - ETA: 8s - loss: 0.400 - ETA: 7s - loss: 0.400 - ETA: 6s - loss: 0.400 - ETA: 5s - loss: 0.400 - ETA: 4s - loss: 0.400 - ETA: 3s - loss: 0.400 - ETA: 2s - loss: 0.400 - ETA: 1s - loss: 0.400 - ETA: 0s - loss: 0.400 - ETA: 0s - loss: 0.400 - 112s 1s/step - loss: 0.4008 - val_loss: 0.2743\n",
      "\n",
      "Epoch 00012: val_loss improved from 0.28899 to 0.27426, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 13/30\n",
      "111/111 [==============================] - ETA: 1:44 - loss: 0.606 - ETA: 1:45 - loss: 0.501 - ETA: 1:45 - loss: 0.442 - ETA: 1:43 - loss: 0.407 - ETA: 1:42 - loss: 0.381 - ETA: 1:41 - loss: 0.361 - ETA: 1:40 - loss: 0.350 - ETA: 1:40 - loss: 0.343 - ETA: 1:39 - loss: 0.342 - ETA: 1:39 - loss: 0.339 - ETA: 1:38 - loss: 0.338 - ETA: 1:37 - loss: 0.337 - ETA: 1:36 - loss: 0.337 - ETA: 1:36 - loss: 0.337 - ETA: 1:35 - loss: 0.335 - ETA: 1:35 - loss: 0.335 - ETA: 1:34 - loss: 0.335 - ETA: 1:34 - loss: 0.335 - ETA: 1:34 - loss: 0.336 - ETA: 1:34 - loss: 0.337 - ETA: 1:33 - loss: 0.338 - ETA: 1:33 - loss: 0.339 - ETA: 1:33 - loss: 0.340 - ETA: 1:33 - loss: 0.341 - ETA: 1:32 - loss: 0.341 - ETA: 1:32 - loss: 0.342 - ETA: 1:31 - loss: 0.342 - ETA: 1:31 - loss: 0.342 - ETA: 1:30 - loss: 0.342 - ETA: 1:29 - loss: 0.342 - ETA: 1:28 - loss: 0.342 - ETA: 1:27 - loss: 0.342 - ETA: 1:26 - loss: 0.343 - ETA: 1:25 - loss: 0.344 - ETA: 1:24 - loss: 0.345 - ETA: 1:22 - loss: 0.347 - ETA: 1:21 - loss: 0.349 - ETA: 1:20 - loss: 0.350 - ETA: 1:18 - loss: 0.351 - ETA: 1:17 - loss: 0.352 - ETA: 1:16 - loss: 0.352 - ETA: 1:14 - loss: 0.353 - ETA: 1:13 - loss: 0.354 - ETA: 1:12 - loss: 0.355 - ETA: 1:10 - loss: 0.356 - ETA: 1:09 - loss: 0.357 - ETA: 1:08 - loss: 0.357 - ETA: 1:07 - loss: 0.358 - ETA: 1:05 - loss: 0.358 - ETA: 1:04 - loss: 0.359 - ETA: 1:03 - loss: 0.360 - ETA: 1:02 - loss: 0.360 - ETA: 1:01 - loss: 0.361 - ETA: 1:00 - loss: 0.362 - ETA: 58s - loss: 0.362 - ETA: 57s - loss: 0.36 - ETA: 56s - loss: 0.36 - ETA: 55s - loss: 0.36 - ETA: 54s - loss: 0.36 - ETA: 53s - loss: 0.36 - ETA: 51s - loss: 0.36 - ETA: 50s - loss: 0.36 - ETA: 49s - loss: 0.36 - ETA: 48s - loss: 0.36 - ETA: 47s - loss: 0.36 - ETA: 46s - loss: 0.36 - ETA: 45s - loss: 0.36 - ETA: 44s - loss: 0.36 - ETA: 43s - loss: 0.36 - ETA: 42s - loss: 0.37 - ETA: 41s - loss: 0.37 - ETA: 40s - loss: 0.37 - ETA: 38s - loss: 0.37 - ETA: 37s - loss: 0.37 - ETA: 36s - loss: 0.37 - ETA: 35s - loss: 0.37 - ETA: 34s - loss: 0.37 - ETA: 33s - loss: 0.37 - ETA: 32s - loss: 0.37 - ETA: 31s - loss: 0.37 - ETA: 30s - loss: 0.37 - ETA: 29s - loss: 0.37 - ETA: 28s - loss: 0.37 - ETA: 27s - loss: 0.37 - ETA: 26s - loss: 0.37 - ETA: 25s - loss: 0.37 - ETA: 24s - loss: 0.37 - ETA: 23s - loss: 0.37 - ETA: 22s - loss: 0.37 - ETA: 21s - loss: 0.37 - ETA: 20s - loss: 0.38 - ETA: 19s - loss: 0.38 - ETA: 18s - loss: 0.38 - ETA: 17s - loss: 0.38 - ETA: 16s - loss: 0.38 - ETA: 15s - loss: 0.38 - ETA: 14s - loss: 0.38 - ETA: 13s - loss: 0.38 - ETA: 12s - loss: 0.38 - ETA: 11s - loss: 0.38 - ETA: 10s - loss: 0.38 - ETA: 9s - loss: 0.3834 - ETA: 8s - loss: 0.383 - ETA: 7s - loss: 0.383 - ETA: 6s - loss: 0.384 - ETA: 5s - loss: 0.384 - ETA: 4s - loss: 0.384 - ETA: 3s - loss: 0.385 - ETA: 2s - loss: 0.385 - ETA: 1s - loss: 0.385 - ETA: 0s - loss: 0.385 - 115s 1s/step - loss: 0.3861 - val_loss: 0.2721\n",
      "\n",
      "Epoch 00013: val_loss improved from 0.27426 to 0.27212, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 14/30\n",
      "111/111 [==============================] - ETA: 2:05 - loss: 0.416 - ETA: 2:00 - loss: 0.383 - ETA: 2:05 - loss: 0.369 - ETA: 2:07 - loss: 0.367 - ETA: 2:05 - loss: 0.356 - ETA: 2:05 - loss: 0.349 - ETA: 2:03 - loss: 0.343 - ETA: 2:01 - loss: 0.343 - ETA: 1:58 - loss: 0.341 - ETA: 1:57 - loss: 0.339 - ETA: 1:55 - loss: 0.337 - ETA: 1:54 - loss: 0.337 - ETA: 1:52 - loss: 0.337 - ETA: 1:50 - loss: 0.337 - ETA: 1:48 - loss: 0.336 - ETA: 1:45 - loss: 0.336 - ETA: 1:44 - loss: 0.336 - ETA: 1:43 - loss: 0.335 - ETA: 1:42 - loss: 0.335 - ETA: 1:41 - loss: 0.334 - ETA: 1:39 - loss: 0.333 - ETA: 1:38 - loss: 0.333 - ETA: 1:37 - loss: 0.333 - ETA: 1:37 - loss: 0.333 - ETA: 1:36 - loss: 0.333 - ETA: 1:34 - loss: 0.334 - ETA: 1:33 - loss: 0.334 - ETA: 1:31 - loss: 0.334 - ETA: 1:30 - loss: 0.334 - ETA: 1:29 - loss: 0.335 - ETA: 1:28 - loss: 0.335 - ETA: 1:27 - loss: 0.335 - ETA: 1:26 - loss: 0.336 - ETA: 1:24 - loss: 0.336 - ETA: 1:23 - loss: 0.337 - ETA: 1:22 - loss: 0.337 - ETA: 1:21 - loss: 0.338 - ETA: 1:19 - loss: 0.338 - ETA: 1:18 - loss: 0.338 - ETA: 1:17 - loss: 0.338 - ETA: 1:16 - loss: 0.339 - ETA: 1:15 - loss: 0.339 - ETA: 1:14 - loss: 0.339 - ETA: 1:13 - loss: 0.340 - ETA: 1:12 - loss: 0.340 - ETA: 1:11 - loss: 0.341 - ETA: 1:10 - loss: 0.341 - ETA: 1:09 - loss: 0.342 - ETA: 1:08 - loss: 0.342 - ETA: 1:06 - loss: 0.343 - ETA: 1:06 - loss: 0.344 - ETA: 1:05 - loss: 0.344 - ETA: 1:03 - loss: 0.345 - ETA: 1:02 - loss: 0.345 - ETA: 1:01 - loss: 0.345 - ETA: 1:00 - loss: 0.346 - ETA: 59s - loss: 0.346 - ETA: 58s - loss: 0.34 - ETA: 57s - loss: 0.34 - ETA: 56s - loss: 0.34 - ETA: 55s - loss: 0.34 - ETA: 54s - loss: 0.34 - ETA: 53s - loss: 0.34 - ETA: 52s - loss: 0.34 - ETA: 51s - loss: 0.34 - ETA: 50s - loss: 0.34 - ETA: 49s - loss: 0.34 - ETA: 48s - loss: 0.34 - ETA: 47s - loss: 0.34 - ETA: 45s - loss: 0.34 - ETA: 44s - loss: 0.34 - ETA: 43s - loss: 0.34 - ETA: 42s - loss: 0.34 - ETA: 41s - loss: 0.34 - ETA: 40s - loss: 0.35 - ETA: 39s - loss: 0.35 - ETA: 38s - loss: 0.35 - ETA: 37s - loss: 0.35 - ETA: 36s - loss: 0.35 - ETA: 35s - loss: 0.35 - ETA: 34s - loss: 0.35 - ETA: 33s - loss: 0.35 - ETA: 32s - loss: 0.35 - ETA: 31s - loss: 0.35 - ETA: 30s - loss: 0.35 - ETA: 29s - loss: 0.35 - ETA: 28s - loss: 0.35 - ETA: 27s - loss: 0.35 - ETA: 26s - loss: 0.35 - ETA: 24s - loss: 0.35 - ETA: 23s - loss: 0.35 - ETA: 22s - loss: 0.35 - ETA: 21s - loss: 0.35 - ETA: 20s - loss: 0.35 - ETA: 18s - loss: 0.35 - ETA: 17s - loss: 0.35 - ETA: 16s - loss: 0.35 - ETA: 15s - loss: 0.35 - ETA: 14s - loss: 0.35 - ETA: 12s - loss: 0.35 - ETA: 11s - loss: 0.35 - ETA: 10s - loss: 0.35 - ETA: 9s - loss: 0.3567 - ETA: 8s - loss: 0.356 - ETA: 6s - loss: 0.357 - ETA: 5s - loss: 0.357 - ETA: 4s - loss: 0.357 - ETA: 3s - loss: 0.357 - ETA: 2s - loss: 0.357 - ETA: 1s - loss: 0.357 - ETA: 0s - loss: 0.358 - 128s 1s/step - loss: 0.3581 - val_loss: 0.2541\n",
      "\n",
      "Epoch 00014: val_loss improved from 0.27212 to 0.25407, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 15/30\n",
      "111/111 [==============================] - ETA: 1:40 - loss: 0.350 - ETA: 1:42 - loss: 0.539 - ETA: 1:41 - loss: 0.541 - ETA: 1:41 - loss: 0.572 - ETA: 1:40 - loss: 0.573 - ETA: 1:39 - loss: 0.585 - ETA: 1:38 - loss: 0.589 - ETA: 1:38 - loss: 0.587 - ETA: 1:37 - loss: 0.581 - ETA: 1:36 - loss: 0.571 - ETA: 1:35 - loss: 0.562 - ETA: 1:34 - loss: 0.552 - ETA: 1:34 - loss: 0.548 - ETA: 1:33 - loss: 0.543 - ETA: 1:32 - loss: 0.539 - ETA: 1:31 - loss: 0.534 - ETA: 1:30 - loss: 0.530 - ETA: 1:29 - loss: 0.525 - ETA: 1:28 - loss: 0.521 - ETA: 1:27 - loss: 0.517 - ETA: 1:26 - loss: 0.512 - ETA: 1:25 - loss: 0.507 - ETA: 1:24 - loss: 0.503 - ETA: 1:23 - loss: 0.499 - ETA: 1:22 - loss: 0.496 - ETA: 1:22 - loss: 0.493 - ETA: 1:21 - loss: 0.490 - ETA: 1:20 - loss: 0.487 - ETA: 1:19 - loss: 0.484 - ETA: 1:18 - loss: 0.482 - ETA: 1:17 - loss: 0.480 - ETA: 1:16 - loss: 0.478 - ETA: 1:15 - loss: 0.476 - ETA: 1:14 - loss: 0.475 - ETA: 1:13 - loss: 0.474 - ETA: 1:12 - loss: 0.473 - ETA: 1:11 - loss: 0.472 - ETA: 1:10 - loss: 0.471 - ETA: 1:09 - loss: 0.470 - ETA: 1:08 - loss: 0.468 - ETA: 1:07 - loss: 0.467 - ETA: 1:06 - loss: 0.466 - ETA: 1:05 - loss: 0.465 - ETA: 1:04 - loss: 0.464 - ETA: 1:03 - loss: 0.462 - ETA: 1:02 - loss: 0.461 - ETA: 1:02 - loss: 0.460 - ETA: 1:01 - loss: 0.459 - ETA: 1:00 - loss: 0.458 - ETA: 59s - loss: 0.457 - ETA: 58s - loss: 0.45 - ETA: 57s - loss: 0.45 - ETA: 56s - loss: 0.45 - ETA: 56s - loss: 0.45 - ETA: 55s - loss: 0.45 - ETA: 54s - loss: 0.45 - ETA: 53s - loss: 0.45 - ETA: 52s - loss: 0.45 - ETA: 51s - loss: 0.45 - ETA: 50s - loss: 0.45 - ETA: 50s - loss: 0.45 - ETA: 49s - loss: 0.45 - ETA: 48s - loss: 0.45 - ETA: 47s - loss: 0.45 - ETA: 46s - loss: 0.45 - ETA: 45s - loss: 0.45 - ETA: 44s - loss: 0.45 - ETA: 44s - loss: 0.45 - ETA: 43s - loss: 0.45 - ETA: 42s - loss: 0.45 - ETA: 41s - loss: 0.45 - ETA: 40s - loss: 0.45 - ETA: 39s - loss: 0.44 - ETA: 38s - loss: 0.44 - ETA: 37s - loss: 0.44 - ETA: 36s - loss: 0.44 - ETA: 35s - loss: 0.44 - ETA: 34s - loss: 0.44 - ETA: 33s - loss: 0.44 - ETA: 32s - loss: 0.44 - ETA: 31s - loss: 0.44 - ETA: 30s - loss: 0.44 - ETA: 29s - loss: 0.44 - ETA: 28s - loss: 0.44 - ETA: 27s - loss: 0.44 - ETA: 25s - loss: 0.44 - ETA: 24s - loss: 0.44 - ETA: 23s - loss: 0.44 - ETA: 22s - loss: 0.44 - ETA: 21s - loss: 0.44 - ETA: 20s - loss: 0.44 - ETA: 19s - loss: 0.44 - ETA: 18s - loss: 0.44 - ETA: 17s - loss: 0.44 - ETA: 16s - loss: 0.44 - ETA: 15s - loss: 0.44 - ETA: 14s - loss: 0.44 - ETA: 13s - loss: 0.44 - ETA: 12s - loss: 0.44 - ETA: 11s - loss: 0.44 - ETA: 10s - loss: 0.44 - ETA: 9s - loss: 0.4448 - ETA: 8s - loss: 0.444 - ETA: 7s - loss: 0.444 - ETA: 6s - loss: 0.444 - ETA: 5s - loss: 0.443 - ETA: 4s - loss: 0.443 - ETA: 3s - loss: 0.443 - ETA: 2s - loss: 0.443 - ETA: 1s - loss: 0.442 - ETA: 0s - loss: 0.442 - 114s 1s/step - loss: 0.4425 - val_loss: 0.2570\n",
      "\n",
      "Epoch 00015: val_loss did not improve from 0.25407\n",
      "Epoch 16/30\n",
      "111/111 [==============================] - ETA: 1:44 - loss: 0.314 - ETA: 1:43 - loss: 0.312 - ETA: 1:41 - loss: 0.341 - ETA: 1:40 - loss: 0.357 - ETA: 1:41 - loss: 0.454 - ETA: 1:40 - loss: 0.512 - ETA: 1:40 - loss: 0.541 - ETA: 1:39 - loss: 0.557 - ETA: 1:38 - loss: 0.566 - ETA: 1:37 - loss: 0.572 - ETA: 1:36 - loss: 0.576 - ETA: 1:35 - loss: 0.576 - ETA: 1:34 - loss: 0.575 - ETA: 1:33 - loss: 0.573 - ETA: 1:32 - loss: 0.571 - ETA: 1:31 - loss: 0.567 - ETA: 1:29 - loss: 0.563 - ETA: 1:28 - loss: 0.560 - ETA: 1:27 - loss: 0.556 - ETA: 1:26 - loss: 0.552 - ETA: 1:25 - loss: 0.549 - ETA: 1:24 - loss: 0.546 - ETA: 1:23 - loss: 0.543 - ETA: 1:22 - loss: 0.540 - ETA: 1:21 - loss: 0.537 - ETA: 1:20 - loss: 0.534 - ETA: 1:19 - loss: 0.531 - ETA: 1:18 - loss: 0.530 - ETA: 1:17 - loss: 0.528 - ETA: 1:17 - loss: 0.527 - ETA: 1:16 - loss: 0.525 - ETA: 1:15 - loss: 0.523 - ETA: 1:14 - loss: 0.521 - ETA: 1:13 - loss: 0.519 - ETA: 1:12 - loss: 0.517 - ETA: 1:11 - loss: 0.516 - ETA: 1:10 - loss: 0.514 - ETA: 1:09 - loss: 0.513 - ETA: 1:08 - loss: 0.511 - ETA: 1:07 - loss: 0.510 - ETA: 1:06 - loss: 0.508 - ETA: 1:05 - loss: 0.506 - ETA: 1:05 - loss: 0.505 - ETA: 1:04 - loss: 0.503 - ETA: 1:03 - loss: 0.501 - ETA: 1:02 - loss: 0.500 - ETA: 1:01 - loss: 0.498 - ETA: 1:01 - loss: 0.497 - ETA: 1:00 - loss: 0.495 - ETA: 59s - loss: 0.494 - ETA: 58s - loss: 0.49 - ETA: 57s - loss: 0.49 - ETA: 57s - loss: 0.48 - ETA: 56s - loss: 0.48 - ETA: 56s - loss: 0.48 - ETA: 55s - loss: 0.48 - ETA: 54s - loss: 0.48 - ETA: 53s - loss: 0.48 - ETA: 53s - loss: 0.48 - ETA: 52s - loss: 0.47 - ETA: 51s - loss: 0.47 - ETA: 50s - loss: 0.47 - ETA: 49s - loss: 0.47 - ETA: 48s - loss: 0.47 - ETA: 47s - loss: 0.47 - ETA: 46s - loss: 0.47 - ETA: 45s - loss: 0.47 - ETA: 44s - loss: 0.46 - ETA: 43s - loss: 0.46 - ETA: 42s - loss: 0.46 - ETA: 41s - loss: 0.46 - ETA: 40s - loss: 0.46 - ETA: 39s - loss: 0.46 - ETA: 38s - loss: 0.46 - ETA: 37s - loss: 0.46 - ETA: 36s - loss: 0.46 - ETA: 34s - loss: 0.46 - ETA: 33s - loss: 0.46 - ETA: 32s - loss: 0.45 - ETA: 31s - loss: 0.45 - ETA: 30s - loss: 0.45 - ETA: 29s - loss: 0.45 - ETA: 28s - loss: 0.45 - ETA: 27s - loss: 0.45 - ETA: 26s - loss: 0.45 - ETA: 25s - loss: 0.45 - ETA: 24s - loss: 0.45 - ETA: 23s - loss: 0.45 - ETA: 22s - loss: 0.45 - ETA: 21s - loss: 0.45 - ETA: 20s - loss: 0.45 - ETA: 19s - loss: 0.45 - ETA: 18s - loss: 0.45 - ETA: 17s - loss: 0.44 - ETA: 16s - loss: 0.44 - ETA: 15s - loss: 0.44 - ETA: 14s - loss: 0.44 - ETA: 13s - loss: 0.44 - ETA: 12s - loss: 0.44 - ETA: 11s - loss: 0.44 - ETA: 10s - loss: 0.44 - ETA: 9s - loss: 0.4464 - ETA: 8s - loss: 0.446 - ETA: 7s - loss: 0.445 - ETA: 6s - loss: 0.445 - ETA: 5s - loss: 0.444 - ETA: 4s - loss: 0.444 - ETA: 3s - loss: 0.444 - ETA: 2s - loss: 0.443 - ETA: 1s - loss: 0.443 - ETA: 0s - loss: 0.443 - 115s 1s/step - loss: 0.4428 - val_loss: 0.2397\n",
      "\n",
      "Epoch 00016: val_loss improved from 0.25407 to 0.23965, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 17/30\n",
      "111/111 [==============================] - ETA: 1:41 - loss: 0.315 - ETA: 1:43 - loss: 0.266 - ETA: 1:43 - loss: 0.269 - ETA: 1:43 - loss: 0.262 - ETA: 1:44 - loss: 0.269 - ETA: 1:43 - loss: 0.292 - ETA: 1:41 - loss: 0.305 - ETA: 1:40 - loss: 0.315 - ETA: 1:38 - loss: 0.323 - ETA: 1:37 - loss: 0.329 - ETA: 1:36 - loss: 0.332 - ETA: 1:35 - loss: 0.335 - ETA: 1:34 - loss: 0.336 - ETA: 1:33 - loss: 0.337 - ETA: 1:33 - loss: 0.338 - ETA: 1:31 - loss: 0.338 - ETA: 1:31 - loss: 0.338 - ETA: 1:30 - loss: 0.338 - ETA: 1:29 - loss: 0.337 - ETA: 1:28 - loss: 0.336 - ETA: 1:27 - loss: 0.336 - ETA: 1:26 - loss: 0.337 - ETA: 1:26 - loss: 0.337 - ETA: 1:25 - loss: 0.338 - ETA: 1:24 - loss: 0.338 - ETA: 1:23 - loss: 0.338 - ETA: 1:22 - loss: 0.338 - ETA: 1:22 - loss: 0.337 - ETA: 1:21 - loss: 0.337 - ETA: 1:20 - loss: 0.336 - ETA: 1:19 - loss: 0.335 - ETA: 1:19 - loss: 0.335 - ETA: 1:18 - loss: 0.334 - ETA: 1:18 - loss: 0.334 - ETA: 1:17 - loss: 0.334 - ETA: 1:16 - loss: 0.333 - ETA: 1:16 - loss: 0.333 - ETA: 1:15 - loss: 0.333 - ETA: 1:14 - loss: 0.333 - ETA: 1:13 - loss: 0.333 - ETA: 1:13 - loss: 0.333 - ETA: 1:12 - loss: 0.332 - ETA: 1:11 - loss: 0.332 - ETA: 1:10 - loss: 0.332 - ETA: 1:09 - loss: 0.333 - ETA: 1:08 - loss: 0.333 - ETA: 1:07 - loss: 0.333 - ETA: 1:06 - loss: 0.334 - ETA: 1:04 - loss: 0.334 - ETA: 1:03 - loss: 0.335 - ETA: 1:02 - loss: 0.336 - ETA: 1:01 - loss: 0.336 - ETA: 1:00 - loss: 0.337 - ETA: 59s - loss: 0.337 - ETA: 58s - loss: 0.33 - ETA: 57s - loss: 0.33 - ETA: 56s - loss: 0.33 - ETA: 54s - loss: 0.33 - ETA: 53s - loss: 0.34 - ETA: 52s - loss: 0.34 - ETA: 51s - loss: 0.34 - ETA: 50s - loss: 0.34 - ETA: 49s - loss: 0.34 - ETA: 48s - loss: 0.34 - ETA: 47s - loss: 0.34 - ETA: 46s - loss: 0.34 - ETA: 45s - loss: 0.34 - ETA: 43s - loss: 0.34 - ETA: 42s - loss: 0.34 - ETA: 41s - loss: 0.34 - ETA: 40s - loss: 0.34 - ETA: 39s - loss: 0.34 - ETA: 38s - loss: 0.34 - ETA: 37s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 35s - loss: 0.35 - ETA: 34s - loss: 0.35 - ETA: 33s - loss: 0.35 - ETA: 32s - loss: 0.35 - ETA: 31s - loss: 0.35 - ETA: 30s - loss: 0.35 - ETA: 29s - loss: 0.35 - ETA: 28s - loss: 0.35 - ETA: 27s - loss: 0.35 - ETA: 26s - loss: 0.35 - ETA: 25s - loss: 0.35 - ETA: 24s - loss: 0.35 - ETA: 23s - loss: 0.35 - ETA: 22s - loss: 0.35 - ETA: 21s - loss: 0.35 - ETA: 20s - loss: 0.35 - ETA: 19s - loss: 0.35 - ETA: 18s - loss: 0.35 - ETA: 17s - loss: 0.35 - ETA: 16s - loss: 0.35 - ETA: 15s - loss: 0.35 - ETA: 14s - loss: 0.35 - ETA: 13s - loss: 0.36 - ETA: 12s - loss: 0.36 - ETA: 11s - loss: 0.36 - ETA: 10s - loss: 0.36 - ETA: 9s - loss: 0.3621 - ETA: 8s - loss: 0.362 - ETA: 7s - loss: 0.363 - ETA: 6s - loss: 0.363 - ETA: 5s - loss: 0.363 - ETA: 4s - loss: 0.364 - ETA: 3s - loss: 0.364 - ETA: 2s - loss: 0.365 - ETA: 1s - loss: 0.365 - ETA: 0s - loss: 0.365 - 114s 1s/step - loss: 0.3661 - val_loss: 0.2333\n",
      "\n",
      "Epoch 00017: val_loss improved from 0.23965 to 0.23325, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 18/30\n",
      "111/111 [==============================] - ETA: 1:49 - loss: 0.287 - ETA: 1:47 - loss: 0.245 - ETA: 1:48 - loss: 0.275 - ETA: 1:48 - loss: 0.277 - ETA: 1:47 - loss: 0.269 - ETA: 1:47 - loss: 0.267 - ETA: 1:47 - loss: 0.265 - ETA: 1:47 - loss: 0.266 - ETA: 1:47 - loss: 0.267 - ETA: 1:46 - loss: 0.268 - ETA: 1:46 - loss: 0.268 - ETA: 1:46 - loss: 0.270 - ETA: 1:46 - loss: 0.272 - ETA: 1:45 - loss: 0.274 - ETA: 1:44 - loss: 0.276 - ETA: 1:43 - loss: 0.278 - ETA: 1:42 - loss: 0.279 - ETA: 1:40 - loss: 0.279 - ETA: 1:39 - loss: 0.280 - ETA: 1:37 - loss: 0.280 - ETA: 1:35 - loss: 0.280 - ETA: 1:34 - loss: 0.279 - ETA: 1:32 - loss: 0.279 - ETA: 1:31 - loss: 0.280 - ETA: 1:30 - loss: 0.281 - ETA: 1:28 - loss: 0.282 - ETA: 1:27 - loss: 0.282 - ETA: 1:26 - loss: 0.283 - ETA: 1:25 - loss: 0.285 - ETA: 1:24 - loss: 0.287 - ETA: 1:23 - loss: 0.288 - ETA: 1:22 - loss: 0.289 - ETA: 1:20 - loss: 0.290 - ETA: 1:19 - loss: 0.291 - ETA: 1:18 - loss: 0.292 - ETA: 1:17 - loss: 0.293 - ETA: 1:16 - loss: 0.294 - ETA: 1:15 - loss: 0.294 - ETA: 1:13 - loss: 0.295 - ETA: 1:12 - loss: 0.296 - ETA: 1:11 - loss: 0.297 - ETA: 1:10 - loss: 0.297 - ETA: 1:09 - loss: 0.298 - ETA: 1:08 - loss: 0.298 - ETA: 1:07 - loss: 0.299 - ETA: 1:05 - loss: 0.300 - ETA: 1:04 - loss: 0.300 - ETA: 1:03 - loss: 0.301 - ETA: 1:02 - loss: 0.302 - ETA: 1:01 - loss: 0.303 - ETA: 1:00 - loss: 0.303 - ETA: 59s - loss: 0.304 - ETA: 58s - loss: 0.30 - ETA: 57s - loss: 0.30 - ETA: 56s - loss: 0.30 - ETA: 55s - loss: 0.30 - ETA: 54s - loss: 0.30 - ETA: 53s - loss: 0.30 - ETA: 52s - loss: 0.30 - ETA: 51s - loss: 0.30 - ETA: 50s - loss: 0.30 - ETA: 49s - loss: 0.30 - ETA: 48s - loss: 0.30 - ETA: 47s - loss: 0.30 - ETA: 46s - loss: 0.30 - ETA: 45s - loss: 0.30 - ETA: 44s - loss: 0.30 - ETA: 43s - loss: 0.30 - ETA: 42s - loss: 0.30 - ETA: 41s - loss: 0.30 - ETA: 40s - loss: 0.31 - ETA: 39s - loss: 0.31 - ETA: 38s - loss: 0.31 - ETA: 37s - loss: 0.31 - ETA: 36s - loss: 0.31 - ETA: 35s - loss: 0.31 - ETA: 34s - loss: 0.31 - ETA: 33s - loss: 0.31 - ETA: 32s - loss: 0.31 - ETA: 31s - loss: 0.31 - ETA: 30s - loss: 0.31 - ETA: 29s - loss: 0.31 - ETA: 28s - loss: 0.31 - ETA: 27s - loss: 0.31 - ETA: 26s - loss: 0.31 - ETA: 25s - loss: 0.31 - ETA: 24s - loss: 0.31 - ETA: 23s - loss: 0.31 - ETA: 22s - loss: 0.31 - ETA: 21s - loss: 0.31 - ETA: 20s - loss: 0.31 - ETA: 19s - loss: 0.31 - ETA: 18s - loss: 0.31 - ETA: 17s - loss: 0.31 - ETA: 16s - loss: 0.31 - ETA: 15s - loss: 0.31 - ETA: 14s - loss: 0.31 - ETA: 13s - loss: 0.31 - ETA: 12s - loss: 0.31 - ETA: 11s - loss: 0.31 - ETA: 10s - loss: 0.31 - ETA: 9s - loss: 0.3201 - ETA: 8s - loss: 0.320 - ETA: 7s - loss: 0.320 - ETA: 6s - loss: 0.320 - ETA: 5s - loss: 0.321 - ETA: 4s - loss: 0.321 - ETA: 3s - loss: 0.321 - ETA: 2s - loss: 0.321 - ETA: 1s - loss: 0.321 - ETA: 0s - loss: 0.322 - 118s 1s/step - loss: 0.3223 - val_loss: 0.2254\n",
      "\n",
      "Epoch 00018: val_loss improved from 0.23325 to 0.22535, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 19/30\n",
      "111/111 [==============================] - ETA: 1:42 - loss: 0.607 - ETA: 1:46 - loss: 0.565 - ETA: 1:45 - loss: 0.525 - ETA: 1:44 - loss: 0.529 - ETA: 1:44 - loss: 0.522 - ETA: 1:43 - loss: 0.509 - ETA: 1:42 - loss: 0.498 - ETA: 1:41 - loss: 0.489 - ETA: 1:40 - loss: 0.479 - ETA: 1:39 - loss: 0.472 - ETA: 1:38 - loss: 0.463 - ETA: 1:37 - loss: 0.455 - ETA: 1:36 - loss: 0.450 - ETA: 1:35 - loss: 0.444 - ETA: 1:34 - loss: 0.440 - ETA: 1:33 - loss: 0.436 - ETA: 1:32 - loss: 0.434 - ETA: 1:31 - loss: 0.433 - ETA: 1:30 - loss: 0.434 - ETA: 1:29 - loss: 0.434 - ETA: 1:28 - loss: 0.435 - ETA: 1:27 - loss: 0.436 - ETA: 1:26 - loss: 0.436 - ETA: 1:26 - loss: 0.436 - ETA: 1:25 - loss: 0.436 - ETA: 1:24 - loss: 0.435 - ETA: 1:23 - loss: 0.434 - ETA: 1:22 - loss: 0.433 - ETA: 1:21 - loss: 0.432 - ETA: 1:20 - loss: 0.431 - ETA: 1:19 - loss: 0.430 - ETA: 1:18 - loss: 0.429 - ETA: 1:17 - loss: 0.428 - ETA: 1:16 - loss: 0.427 - ETA: 1:15 - loss: 0.426 - ETA: 1:14 - loss: 0.426 - ETA: 1:13 - loss: 0.425 - ETA: 1:12 - loss: 0.424 - ETA: 1:11 - loss: 0.424 - ETA: 1:10 - loss: 0.423 - ETA: 1:09 - loss: 0.423 - ETA: 1:08 - loss: 0.422 - ETA: 1:07 - loss: 0.421 - ETA: 1:06 - loss: 0.421 - ETA: 1:05 - loss: 0.420 - ETA: 1:04 - loss: 0.419 - ETA: 1:04 - loss: 0.419 - ETA: 1:03 - loss: 0.419 - ETA: 1:03 - loss: 0.418 - ETA: 1:02 - loss: 0.418 - ETA: 1:02 - loss: 0.418 - ETA: 1:01 - loss: 0.417 - ETA: 1:00 - loss: 0.417 - ETA: 59s - loss: 0.417 - ETA: 59s - loss: 0.41 - ETA: 58s - loss: 0.41 - ETA: 57s - loss: 0.41 - ETA: 56s - loss: 0.41 - ETA: 55s - loss: 0.41 - ETA: 54s - loss: 0.41 - ETA: 53s - loss: 0.41 - ETA: 52s - loss: 0.41 - ETA: 51s - loss: 0.41 - ETA: 50s - loss: 0.41 - ETA: 49s - loss: 0.41 - ETA: 48s - loss: 0.41 - ETA: 47s - loss: 0.41 - ETA: 47s - loss: 0.41 - ETA: 46s - loss: 0.41 - ETA: 45s - loss: 0.41 - ETA: 44s - loss: 0.41 - ETA: 43s - loss: 0.41 - ETA: 42s - loss: 0.41 - ETA: 41s - loss: 0.41 - ETA: 40s - loss: 0.41 - ETA: 39s - loss: 0.41 - ETA: 37s - loss: 0.40 - ETA: 36s - loss: 0.40 - ETA: 35s - loss: 0.40 - ETA: 34s - loss: 0.40 - ETA: 33s - loss: 0.40 - ETA: 32s - loss: 0.40 - ETA: 31s - loss: 0.40 - ETA: 29s - loss: 0.40 - ETA: 28s - loss: 0.40 - ETA: 27s - loss: 0.40 - ETA: 26s - loss: 0.40 - ETA: 25s - loss: 0.40 - ETA: 24s - loss: 0.40 - ETA: 23s - loss: 0.40 - ETA: 21s - loss: 0.40 - ETA: 20s - loss: 0.40 - ETA: 19s - loss: 0.40 - ETA: 18s - loss: 0.40 - ETA: 17s - loss: 0.40 - ETA: 16s - loss: 0.40 - ETA: 15s - loss: 0.40 - ETA: 14s - loss: 0.40 - ETA: 13s - loss: 0.40 - ETA: 12s - loss: 0.40 - ETA: 10s - loss: 0.40 - ETA: 9s - loss: 0.4040 - ETA: 8s - loss: 0.403 - ETA: 7s - loss: 0.403 - ETA: 6s - loss: 0.403 - ETA: 5s - loss: 0.403 - ETA: 4s - loss: 0.402 - ETA: 3s - loss: 0.402 - ETA: 2s - loss: 0.402 - ETA: 1s - loss: 0.402 - ETA: 0s - loss: 0.401 - 122s 1s/step - loss: 0.4016 - val_loss: 0.2238\n",
      "\n",
      "Epoch 00019: val_loss improved from 0.22535 to 0.22376, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 20/30\n",
      "111/111 [==============================] - ETA: 1:43 - loss: 0.376 - ETA: 1:45 - loss: 0.330 - ETA: 1:47 - loss: 0.314 - ETA: 1:46 - loss: 0.300 - ETA: 1:48 - loss: 0.303 - ETA: 1:48 - loss: 0.304 - ETA: 1:47 - loss: 0.300 - ETA: 1:46 - loss: 0.300 - ETA: 1:45 - loss: 0.299 - ETA: 1:43 - loss: 0.298 - ETA: 1:42 - loss: 0.296 - ETA: 1:41 - loss: 0.294 - ETA: 1:39 - loss: 0.292 - ETA: 1:38 - loss: 0.292 - ETA: 1:37 - loss: 0.292 - ETA: 1:36 - loss: 0.291 - ETA: 1:35 - loss: 0.293 - ETA: 1:34 - loss: 0.295 - ETA: 1:33 - loss: 0.295 - ETA: 1:32 - loss: 0.296 - ETA: 1:31 - loss: 0.297 - ETA: 1:30 - loss: 0.297 - ETA: 1:29 - loss: 0.298 - ETA: 1:28 - loss: 0.298 - ETA: 1:27 - loss: 0.299 - ETA: 1:25 - loss: 0.300 - ETA: 1:24 - loss: 0.301 - ETA: 1:23 - loss: 0.303 - ETA: 1:22 - loss: 0.304 - ETA: 1:21 - loss: 0.306 - ETA: 1:20 - loss: 0.307 - ETA: 1:20 - loss: 0.309 - ETA: 1:19 - loss: 0.310 - ETA: 1:18 - loss: 0.311 - ETA: 1:17 - loss: 0.312 - ETA: 1:16 - loss: 0.313 - ETA: 1:15 - loss: 0.314 - ETA: 1:14 - loss: 0.314 - ETA: 1:14 - loss: 0.315 - ETA: 1:13 - loss: 0.315 - ETA: 1:12 - loss: 0.316 - ETA: 1:12 - loss: 0.316 - ETA: 1:11 - loss: 0.317 - ETA: 1:11 - loss: 0.317 - ETA: 1:10 - loss: 0.317 - ETA: 1:09 - loss: 0.317 - ETA: 1:08 - loss: 0.318 - ETA: 1:08 - loss: 0.318 - ETA: 1:07 - loss: 0.318 - ETA: 1:06 - loss: 0.319 - ETA: 1:05 - loss: 0.319 - ETA: 1:04 - loss: 0.319 - ETA: 1:03 - loss: 0.319 - ETA: 1:02 - loss: 0.320 - ETA: 1:01 - loss: 0.320 - ETA: 1:00 - loss: 0.320 - ETA: 59s - loss: 0.320 - ETA: 58s - loss: 0.32 - ETA: 57s - loss: 0.32 - ETA: 56s - loss: 0.32 - ETA: 54s - loss: 0.32 - ETA: 53s - loss: 0.32 - ETA: 52s - loss: 0.32 - ETA: 51s - loss: 0.32 - ETA: 50s - loss: 0.32 - ETA: 49s - loss: 0.32 - ETA: 47s - loss: 0.32 - ETA: 46s - loss: 0.32 - ETA: 45s - loss: 0.32 - ETA: 44s - loss: 0.32 - ETA: 43s - loss: 0.32 - ETA: 42s - loss: 0.32 - ETA: 41s - loss: 0.32 - ETA: 40s - loss: 0.32 - ETA: 38s - loss: 0.32 - ETA: 37s - loss: 0.32 - ETA: 36s - loss: 0.32 - ETA: 35s - loss: 0.32 - ETA: 34s - loss: 0.32 - ETA: 33s - loss: 0.32 - ETA: 32s - loss: 0.32 - ETA: 31s - loss: 0.32 - ETA: 30s - loss: 0.32 - ETA: 28s - loss: 0.32 - ETA: 27s - loss: 0.32 - ETA: 26s - loss: 0.32 - ETA: 25s - loss: 0.32 - ETA: 24s - loss: 0.32 - ETA: 23s - loss: 0.32 - ETA: 22s - loss: 0.32 - ETA: 21s - loss: 0.32 - ETA: 20s - loss: 0.32 - ETA: 19s - loss: 0.32 - ETA: 18s - loss: 0.32 - ETA: 17s - loss: 0.32 - ETA: 15s - loss: 0.32 - ETA: 14s - loss: 0.32 - ETA: 13s - loss: 0.32 - ETA: 12s - loss: 0.32 - ETA: 11s - loss: 0.32 - ETA: 10s - loss: 0.32 - ETA: 9s - loss: 0.3291 - ETA: 8s - loss: 0.329 - ETA: 7s - loss: 0.329 - ETA: 6s - loss: 0.330 - ETA: 5s - loss: 0.330 - ETA: 4s - loss: 0.330 - ETA: 3s - loss: 0.330 - ETA: 2s - loss: 0.331 - ETA: 1s - loss: 0.331 - ETA: 0s - loss: 0.331 - 119s 1s/step - loss: 0.3319 - val_loss: 0.2100\n",
      "\n",
      "Epoch 00020: val_loss improved from 0.22376 to 0.21003, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 21/30\n",
      "111/111 [==============================] - ETA: 2:06 - loss: 0.441 - ETA: 2:10 - loss: 0.426 - ETA: 2:13 - loss: 0.392 - ETA: 2:17 - loss: 0.400 - ETA: 2:20 - loss: 0.398 - ETA: 2:19 - loss: 0.389 - ETA: 2:17 - loss: 0.377 - ETA: 2:15 - loss: 0.365 - ETA: 2:14 - loss: 0.356 - ETA: 2:12 - loss: 0.348 - ETA: 2:09 - loss: 0.341 - ETA: 2:07 - loss: 0.336 - ETA: 2:05 - loss: 0.331 - ETA: 2:04 - loss: 0.328 - ETA: 2:02 - loss: 0.324 - ETA: 2:00 - loss: 0.320 - ETA: 1:58 - loss: 0.316 - ETA: 1:55 - loss: 0.313 - ETA: 1:53 - loss: 0.310 - ETA: 1:51 - loss: 0.307 - ETA: 1:49 - loss: 0.305 - ETA: 1:47 - loss: 0.304 - ETA: 1:45 - loss: 0.302 - ETA: 1:43 - loss: 0.301 - ETA: 1:41 - loss: 0.300 - ETA: 1:39 - loss: 0.299 - ETA: 1:37 - loss: 0.298 - ETA: 1:36 - loss: 0.297 - ETA: 1:34 - loss: 0.297 - ETA: 1:32 - loss: 0.298 - ETA: 1:31 - loss: 0.299 - ETA: 1:29 - loss: 0.300 - ETA: 1:28 - loss: 0.301 - ETA: 1:26 - loss: 0.302 - ETA: 1:25 - loss: 0.303 - ETA: 1:23 - loss: 0.304 - ETA: 1:22 - loss: 0.305 - ETA: 1:21 - loss: 0.306 - ETA: 1:19 - loss: 0.307 - ETA: 1:18 - loss: 0.307 - ETA: 1:17 - loss: 0.308 - ETA: 1:15 - loss: 0.309 - ETA: 1:14 - loss: 0.309 - ETA: 1:13 - loss: 0.310 - ETA: 1:12 - loss: 0.311 - ETA: 1:10 - loss: 0.312 - ETA: 1:09 - loss: 0.312 - ETA: 1:08 - loss: 0.313 - ETA: 1:07 - loss: 0.314 - ETA: 1:06 - loss: 0.314 - ETA: 1:04 - loss: 0.315 - ETA: 1:03 - loss: 0.316 - ETA: 1:02 - loss: 0.316 - ETA: 1:01 - loss: 0.317 - ETA: 1:00 - loss: 0.318 - ETA: 59s - loss: 0.318 - ETA: 58s - loss: 0.31 - ETA: 56s - loss: 0.32 - ETA: 55s - loss: 0.32 - ETA: 54s - loss: 0.32 - ETA: 53s - loss: 0.32 - ETA: 52s - loss: 0.32 - ETA: 51s - loss: 0.32 - ETA: 50s - loss: 0.32 - ETA: 49s - loss: 0.32 - ETA: 47s - loss: 0.32 - ETA: 46s - loss: 0.32 - ETA: 45s - loss: 0.32 - ETA: 44s - loss: 0.32 - ETA: 43s - loss: 0.32 - ETA: 42s - loss: 0.32 - ETA: 41s - loss: 0.32 - ETA: 40s - loss: 0.32 - ETA: 39s - loss: 0.32 - ETA: 38s - loss: 0.32 - ETA: 37s - loss: 0.32 - ETA: 36s - loss: 0.32 - ETA: 35s - loss: 0.32 - ETA: 34s - loss: 0.32 - ETA: 33s - loss: 0.32 - ETA: 32s - loss: 0.32 - ETA: 31s - loss: 0.32 - ETA: 30s - loss: 0.32 - ETA: 29s - loss: 0.32 - ETA: 28s - loss: 0.32 - ETA: 27s - loss: 0.32 - ETA: 26s - loss: 0.32 - ETA: 24s - loss: 0.32 - ETA: 23s - loss: 0.32 - ETA: 22s - loss: 0.32 - ETA: 21s - loss: 0.32 - ETA: 20s - loss: 0.32 - ETA: 19s - loss: 0.32 - ETA: 18s - loss: 0.33 - ETA: 17s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 13s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 9s - loss: 0.3315 - ETA: 8s - loss: 0.331 - ETA: 7s - loss: 0.331 - ETA: 6s - loss: 0.331 - ETA: 5s - loss: 0.332 - ETA: 4s - loss: 0.332 - ETA: 3s - loss: 0.332 - ETA: 2s - loss: 0.332 - ETA: 1s - loss: 0.332 - ETA: 0s - loss: 0.332 - 122s 1s/step - loss: 0.3331 - val_loss: 0.2173\n",
      "\n",
      "Epoch 00021: val_loss did not improve from 0.21003\n",
      "Epoch 22/30\n",
      "111/111 [==============================] - ETA: 1:54 - loss: 0.841 - ETA: 1:52 - loss: 0.676 - ETA: 1:56 - loss: 0.612 - ETA: 1:54 - loss: 0.572 - ETA: 1:50 - loss: 0.543 - ETA: 1:48 - loss: 0.525 - ETA: 1:47 - loss: 0.510 - ETA: 1:45 - loss: 0.497 - ETA: 1:44 - loss: 0.491 - ETA: 1:42 - loss: 0.484 - ETA: 1:42 - loss: 0.479 - ETA: 1:41 - loss: 0.473 - ETA: 1:40 - loss: 0.468 - ETA: 1:39 - loss: 0.462 - ETA: 1:38 - loss: 0.456 - ETA: 1:36 - loss: 0.451 - ETA: 1:35 - loss: 0.447 - ETA: 1:34 - loss: 0.443 - ETA: 1:33 - loss: 0.440 - ETA: 1:32 - loss: 0.436 - ETA: 1:31 - loss: 0.432 - ETA: 1:30 - loss: 0.429 - ETA: 1:29 - loss: 0.427 - ETA: 1:28 - loss: 0.424 - ETA: 1:27 - loss: 0.422 - ETA: 1:26 - loss: 0.421 - ETA: 1:25 - loss: 0.420 - ETA: 1:24 - loss: 0.419 - ETA: 1:22 - loss: 0.417 - ETA: 1:21 - loss: 0.415 - ETA: 1:20 - loss: 0.414 - ETA: 1:19 - loss: 0.413 - ETA: 1:18 - loss: 0.411 - ETA: 1:17 - loss: 0.410 - ETA: 1:16 - loss: 0.409 - ETA: 1:16 - loss: 0.407 - ETA: 1:15 - loss: 0.406 - ETA: 1:14 - loss: 0.405 - ETA: 1:13 - loss: 0.404 - ETA: 1:12 - loss: 0.403 - ETA: 1:11 - loss: 0.402 - ETA: 1:10 - loss: 0.401 - ETA: 1:09 - loss: 0.401 - ETA: 1:08 - loss: 0.400 - ETA: 1:07 - loss: 0.399 - ETA: 1:07 - loss: 0.398 - ETA: 1:06 - loss: 0.397 - ETA: 1:05 - loss: 0.397 - ETA: 1:05 - loss: 0.396 - ETA: 1:04 - loss: 0.395 - ETA: 1:04 - loss: 0.394 - ETA: 1:03 - loss: 0.393 - ETA: 1:02 - loss: 0.392 - ETA: 1:01 - loss: 0.392 - ETA: 1:00 - loss: 0.391 - ETA: 59s - loss: 0.390 - ETA: 58s - loss: 0.38 - ETA: 57s - loss: 0.38 - ETA: 56s - loss: 0.38 - ETA: 55s - loss: 0.38 - ETA: 54s - loss: 0.38 - ETA: 53s - loss: 0.38 - ETA: 52s - loss: 0.38 - ETA: 51s - loss: 0.38 - ETA: 49s - loss: 0.38 - ETA: 48s - loss: 0.38 - ETA: 47s - loss: 0.38 - ETA: 46s - loss: 0.38 - ETA: 45s - loss: 0.38 - ETA: 44s - loss: 0.38 - ETA: 43s - loss: 0.38 - ETA: 41s - loss: 0.38 - ETA: 40s - loss: 0.38 - ETA: 39s - loss: 0.37 - ETA: 38s - loss: 0.37 - ETA: 37s - loss: 0.37 - ETA: 36s - loss: 0.37 - ETA: 35s - loss: 0.37 - ETA: 34s - loss: 0.37 - ETA: 33s - loss: 0.37 - ETA: 31s - loss: 0.37 - ETA: 30s - loss: 0.37 - ETA: 29s - loss: 0.37 - ETA: 28s - loss: 0.37 - ETA: 27s - loss: 0.37 - ETA: 26s - loss: 0.37 - ETA: 25s - loss: 0.37 - ETA: 24s - loss: 0.37 - ETA: 23s - loss: 0.37 - ETA: 22s - loss: 0.37 - ETA: 21s - loss: 0.37 - ETA: 20s - loss: 0.37 - ETA: 19s - loss: 0.37 - ETA: 17s - loss: 0.37 - ETA: 16s - loss: 0.37 - ETA: 15s - loss: 0.37 - ETA: 14s - loss: 0.37 - ETA: 13s - loss: 0.37 - ETA: 12s - loss: 0.37 - ETA: 11s - loss: 0.37 - ETA: 10s - loss: 0.37 - ETA: 9s - loss: 0.3711 - ETA: 8s - loss: 0.370 - ETA: 7s - loss: 0.370 - ETA: 6s - loss: 0.370 - ETA: 5s - loss: 0.370 - ETA: 4s - loss: 0.370 - ETA: 3s - loss: 0.369 - ETA: 2s - loss: 0.369 - ETA: 1s - loss: 0.369 - ETA: 0s - loss: 0.369 - 118s 1s/step - loss: 0.3689 - val_loss: 0.2082\n",
      "\n",
      "Epoch 00022: val_loss improved from 0.21003 to 0.20821, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 23/30\n",
      "111/111 [==============================] - ETA: 1:41 - loss: 0.384 - ETA: 1:49 - loss: 0.357 - ETA: 1:49 - loss: 0.340 - ETA: 1:51 - loss: 0.351 - ETA: 1:51 - loss: 0.354 - ETA: 1:50 - loss: 0.350 - ETA: 1:49 - loss: 0.349 - ETA: 1:48 - loss: 0.345 - ETA: 1:47 - loss: 0.342 - ETA: 1:46 - loss: 0.340 - ETA: 1:45 - loss: 0.337 - ETA: 1:45 - loss: 0.338 - ETA: 1:45 - loss: 0.338 - ETA: 1:45 - loss: 0.337 - ETA: 1:45 - loss: 0.336 - ETA: 1:45 - loss: 0.335 - ETA: 1:45 - loss: 0.338 - ETA: 1:45 - loss: 0.341 - ETA: 1:44 - loss: 0.344 - ETA: 1:44 - loss: 0.346 - ETA: 1:43 - loss: 0.347 - ETA: 1:43 - loss: 0.349 - ETA: 1:41 - loss: 0.351 - ETA: 1:40 - loss: 0.352 - ETA: 1:39 - loss: 0.353 - ETA: 1:37 - loss: 0.354 - ETA: 1:36 - loss: 0.357 - ETA: 1:35 - loss: 0.360 - ETA: 1:33 - loss: 0.362 - ETA: 1:32 - loss: 0.365 - ETA: 1:30 - loss: 0.366 - ETA: 1:29 - loss: 0.368 - ETA: 1:27 - loss: 0.369 - ETA: 1:26 - loss: 0.370 - ETA: 1:24 - loss: 0.371 - ETA: 1:22 - loss: 0.372 - ETA: 1:21 - loss: 0.373 - ETA: 1:19 - loss: 0.374 - ETA: 1:18 - loss: 0.374 - ETA: 1:16 - loss: 0.374 - ETA: 1:15 - loss: 0.375 - ETA: 1:14 - loss: 0.375 - ETA: 1:12 - loss: 0.375 - ETA: 1:11 - loss: 0.375 - ETA: 1:09 - loss: 0.375 - ETA: 1:08 - loss: 0.375 - ETA: 1:07 - loss: 0.375 - ETA: 1:06 - loss: 0.375 - ETA: 1:04 - loss: 0.375 - ETA: 1:03 - loss: 0.375 - ETA: 1:02 - loss: 0.375 - ETA: 1:01 - loss: 0.375 - ETA: 59s - loss: 0.375 - ETA: 58s - loss: 0.37 - ETA: 57s - loss: 0.37 - ETA: 56s - loss: 0.37 - ETA: 55s - loss: 0.37 - ETA: 54s - loss: 0.37 - ETA: 53s - loss: 0.37 - ETA: 51s - loss: 0.37 - ETA: 50s - loss: 0.37 - ETA: 49s - loss: 0.37 - ETA: 48s - loss: 0.37 - ETA: 47s - loss: 0.38 - ETA: 46s - loss: 0.38 - ETA: 45s - loss: 0.38 - ETA: 44s - loss: 0.38 - ETA: 43s - loss: 0.38 - ETA: 42s - loss: 0.38 - ETA: 41s - loss: 0.38 - ETA: 39s - loss: 0.38 - ETA: 38s - loss: 0.38 - ETA: 37s - loss: 0.38 - ETA: 36s - loss: 0.38 - ETA: 35s - loss: 0.38 - ETA: 34s - loss: 0.38 - ETA: 33s - loss: 0.38 - ETA: 32s - loss: 0.38 - ETA: 31s - loss: 0.38 - ETA: 30s - loss: 0.38 - ETA: 29s - loss: 0.38 - ETA: 28s - loss: 0.38 - ETA: 27s - loss: 0.38 - ETA: 26s - loss: 0.38 - ETA: 25s - loss: 0.38 - ETA: 24s - loss: 0.38 - ETA: 23s - loss: 0.38 - ETA: 22s - loss: 0.38 - ETA: 21s - loss: 0.38 - ETA: 20s - loss: 0.38 - ETA: 19s - loss: 0.38 - ETA: 18s - loss: 0.38 - ETA: 17s - loss: 0.38 - ETA: 16s - loss: 0.38 - ETA: 15s - loss: 0.38 - ETA: 14s - loss: 0.38 - ETA: 13s - loss: 0.38 - ETA: 12s - loss: 0.38 - ETA: 11s - loss: 0.38 - ETA: 10s - loss: 0.38 - ETA: 9s - loss: 0.3874 - ETA: 8s - loss: 0.387 - ETA: 7s - loss: 0.387 - ETA: 6s - loss: 0.387 - ETA: 5s - loss: 0.387 - ETA: 4s - loss: 0.387 - ETA: 3s - loss: 0.387 - ETA: 2s - loss: 0.387 - ETA: 1s - loss: 0.387 - ETA: 0s - loss: 0.387 - ETA: 0s - loss: 0.387 - 111s 1s/step - loss: 0.3869 - val_loss: 0.2095\n",
      "\n",
      "Epoch 00023: val_loss did not improve from 0.20821\n",
      "Epoch 24/30\n",
      "111/111 [==============================] - ETA: 2:06 - loss: 0.456 - ETA: 2:03 - loss: 0.366 - ETA: 2:02 - loss: 0.340 - ETA: 2:02 - loss: 0.318 - ETA: 2:01 - loss: 0.306 - ETA: 1:59 - loss: 0.293 - ETA: 1:58 - loss: 0.285 - ETA: 1:57 - loss: 0.281 - ETA: 1:56 - loss: 0.276 - ETA: 1:54 - loss: 0.270 - ETA: 1:53 - loss: 0.265 - ETA: 1:50 - loss: 0.262 - ETA: 1:48 - loss: 0.260 - ETA: 1:46 - loss: 0.259 - ETA: 1:44 - loss: 0.258 - ETA: 1:41 - loss: 0.257 - ETA: 1:39 - loss: 0.255 - ETA: 1:37 - loss: 0.254 - ETA: 1:35 - loss: 0.253 - ETA: 1:34 - loss: 0.251 - ETA: 1:32 - loss: 0.251 - ETA: 1:30 - loss: 0.250 - ETA: 1:29 - loss: 0.250 - ETA: 1:27 - loss: 0.250 - ETA: 1:26 - loss: 0.251 - ETA: 1:24 - loss: 0.252 - ETA: 1:23 - loss: 0.254 - ETA: 1:22 - loss: 0.254 - ETA: 1:21 - loss: 0.255 - ETA: 1:19 - loss: 0.256 - ETA: 1:18 - loss: 0.256 - ETA: 1:17 - loss: 0.257 - ETA: 1:16 - loss: 0.258 - ETA: 1:14 - loss: 0.258 - ETA: 1:13 - loss: 0.258 - ETA: 1:12 - loss: 0.259 - ETA: 1:11 - loss: 0.259 - ETA: 1:10 - loss: 0.260 - ETA: 1:09 - loss: 0.260 - ETA: 1:08 - loss: 0.260 - ETA: 1:07 - loss: 0.260 - ETA: 1:05 - loss: 0.260 - ETA: 1:04 - loss: 0.260 - ETA: 1:03 - loss: 0.261 - ETA: 1:02 - loss: 0.261 - ETA: 1:01 - loss: 0.261 - ETA: 1:00 - loss: 0.262 - ETA: 59s - loss: 0.262 - ETA: 58s - loss: 0.26 - ETA: 57s - loss: 0.26 - ETA: 56s - loss: 0.26 - ETA: 55s - loss: 0.26 - ETA: 54s - loss: 0.26 - ETA: 53s - loss: 0.26 - ETA: 52s - loss: 0.26 - ETA: 51s - loss: 0.26 - ETA: 50s - loss: 0.26 - ETA: 49s - loss: 0.26 - ETA: 48s - loss: 0.26 - ETA: 47s - loss: 0.26 - ETA: 46s - loss: 0.27 - ETA: 45s - loss: 0.27 - ETA: 44s - loss: 0.27 - ETA: 43s - loss: 0.27 - ETA: 42s - loss: 0.27 - ETA: 41s - loss: 0.27 - ETA: 40s - loss: 0.27 - ETA: 39s - loss: 0.27 - ETA: 38s - loss: 0.27 - ETA: 38s - loss: 0.27 - ETA: 37s - loss: 0.27 - ETA: 36s - loss: 0.27 - ETA: 35s - loss: 0.27 - ETA: 34s - loss: 0.28 - ETA: 33s - loss: 0.28 - ETA: 32s - loss: 0.28 - ETA: 31s - loss: 0.28 - ETA: 30s - loss: 0.28 - ETA: 29s - loss: 0.28 - ETA: 28s - loss: 0.28 - ETA: 27s - loss: 0.28 - ETA: 26s - loss: 0.28 - ETA: 25s - loss: 0.28 - ETA: 24s - loss: 0.28 - ETA: 24s - loss: 0.28 - ETA: 23s - loss: 0.28 - ETA: 22s - loss: 0.28 - ETA: 21s - loss: 0.28 - ETA: 20s - loss: 0.29 - ETA: 19s - loss: 0.29 - ETA: 18s - loss: 0.29 - ETA: 17s - loss: 0.29 - ETA: 16s - loss: 0.29 - ETA: 15s - loss: 0.29 - ETA: 14s - loss: 0.29 - ETA: 13s - loss: 0.29 - ETA: 13s - loss: 0.29 - ETA: 12s - loss: 0.29 - ETA: 11s - loss: 0.29 - ETA: 10s - loss: 0.29 - ETA: 9s - loss: 0.2971 - ETA: 8s - loss: 0.297 - ETA: 7s - loss: 0.297 - ETA: 6s - loss: 0.298 - ETA: 5s - loss: 0.298 - ETA: 4s - loss: 0.298 - ETA: 3s - loss: 0.299 - ETA: 2s - loss: 0.299 - ETA: 1s - loss: 0.299 - ETA: 0s - loss: 0.300 - ETA: 0s - loss: 0.300 - 106s 957ms/step - loss: 0.3006 - val_loss: 0.2048\n",
      "\n",
      "Epoch 00024: val_loss improved from 0.20821 to 0.20484, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 25/30\n",
      "111/111 [==============================] - ETA: 1:31 - loss: 1.087 - ETA: 1:45 - loss: 0.855 - ETA: 1:45 - loss: 0.723 - ETA: 1:43 - loss: 0.645 - ETA: 1:43 - loss: 0.591 - ETA: 1:42 - loss: 0.549 - ETA: 1:41 - loss: 0.517 - ETA: 1:40 - loss: 0.492 - ETA: 1:39 - loss: 0.477 - ETA: 1:37 - loss: 0.466 - ETA: 1:35 - loss: 0.457 - ETA: 1:34 - loss: 0.449 - ETA: 1:32 - loss: 0.442 - ETA: 1:31 - loss: 0.440 - ETA: 1:30 - loss: 0.437 - ETA: 1:28 - loss: 0.434 - ETA: 1:27 - loss: 0.431 - ETA: 1:26 - loss: 0.429 - ETA: 1:24 - loss: 0.426 - ETA: 1:23 - loss: 0.423 - ETA: 1:22 - loss: 0.421 - ETA: 1:21 - loss: 0.419 - ETA: 1:20 - loss: 0.416 - ETA: 1:19 - loss: 0.414 - ETA: 1:18 - loss: 0.413 - ETA: 1:17 - loss: 0.411 - ETA: 1:16 - loss: 0.409 - ETA: 1:15 - loss: 0.407 - ETA: 1:14 - loss: 0.404 - ETA: 1:13 - loss: 0.403 - ETA: 1:12 - loss: 0.401 - ETA: 1:11 - loss: 0.399 - ETA: 1:10 - loss: 0.398 - ETA: 1:09 - loss: 0.396 - ETA: 1:08 - loss: 0.395 - ETA: 1:07 - loss: 0.394 - ETA: 1:06 - loss: 0.393 - ETA: 1:06 - loss: 0.391 - ETA: 1:05 - loss: 0.390 - ETA: 1:04 - loss: 0.389 - ETA: 1:03 - loss: 0.388 - ETA: 1:02 - loss: 0.387 - ETA: 1:01 - loss: 0.386 - ETA: 1:00 - loss: 0.385 - ETA: 59s - loss: 0.384 - ETA: 58s - loss: 0.38 - ETA: 57s - loss: 0.38 - ETA: 56s - loss: 0.38 - ETA: 56s - loss: 0.38 - ETA: 55s - loss: 0.38 - ETA: 54s - loss: 0.38 - ETA: 53s - loss: 0.37 - ETA: 52s - loss: 0.37 - ETA: 51s - loss: 0.37 - ETA: 50s - loss: 0.37 - ETA: 49s - loss: 0.37 - ETA: 48s - loss: 0.37 - ETA: 47s - loss: 0.37 - ETA: 47s - loss: 0.37 - ETA: 46s - loss: 0.37 - ETA: 45s - loss: 0.37 - ETA: 44s - loss: 0.37 - ETA: 43s - loss: 0.37 - ETA: 42s - loss: 0.37 - ETA: 42s - loss: 0.37 - ETA: 41s - loss: 0.37 - ETA: 40s - loss: 0.37 - ETA: 39s - loss: 0.37 - ETA: 38s - loss: 0.37 - ETA: 38s - loss: 0.37 - ETA: 37s - loss: 0.37 - ETA: 36s - loss: 0.37 - ETA: 35s - loss: 0.37 - ETA: 34s - loss: 0.37 - ETA: 33s - loss: 0.37 - ETA: 33s - loss: 0.37 - ETA: 32s - loss: 0.37 - ETA: 31s - loss: 0.37 - ETA: 30s - loss: 0.37 - ETA: 29s - loss: 0.37 - ETA: 28s - loss: 0.37 - ETA: 27s - loss: 0.37 - ETA: 26s - loss: 0.37 - ETA: 25s - loss: 0.37 - ETA: 24s - loss: 0.37 - ETA: 23s - loss: 0.37 - ETA: 22s - loss: 0.36 - ETA: 21s - loss: 0.36 - ETA: 20s - loss: 0.36 - ETA: 19s - loss: 0.36 - ETA: 18s - loss: 0.36 - ETA: 17s - loss: 0.36 - ETA: 17s - loss: 0.36 - ETA: 16s - loss: 0.36 - ETA: 15s - loss: 0.36 - ETA: 14s - loss: 0.36 - ETA: 13s - loss: 0.36 - ETA: 12s - loss: 0.36 - ETA: 11s - loss: 0.36 - ETA: 10s - loss: 0.36 - ETA: 9s - loss: 0.3661 - ETA: 8s - loss: 0.365 - ETA: 7s - loss: 0.365 - ETA: 6s - loss: 0.365 - ETA: 5s - loss: 0.365 - ETA: 4s - loss: 0.364 - ETA: 3s - loss: 0.364 - ETA: 2s - loss: 0.364 - ETA: 1s - loss: 0.364 - ETA: 0s - loss: 0.363 - ETA: 0s - loss: 0.363 - 105s 947ms/step - loss: 0.3632 - val_loss: 0.2107\n",
      "\n",
      "Epoch 00025: val_loss did not improve from 0.20484\n",
      "Epoch 26/30\n",
      "111/111 [==============================] - ETA: 1:36 - loss: 0.334 - ETA: 1:39 - loss: 0.314 - ETA: 1:36 - loss: 0.431 - ETA: 1:34 - loss: 0.467 - ETA: 1:35 - loss: 0.482 - ETA: 1:36 - loss: 0.525 - ETA: 1:34 - loss: 0.546 - ETA: 1:33 - loss: 0.554 - ETA: 1:32 - loss: 0.554 - ETA: 1:31 - loss: 0.551 - ETA: 1:30 - loss: 0.547 - ETA: 1:29 - loss: 0.541 - ETA: 1:28 - loss: 0.536 - ETA: 1:27 - loss: 0.534 - ETA: 1:26 - loss: 0.531 - ETA: 1:25 - loss: 0.527 - ETA: 1:24 - loss: 0.523 - ETA: 1:23 - loss: 0.518 - ETA: 1:23 - loss: 0.512 - ETA: 1:22 - loss: 0.509 - ETA: 1:22 - loss: 0.505 - ETA: 1:21 - loss: 0.501 - ETA: 1:20 - loss: 0.499 - ETA: 1:20 - loss: 0.495 - ETA: 1:19 - loss: 0.492 - ETA: 1:18 - loss: 0.489 - ETA: 1:17 - loss: 0.485 - ETA: 1:16 - loss: 0.482 - ETA: 1:15 - loss: 0.478 - ETA: 1:14 - loss: 0.475 - ETA: 1:13 - loss: 0.472 - ETA: 1:12 - loss: 0.469 - ETA: 1:11 - loss: 0.466 - ETA: 1:10 - loss: 0.462 - ETA: 1:09 - loss: 0.459 - ETA: 1:08 - loss: 0.456 - ETA: 1:07 - loss: 0.453 - ETA: 1:06 - loss: 0.450 - ETA: 1:06 - loss: 0.447 - ETA: 1:05 - loss: 0.445 - ETA: 1:04 - loss: 0.442 - ETA: 1:03 - loss: 0.440 - ETA: 1:02 - loss: 0.437 - ETA: 1:01 - loss: 0.435 - ETA: 1:00 - loss: 0.433 - ETA: 59s - loss: 0.431 - ETA: 58s - loss: 0.42 - ETA: 57s - loss: 0.42 - ETA: 56s - loss: 0.42 - ETA: 55s - loss: 0.42 - ETA: 54s - loss: 0.42 - ETA: 53s - loss: 0.41 - ETA: 53s - loss: 0.41 - ETA: 52s - loss: 0.41 - ETA: 51s - loss: 0.41 - ETA: 50s - loss: 0.41 - ETA: 49s - loss: 0.41 - ETA: 48s - loss: 0.41 - ETA: 48s - loss: 0.40 - ETA: 47s - loss: 0.40 - ETA: 46s - loss: 0.40 - ETA: 45s - loss: 0.40 - ETA: 44s - loss: 0.40 - ETA: 44s - loss: 0.40 - ETA: 43s - loss: 0.40 - ETA: 42s - loss: 0.40 - ETA: 41s - loss: 0.40 - ETA: 40s - loss: 0.40 - ETA: 40s - loss: 0.39 - ETA: 39s - loss: 0.39 - ETA: 38s - loss: 0.39 - ETA: 37s - loss: 0.39 - ETA: 36s - loss: 0.39 - ETA: 35s - loss: 0.39 - ETA: 34s - loss: 0.39 - ETA: 33s - loss: 0.39 - ETA: 32s - loss: 0.39 - ETA: 31s - loss: 0.39 - ETA: 30s - loss: 0.39 - ETA: 29s - loss: 0.38 - ETA: 28s - loss: 0.38 - ETA: 27s - loss: 0.38 - ETA: 26s - loss: 0.38 - ETA: 25s - loss: 0.38 - ETA: 24s - loss: 0.38 - ETA: 24s - loss: 0.38 - ETA: 23s - loss: 0.38 - ETA: 22s - loss: 0.38 - ETA: 21s - loss: 0.38 - ETA: 20s - loss: 0.38 - ETA: 19s - loss: 0.38 - ETA: 18s - loss: 0.37 - ETA: 17s - loss: 0.37 - ETA: 16s - loss: 0.37 - ETA: 15s - loss: 0.37 - ETA: 14s - loss: 0.37 - ETA: 13s - loss: 0.37 - ETA: 12s - loss: 0.37 - ETA: 11s - loss: 0.37 - ETA: 10s - loss: 0.37 - ETA: 9s - loss: 0.3739 - ETA: 8s - loss: 0.373 - ETA: 7s - loss: 0.372 - ETA: 6s - loss: 0.372 - ETA: 5s - loss: 0.371 - ETA: 4s - loss: 0.371 - ETA: 3s - loss: 0.370 - ETA: 2s - loss: 0.370 - ETA: 1s - loss: 0.369 - ETA: 0s - loss: 0.369 - ETA: 0s - loss: 0.368 - 106s 957ms/step - loss: 0.3680 - val_loss: 0.2047\n",
      "\n",
      "Epoch 00026: val_loss improved from 0.20484 to 0.20474, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 27/30\n",
      "111/111 [==============================] - ETA: 1:23 - loss: 0.205 - ETA: 1:35 - loss: 0.202 - ETA: 1:34 - loss: 0.200 - ETA: 1:33 - loss: 0.218 - ETA: 1:32 - loss: 0.231 - ETA: 1:31 - loss: 0.247 - ETA: 1:30 - loss: 0.285 - ETA: 1:30 - loss: 0.316 - ETA: 1:29 - loss: 0.336 - ETA: 1:28 - loss: 0.350 - ETA: 1:27 - loss: 0.358 - ETA: 1:27 - loss: 0.363 - ETA: 1:26 - loss: 0.372 - ETA: 1:25 - loss: 0.378 - ETA: 1:24 - loss: 0.381 - ETA: 1:23 - loss: 0.386 - ETA: 1:22 - loss: 0.390 - ETA: 1:22 - loss: 0.392 - ETA: 1:21 - loss: 0.393 - ETA: 1:20 - loss: 0.394 - ETA: 1:19 - loss: 0.394 - ETA: 1:18 - loss: 0.394 - ETA: 1:18 - loss: 0.394 - ETA: 1:17 - loss: 0.393 - ETA: 1:16 - loss: 0.392 - ETA: 1:15 - loss: 0.393 - ETA: 1:14 - loss: 0.393 - ETA: 1:13 - loss: 0.393 - ETA: 1:12 - loss: 0.392 - ETA: 1:11 - loss: 0.392 - ETA: 1:10 - loss: 0.391 - ETA: 1:10 - loss: 0.391 - ETA: 1:09 - loss: 0.390 - ETA: 1:08 - loss: 0.389 - ETA: 1:08 - loss: 0.389 - ETA: 1:07 - loss: 0.388 - ETA: 1:06 - loss: 0.387 - ETA: 1:05 - loss: 0.386 - ETA: 1:04 - loss: 0.386 - ETA: 1:04 - loss: 0.385 - ETA: 1:03 - loss: 0.384 - ETA: 1:02 - loss: 0.383 - ETA: 1:01 - loss: 0.383 - ETA: 1:00 - loss: 0.383 - ETA: 59s - loss: 0.382 - ETA: 58s - loss: 0.38 - ETA: 58s - loss: 0.38 - ETA: 57s - loss: 0.38 - ETA: 56s - loss: 0.38 - ETA: 55s - loss: 0.38 - ETA: 54s - loss: 0.38 - ETA: 53s - loss: 0.38 - ETA: 53s - loss: 0.38 - ETA: 52s - loss: 0.37 - ETA: 51s - loss: 0.37 - ETA: 50s - loss: 0.37 - ETA: 49s - loss: 0.37 - ETA: 49s - loss: 0.37 - ETA: 48s - loss: 0.37 - ETA: 47s - loss: 0.37 - ETA: 46s - loss: 0.37 - ETA: 46s - loss: 0.37 - ETA: 45s - loss: 0.37 - ETA: 44s - loss: 0.37 - ETA: 43s - loss: 0.37 - ETA: 42s - loss: 0.37 - ETA: 42s - loss: 0.37 - ETA: 41s - loss: 0.37 - ETA: 40s - loss: 0.37 - ETA: 39s - loss: 0.37 - ETA: 38s - loss: 0.37 - ETA: 37s - loss: 0.37 - ETA: 36s - loss: 0.36 - ETA: 35s - loss: 0.36 - ETA: 34s - loss: 0.36 - ETA: 34s - loss: 0.36 - ETA: 33s - loss: 0.36 - ETA: 32s - loss: 0.36 - ETA: 31s - loss: 0.36 - ETA: 30s - loss: 0.36 - ETA: 29s - loss: 0.36 - ETA: 28s - loss: 0.36 - ETA: 27s - loss: 0.36 - ETA: 26s - loss: 0.36 - ETA: 25s - loss: 0.36 - ETA: 24s - loss: 0.36 - ETA: 23s - loss: 0.36 - ETA: 22s - loss: 0.36 - ETA: 21s - loss: 0.36 - ETA: 20s - loss: 0.36 - ETA: 19s - loss: 0.36 - ETA: 18s - loss: 0.36 - ETA: 17s - loss: 0.36 - ETA: 16s - loss: 0.36 - ETA: 15s - loss: 0.36 - ETA: 14s - loss: 0.36 - ETA: 13s - loss: 0.36 - ETA: 12s - loss: 0.36 - ETA: 11s - loss: 0.36 - ETA: 10s - loss: 0.36 - ETA: 9s - loss: 0.3601 - ETA: 8s - loss: 0.359 - ETA: 7s - loss: 0.359 - ETA: 6s - loss: 0.359 - ETA: 5s - loss: 0.359 - ETA: 4s - loss: 0.358 - ETA: 3s - loss: 0.358 - ETA: 2s - loss: 0.358 - ETA: 1s - loss: 0.357 - ETA: 0s - loss: 0.357 - ETA: 0s - loss: 0.357 - 107s 962ms/step - loss: 0.3571 - val_loss: 0.2066\n",
      "\n",
      "Epoch 00027: val_loss did not improve from 0.20474\n",
      "Epoch 28/30\n",
      "111/111 [==============================] - ETA: 1:41 - loss: 0.148 - ETA: 1:35 - loss: 0.142 - ETA: 1:34 - loss: 0.165 - ETA: 1:34 - loss: 0.189 - ETA: 1:33 - loss: 0.200 - ETA: 1:33 - loss: 0.210 - ETA: 1:32 - loss: 0.221 - ETA: 1:32 - loss: 0.231 - ETA: 1:31 - loss: 0.236 - ETA: 1:30 - loss: 0.241 - ETA: 1:29 - loss: 0.247 - ETA: 1:28 - loss: 0.252 - ETA: 1:27 - loss: 0.258 - ETA: 1:26 - loss: 0.262 - ETA: 1:25 - loss: 0.266 - ETA: 1:24 - loss: 0.270 - ETA: 1:23 - loss: 0.273 - ETA: 1:22 - loss: 0.275 - ETA: 1:22 - loss: 0.277 - ETA: 1:21 - loss: 0.278 - ETA: 1:20 - loss: 0.279 - ETA: 1:19 - loss: 0.280 - ETA: 1:18 - loss: 0.280 - ETA: 1:17 - loss: 0.280 - ETA: 1:16 - loss: 0.280 - ETA: 1:15 - loss: 0.280 - ETA: 1:14 - loss: 0.279 - ETA: 1:13 - loss: 0.279 - ETA: 1:12 - loss: 0.280 - ETA: 1:12 - loss: 0.280 - ETA: 1:11 - loss: 0.281 - ETA: 1:10 - loss: 0.281 - ETA: 1:09 - loss: 0.282 - ETA: 1:08 - loss: 0.282 - ETA: 1:07 - loss: 0.283 - ETA: 1:06 - loss: 0.283 - ETA: 1:05 - loss: 0.284 - ETA: 1:05 - loss: 0.284 - ETA: 1:04 - loss: 0.285 - ETA: 1:03 - loss: 0.285 - ETA: 1:02 - loss: 0.286 - ETA: 1:01 - loss: 0.287 - ETA: 1:00 - loss: 0.287 - ETA: 59s - loss: 0.288 - ETA: 58s - loss: 0.28 - ETA: 58s - loss: 0.29 - ETA: 57s - loss: 0.29 - ETA: 56s - loss: 0.29 - ETA: 55s - loss: 0.29 - ETA: 54s - loss: 0.29 - ETA: 53s - loss: 0.29 - ETA: 53s - loss: 0.29 - ETA: 52s - loss: 0.29 - ETA: 51s - loss: 0.29 - ETA: 50s - loss: 0.29 - ETA: 50s - loss: 0.29 - ETA: 49s - loss: 0.29 - ETA: 48s - loss: 0.30 - ETA: 47s - loss: 0.30 - ETA: 46s - loss: 0.30 - ETA: 45s - loss: 0.30 - ETA: 45s - loss: 0.30 - ETA: 44s - loss: 0.30 - ETA: 43s - loss: 0.30 - ETA: 42s - loss: 0.30 - ETA: 41s - loss: 0.30 - ETA: 40s - loss: 0.30 - ETA: 40s - loss: 0.30 - ETA: 39s - loss: 0.31 - ETA: 38s - loss: 0.31 - ETA: 37s - loss: 0.31 - ETA: 36s - loss: 0.31 - ETA: 36s - loss: 0.31 - ETA: 35s - loss: 0.31 - ETA: 34s - loss: 0.31 - ETA: 33s - loss: 0.31 - ETA: 32s - loss: 0.31 - ETA: 31s - loss: 0.31 - ETA: 30s - loss: 0.31 - ETA: 29s - loss: 0.31 - ETA: 28s - loss: 0.31 - ETA: 27s - loss: 0.32 - ETA: 26s - loss: 0.32 - ETA: 25s - loss: 0.32 - ETA: 24s - loss: 0.32 - ETA: 23s - loss: 0.32 - ETA: 22s - loss: 0.32 - ETA: 21s - loss: 0.32 - ETA: 20s - loss: 0.32 - ETA: 19s - loss: 0.32 - ETA: 18s - loss: 0.32 - ETA: 17s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 13s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 9s - loss: 0.3364 - ETA: 8s - loss: 0.337 - ETA: 7s - loss: 0.337 - ETA: 6s - loss: 0.338 - ETA: 5s - loss: 0.338 - ETA: 4s - loss: 0.339 - ETA: 3s - loss: 0.339 - ETA: 2s - loss: 0.340 - ETA: 1s - loss: 0.340 - ETA: 0s - loss: 0.341 - ETA: 0s - loss: 0.341 - 105s 945ms/step - loss: 0.3418 - val_loss: 0.1975\n",
      "\n",
      "Epoch 00028: val_loss improved from 0.20474 to 0.19748, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 29/30\n",
      "111/111 [==============================] - ETA: 1:25 - loss: 0.339 - ETA: 1:34 - loss: 0.355 - ETA: 1:36 - loss: 0.374 - ETA: 1:35 - loss: 0.375 - ETA: 1:33 - loss: 0.371 - ETA: 1:32 - loss: 0.361 - ETA: 1:31 - loss: 0.349 - ETA: 1:31 - loss: 0.339 - ETA: 1:30 - loss: 0.331 - ETA: 1:29 - loss: 0.333 - ETA: 1:28 - loss: 0.335 - ETA: 1:27 - loss: 0.337 - ETA: 1:26 - loss: 0.338 - ETA: 1:25 - loss: 0.339 - ETA: 1:25 - loss: 0.340 - ETA: 1:24 - loss: 0.341 - ETA: 1:23 - loss: 0.341 - ETA: 1:22 - loss: 0.340 - ETA: 1:21 - loss: 0.339 - ETA: 1:20 - loss: 0.339 - ETA: 1:19 - loss: 0.338 - ETA: 1:19 - loss: 0.337 - ETA: 1:18 - loss: 0.336 - ETA: 1:17 - loss: 0.335 - ETA: 1:16 - loss: 0.334 - ETA: 1:15 - loss: 0.334 - ETA: 1:14 - loss: 0.333 - ETA: 1:13 - loss: 0.333 - ETA: 1:12 - loss: 0.332 - ETA: 1:11 - loss: 0.332 - ETA: 1:11 - loss: 0.331 - ETA: 1:10 - loss: 0.331 - ETA: 1:09 - loss: 0.330 - ETA: 1:08 - loss: 0.329 - ETA: 1:07 - loss: 0.329 - ETA: 1:06 - loss: 0.329 - ETA: 1:05 - loss: 0.328 - ETA: 1:04 - loss: 0.328 - ETA: 1:03 - loss: 0.327 - ETA: 1:03 - loss: 0.327 - ETA: 1:02 - loss: 0.327 - ETA: 1:01 - loss: 0.326 - ETA: 1:00 - loss: 0.326 - ETA: 59s - loss: 0.326 - ETA: 58s - loss: 0.32 - ETA: 57s - loss: 0.32 - ETA: 56s - loss: 0.32 - ETA: 56s - loss: 0.32 - ETA: 55s - loss: 0.32 - ETA: 54s - loss: 0.32 - ETA: 53s - loss: 0.32 - ETA: 52s - loss: 0.32 - ETA: 51s - loss: 0.32 - ETA: 51s - loss: 0.32 - ETA: 50s - loss: 0.32 - ETA: 49s - loss: 0.32 - ETA: 48s - loss: 0.32 - ETA: 47s - loss: 0.32 - ETA: 46s - loss: 0.32 - ETA: 45s - loss: 0.32 - ETA: 45s - loss: 0.32 - ETA: 44s - loss: 0.32 - ETA: 43s - loss: 0.32 - ETA: 42s - loss: 0.32 - ETA: 41s - loss: 0.32 - ETA: 40s - loss: 0.32 - ETA: 39s - loss: 0.32 - ETA: 39s - loss: 0.32 - ETA: 38s - loss: 0.32 - ETA: 37s - loss: 0.32 - ETA: 36s - loss: 0.32 - ETA: 35s - loss: 0.32 - ETA: 34s - loss: 0.32 - ETA: 33s - loss: 0.32 - ETA: 32s - loss: 0.32 - ETA: 32s - loss: 0.32 - ETA: 31s - loss: 0.32 - ETA: 30s - loss: 0.32 - ETA: 29s - loss: 0.33 - ETA: 28s - loss: 0.33 - ETA: 27s - loss: 0.33 - ETA: 26s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 24s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 20s - loss: 0.33 - ETA: 20s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 17s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 13s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 9s - loss: 0.3357 - ETA: 8s - loss: 0.335 - ETA: 7s - loss: 0.336 - ETA: 6s - loss: 0.336 - ETA: 5s - loss: 0.336 - ETA: 4s - loss: 0.336 - ETA: 3s - loss: 0.336 - ETA: 2s - loss: 0.337 - ETA: 1s - loss: 0.337 - ETA: 0s - loss: 0.337 - ETA: 0s - loss: 0.337 - 104s 938ms/step - loss: 0.3377 - val_loss: 0.1749\n",
      "\n",
      "Epoch 00029: val_loss improved from 0.19748 to 0.17486, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 30/30\n",
      "111/111 [==============================] - ETA: 1:50 - loss: 0.330 - ETA: 1:56 - loss: 0.337 - ETA: 1:51 - loss: 0.346 - ETA: 1:51 - loss: 0.335 - ETA: 1:48 - loss: 0.326 - ETA: 1:48 - loss: 0.316 - ETA: 1:48 - loss: 0.309 - ETA: 1:47 - loss: 0.302 - ETA: 1:45 - loss: 0.298 - ETA: 1:43 - loss: 0.293 - ETA: 1:41 - loss: 0.294 - ETA: 1:39 - loss: 0.295 - ETA: 1:37 - loss: 0.296 - ETA: 1:36 - loss: 0.296 - ETA: 1:34 - loss: 0.296 - ETA: 1:32 - loss: 0.295 - ETA: 1:31 - loss: 0.294 - ETA: 1:29 - loss: 0.292 - ETA: 1:28 - loss: 0.291 - ETA: 1:26 - loss: 0.290 - ETA: 1:25 - loss: 0.292 - ETA: 1:24 - loss: 0.292 - ETA: 1:23 - loss: 0.293 - ETA: 1:22 - loss: 0.294 - ETA: 1:21 - loss: 0.294 - ETA: 1:19 - loss: 0.295 - ETA: 1:18 - loss: 0.296 - ETA: 1:17 - loss: 0.297 - ETA: 1:16 - loss: 0.298 - ETA: 1:15 - loss: 0.298 - ETA: 1:14 - loss: 0.299 - ETA: 1:13 - loss: 0.299 - ETA: 1:12 - loss: 0.299 - ETA: 1:11 - loss: 0.299 - ETA: 1:10 - loss: 0.299 - ETA: 1:09 - loss: 0.299 - ETA: 1:08 - loss: 0.299 - ETA: 1:07 - loss: 0.300 - ETA: 1:06 - loss: 0.300 - ETA: 1:05 - loss: 0.300 - ETA: 1:04 - loss: 0.301 - ETA: 1:03 - loss: 0.302 - ETA: 1:02 - loss: 0.302 - ETA: 1:01 - loss: 0.303 - ETA: 1:00 - loss: 0.304 - ETA: 59s - loss: 0.304 - ETA: 58s - loss: 0.30 - ETA: 57s - loss: 0.30 - ETA: 56s - loss: 0.30 - ETA: 55s - loss: 0.30 - ETA: 54s - loss: 0.30 - ETA: 53s - loss: 0.30 - ETA: 52s - loss: 0.30 - ETA: 51s - loss: 0.30 - ETA: 50s - loss: 0.30 - ETA: 49s - loss: 0.30 - ETA: 49s - loss: 0.30 - ETA: 48s - loss: 0.30 - ETA: 47s - loss: 0.30 - ETA: 46s - loss: 0.30 - ETA: 45s - loss: 0.30 - ETA: 44s - loss: 0.30 - ETA: 43s - loss: 0.30 - ETA: 42s - loss: 0.30 - ETA: 41s - loss: 0.30 - ETA: 40s - loss: 0.30 - ETA: 39s - loss: 0.31 - ETA: 38s - loss: 0.31 - ETA: 37s - loss: 0.31 - ETA: 37s - loss: 0.31 - ETA: 36s - loss: 0.31 - ETA: 35s - loss: 0.31 - ETA: 34s - loss: 0.31 - ETA: 33s - loss: 0.31 - ETA: 32s - loss: 0.31 - ETA: 31s - loss: 0.31 - ETA: 30s - loss: 0.31 - ETA: 29s - loss: 0.31 - ETA: 29s - loss: 0.31 - ETA: 28s - loss: 0.31 - ETA: 27s - loss: 0.31 - ETA: 26s - loss: 0.31 - ETA: 25s - loss: 0.31 - ETA: 24s - loss: 0.32 - ETA: 23s - loss: 0.32 - ETA: 22s - loss: 0.32 - ETA: 21s - loss: 0.32 - ETA: 21s - loss: 0.32 - ETA: 20s - loss: 0.32 - ETA: 19s - loss: 0.32 - ETA: 18s - loss: 0.32 - ETA: 17s - loss: 0.32 - ETA: 16s - loss: 0.32 - ETA: 15s - loss: 0.32 - ETA: 14s - loss: 0.32 - ETA: 13s - loss: 0.32 - ETA: 12s - loss: 0.32 - ETA: 11s - loss: 0.32 - ETA: 11s - loss: 0.32 - ETA: 10s - loss: 0.32 - ETA: 9s - loss: 0.3275 - ETA: 8s - loss: 0.327 - ETA: 7s - loss: 0.328 - ETA: 6s - loss: 0.328 - ETA: 5s - loss: 0.328 - ETA: 4s - loss: 0.328 - ETA: 3s - loss: 0.329 - ETA: 2s - loss: 0.329 - ETA: 1s - loss: 0.329 - ETA: 0s - loss: 0.329 - ETA: 0s - loss: 0.329 - 104s 940ms/step - loss: 0.3300 - val_loss: 0.1881\n",
      "\n",
      "Epoch 00030: val_loss did not improve from 0.17486\n",
      "4/4 [==============================] - ETA: 1s - loss: 0.089 - ETA: 1s - loss: 0.091 - ETA: 0s - loss: 0.105 - ETA: 0s - loss: 0.103 - 2s 571ms/step - loss: 0.1037\n",
      "Val Score:  0.1036805808544159\n",
      "====================================================================================\n",
      "\n",
      "\n",
      "Training on Fold:  2\n",
      "Epoch 1/30\n",
      "111/111 [==============================] - ETA: 3:25 - loss: 2.463 - ETA: 1:54 - loss: 2.027 - ETA: 1:50 - loss: 1.778 - ETA: 1:49 - loss: 1.623 - ETA: 1:49 - loss: 1.523 - ETA: 1:49 - loss: 1.441 - ETA: 1:47 - loss: 1.406 - ETA: 1:45 - loss: 1.370 - ETA: 1:43 - loss: 1.343 - ETA: 1:42 - loss: 1.318 - ETA: 1:39 - loss: 1.303 - ETA: 1:38 - loss: 1.289 - ETA: 1:36 - loss: 1.273 - ETA: 1:34 - loss: 1.263 - ETA: 1:32 - loss: 1.251 - ETA: 1:31 - loss: 1.240 - ETA: 1:30 - loss: 1.228 - ETA: 1:30 - loss: 1.215 - ETA: 1:29 - loss: 1.204 - ETA: 1:28 - loss: 1.192 - ETA: 1:27 - loss: 1.182 - ETA: 1:27 - loss: 1.171 - ETA: 1:25 - loss: 1.160 - ETA: 1:24 - loss: 1.150 - ETA: 1:23 - loss: 1.140 - ETA: 1:22 - loss: 1.130 - ETA: 1:20 - loss: 1.121 - ETA: 1:19 - loss: 1.111 - ETA: 1:18 - loss: 1.102 - ETA: 1:17 - loss: 1.094 - ETA: 1:16 - loss: 1.086 - ETA: 1:14 - loss: 1.078 - ETA: 1:13 - loss: 1.072 - ETA: 1:12 - loss: 1.067 - ETA: 1:11 - loss: 1.062 - ETA: 1:10 - loss: 1.057 - ETA: 1:09 - loss: 1.051 - ETA: 1:08 - loss: 1.047 - ETA: 1:07 - loss: 1.042 - ETA: 1:06 - loss: 1.038 - ETA: 1:05 - loss: 1.034 - ETA: 1:04 - loss: 1.030 - ETA: 1:03 - loss: 1.026 - ETA: 1:02 - loss: 1.022 - ETA: 1:01 - loss: 1.018 - ETA: 1:00 - loss: 1.014 - ETA: 59s - loss: 1.010 - ETA: 58s - loss: 1.00 - ETA: 57s - loss: 1.00 - ETA: 56s - loss: 0.99 - ETA: 55s - loss: 0.99 - ETA: 54s - loss: 0.99 - ETA: 53s - loss: 0.98 - ETA: 52s - loss: 0.98 - ETA: 51s - loss: 0.98 - ETA: 50s - loss: 0.97 - ETA: 49s - loss: 0.97 - ETA: 48s - loss: 0.97 - ETA: 47s - loss: 0.97 - ETA: 46s - loss: 0.96 - ETA: 45s - loss: 0.96 - ETA: 44s - loss: 0.96 - ETA: 44s - loss: 0.95 - ETA: 43s - loss: 0.95 - ETA: 42s - loss: 0.95 - ETA: 41s - loss: 0.94 - ETA: 40s - loss: 0.94 - ETA: 39s - loss: 0.94 - ETA: 38s - loss: 0.94 - ETA: 37s - loss: 0.93 - ETA: 36s - loss: 0.93 - ETA: 35s - loss: 0.93 - ETA: 34s - loss: 0.93 - ETA: 33s - loss: 0.92 - ETA: 32s - loss: 0.92 - ETA: 31s - loss: 0.92 - ETA: 31s - loss: 0.92 - ETA: 30s - loss: 0.91 - ETA: 29s - loss: 0.91 - ETA: 28s - loss: 0.91 - ETA: 27s - loss: 0.91 - ETA: 26s - loss: 0.91 - ETA: 25s - loss: 0.91 - ETA: 24s - loss: 0.90 - ETA: 23s - loss: 0.90 - ETA: 22s - loss: 0.90 - ETA: 21s - loss: 0.90 - ETA: 20s - loss: 0.90 - ETA: 20s - loss: 0.90 - ETA: 19s - loss: 0.89 - ETA: 18s - loss: 0.89 - ETA: 17s - loss: 0.89 - ETA: 16s - loss: 0.89 - ETA: 15s - loss: 0.89 - ETA: 14s - loss: 0.89 - ETA: 13s - loss: 0.88 - ETA: 12s - loss: 0.88 - ETA: 11s - loss: 0.88 - ETA: 11s - loss: 0.88 - ETA: 10s - loss: 0.88 - ETA: 9s - loss: 0.8823 - ETA: 8s - loss: 0.880 - ETA: 7s - loss: 0.879 - ETA: 6s - loss: 0.878 - ETA: 5s - loss: 0.876 - ETA: 4s - loss: 0.875 - ETA: 3s - loss: 0.874 - ETA: 2s - loss: 0.872 - ETA: 1s - loss: 0.871 - ETA: 0s - loss: 0.870 - ETA: 0s - loss: 0.868 - 108s 966ms/step - loss: 0.8675 - val_loss: 0.2150\n",
      "\n",
      "Epoch 00001: val_loss did not improve from 0.17486\n",
      "Epoch 2/30\n",
      "111/111 [==============================] - ETA: 1:50 - loss: 0.399 - ETA: 1:49 - loss: 0.451 - ETA: 1:47 - loss: 0.499 - ETA: 1:45 - loss: 0.498 - ETA: 1:43 - loss: 0.492 - ETA: 1:42 - loss: 0.485 - ETA: 1:40 - loss: 0.479 - ETA: 1:38 - loss: 0.472 - ETA: 1:36 - loss: 0.466 - ETA: 1:35 - loss: 0.463 - ETA: 1:33 - loss: 0.458 - ETA: 1:32 - loss: 0.453 - ETA: 1:31 - loss: 0.456 - ETA: 1:30 - loss: 0.458 - ETA: 1:28 - loss: 0.461 - ETA: 1:27 - loss: 0.463 - ETA: 1:26 - loss: 0.466 - ETA: 1:25 - loss: 0.469 - ETA: 1:24 - loss: 0.471 - ETA: 1:23 - loss: 0.472 - ETA: 1:22 - loss: 0.472 - ETA: 1:21 - loss: 0.473 - ETA: 1:20 - loss: 0.474 - ETA: 1:19 - loss: 0.474 - ETA: 1:18 - loss: 0.475 - ETA: 1:17 - loss: 0.475 - ETA: 1:16 - loss: 0.476 - ETA: 1:15 - loss: 0.476 - ETA: 1:14 - loss: 0.477 - ETA: 1:13 - loss: 0.478 - ETA: 1:12 - loss: 0.478 - ETA: 1:11 - loss: 0.479 - ETA: 1:10 - loss: 0.480 - ETA: 1:09 - loss: 0.481 - ETA: 1:08 - loss: 0.482 - ETA: 1:07 - loss: 0.483 - ETA: 1:06 - loss: 0.484 - ETA: 1:05 - loss: 0.484 - ETA: 1:04 - loss: 0.485 - ETA: 1:03 - loss: 0.486 - ETA: 1:02 - loss: 0.487 - ETA: 1:02 - loss: 0.487 - ETA: 1:01 - loss: 0.488 - ETA: 1:00 - loss: 0.488 - ETA: 59s - loss: 0.489 - ETA: 58s - loss: 0.49 - ETA: 57s - loss: 0.49 - ETA: 56s - loss: 0.49 - ETA: 55s - loss: 0.49 - ETA: 54s - loss: 0.49 - ETA: 53s - loss: 0.49 - ETA: 52s - loss: 0.49 - ETA: 52s - loss: 0.49 - ETA: 51s - loss: 0.49 - ETA: 50s - loss: 0.49 - ETA: 49s - loss: 0.49 - ETA: 48s - loss: 0.49 - ETA: 47s - loss: 0.49 - ETA: 46s - loss: 0.49 - ETA: 45s - loss: 0.49 - ETA: 44s - loss: 0.49 - ETA: 43s - loss: 0.49 - ETA: 43s - loss: 0.49 - ETA: 42s - loss: 0.49 - ETA: 41s - loss: 0.49 - ETA: 40s - loss: 0.49 - ETA: 39s - loss: 0.49 - ETA: 38s - loss: 0.49 - ETA: 37s - loss: 0.49 - ETA: 36s - loss: 0.49 - ETA: 35s - loss: 0.49 - ETA: 34s - loss: 0.49 - ETA: 34s - loss: 0.49 - ETA: 33s - loss: 0.49 - ETA: 32s - loss: 0.49 - ETA: 31s - loss: 0.49 - ETA: 30s - loss: 0.49 - ETA: 29s - loss: 0.49 - ETA: 28s - loss: 0.49 - ETA: 27s - loss: 0.49 - ETA: 26s - loss: 0.49 - ETA: 25s - loss: 0.49 - ETA: 25s - loss: 0.49 - ETA: 24s - loss: 0.49 - ETA: 23s - loss: 0.49 - ETA: 22s - loss: 0.49 - ETA: 21s - loss: 0.49 - ETA: 20s - loss: 0.49 - ETA: 19s - loss: 0.49 - ETA: 18s - loss: 0.49 - ETA: 17s - loss: 0.49 - ETA: 17s - loss: 0.49 - ETA: 16s - loss: 0.49 - ETA: 15s - loss: 0.49 - ETA: 14s - loss: 0.49 - ETA: 13s - loss: 0.49 - ETA: 12s - loss: 0.49 - ETA: 11s - loss: 0.49 - ETA: 10s - loss: 0.49 - ETA: 9s - loss: 0.4920 - ETA: 8s - loss: 0.492 - ETA: 8s - loss: 0.492 - ETA: 7s - loss: 0.492 - ETA: 6s - loss: 0.493 - ETA: 5s - loss: 0.493 - ETA: 4s - loss: 0.493 - ETA: 3s - loss: 0.493 - ETA: 2s - loss: 0.493 - ETA: 1s - loss: 0.494 - ETA: 0s - loss: 0.494 - ETA: 0s - loss: 0.494 - 103s 924ms/step - loss: 0.4945 - val_loss: 0.1759\n",
      "\n",
      "Epoch 00002: val_loss did not improve from 0.17486\n",
      "Epoch 3/30\n",
      "111/111 [==============================] - ETA: 2:24 - loss: 1.141 - ETA: 2:15 - loss: 0.892 - ETA: 2:14 - loss: 0.784 - ETA: 2:14 - loss: 0.736 - ETA: 2:14 - loss: 0.702 - ETA: 2:13 - loss: 0.679 - ETA: 2:12 - loss: 0.657 - ETA: 2:08 - loss: 0.634 - ETA: 2:05 - loss: 0.611 - ETA: 2:03 - loss: 0.612 - ETA: 1:59 - loss: 0.613 - ETA: 1:56 - loss: 0.614 - ETA: 1:54 - loss: 0.615 - ETA: 1:51 - loss: 0.616 - ETA: 1:49 - loss: 0.616 - ETA: 1:46 - loss: 0.616 - ETA: 1:44 - loss: 0.616 - ETA: 1:42 - loss: 0.615 - ETA: 1:40 - loss: 0.614 - ETA: 1:37 - loss: 0.612 - ETA: 1:35 - loss: 0.611 - ETA: 1:34 - loss: 0.610 - ETA: 1:32 - loss: 0.609 - ETA: 1:30 - loss: 0.609 - ETA: 1:29 - loss: 0.608 - ETA: 1:27 - loss: 0.607 - ETA: 1:26 - loss: 0.606 - ETA: 1:24 - loss: 0.605 - ETA: 1:23 - loss: 0.604 - ETA: 1:22 - loss: 0.603 - ETA: 1:20 - loss: 0.601 - ETA: 1:19 - loss: 0.600 - ETA: 1:18 - loss: 0.598 - ETA: 1:16 - loss: 0.596 - ETA: 1:15 - loss: 0.595 - ETA: 1:14 - loss: 0.593 - ETA: 1:13 - loss: 0.591 - ETA: 1:12 - loss: 0.589 - ETA: 1:10 - loss: 0.588 - ETA: 1:09 - loss: 0.586 - ETA: 1:08 - loss: 0.584 - ETA: 1:07 - loss: 0.582 - ETA: 1:06 - loss: 0.580 - ETA: 1:05 - loss: 0.579 - ETA: 1:04 - loss: 0.577 - ETA: 1:02 - loss: 0.576 - ETA: 1:01 - loss: 0.574 - ETA: 1:00 - loss: 0.573 - ETA: 59s - loss: 0.572 - ETA: 58s - loss: 0.57 - ETA: 57s - loss: 0.57 - ETA: 56s - loss: 0.56 - ETA: 55s - loss: 0.56 - ETA: 54s - loss: 0.56 - ETA: 54s - loss: 0.56 - ETA: 53s - loss: 0.56 - ETA: 52s - loss: 0.56 - ETA: 51s - loss: 0.56 - ETA: 50s - loss: 0.56 - ETA: 49s - loss: 0.56 - ETA: 48s - loss: 0.55 - ETA: 47s - loss: 0.55 - ETA: 46s - loss: 0.55 - ETA: 45s - loss: 0.55 - ETA: 44s - loss: 0.55 - ETA: 43s - loss: 0.55 - ETA: 42s - loss: 0.55 - ETA: 41s - loss: 0.55 - ETA: 40s - loss: 0.55 - ETA: 39s - loss: 0.55 - ETA: 38s - loss: 0.55 - ETA: 37s - loss: 0.55 - ETA: 36s - loss: 0.54 - ETA: 35s - loss: 0.54 - ETA: 34s - loss: 0.54 - ETA: 33s - loss: 0.54 - ETA: 32s - loss: 0.54 - ETA: 31s - loss: 0.54 - ETA: 30s - loss: 0.54 - ETA: 29s - loss: 0.54 - ETA: 28s - loss: 0.54 - ETA: 27s - loss: 0.54 - ETA: 26s - loss: 0.54 - ETA: 25s - loss: 0.54 - ETA: 24s - loss: 0.54 - ETA: 23s - loss: 0.54 - ETA: 22s - loss: 0.54 - ETA: 22s - loss: 0.54 - ETA: 21s - loss: 0.54 - ETA: 20s - loss: 0.53 - ETA: 19s - loss: 0.53 - ETA: 18s - loss: 0.53 - ETA: 17s - loss: 0.53 - ETA: 16s - loss: 0.53 - ETA: 15s - loss: 0.53 - ETA: 14s - loss: 0.53 - ETA: 13s - loss: 0.53 - ETA: 12s - loss: 0.53 - ETA: 11s - loss: 0.53 - ETA: 10s - loss: 0.53 - ETA: 9s - loss: 0.5347 - ETA: 8s - loss: 0.534 - ETA: 7s - loss: 0.533 - ETA: 6s - loss: 0.533 - ETA: 5s - loss: 0.532 - ETA: 4s - loss: 0.532 - ETA: 3s - loss: 0.531 - ETA: 2s - loss: 0.531 - ETA: 1s - loss: 0.530 - ETA: 0s - loss: 0.530 - ETA: 0s - loss: 0.529 - 110s 986ms/step - loss: 0.5294 - val_loss: 0.1878\n",
      "\n",
      "Epoch 00003: val_loss did not improve from 0.17486\n",
      "Epoch 4/30\n",
      "111/111 [==============================] - ETA: 2:25 - loss: 1.493 - ETA: 2:23 - loss: 1.172 - ETA: 2:23 - loss: 1.036 - ETA: 2:24 - loss: 0.937 - ETA: 2:23 - loss: 0.877 - ETA: 2:23 - loss: 0.829 - ETA: 2:24 - loss: 0.795 - ETA: 2:22 - loss: 0.771 - ETA: 2:19 - loss: 0.752 - ETA: 2:17 - loss: 0.738 - ETA: 2:14 - loss: 0.725 - ETA: 2:12 - loss: 0.713 - ETA: 2:09 - loss: 0.701 - ETA: 2:06 - loss: 0.690 - ETA: 2:03 - loss: 0.678 - ETA: 1:59 - loss: 0.669 - ETA: 1:56 - loss: 0.660 - ETA: 1:53 - loss: 0.654 - ETA: 1:50 - loss: 0.650 - ETA: 1:47 - loss: 0.647 - ETA: 1:45 - loss: 0.645 - ETA: 1:43 - loss: 0.644 - ETA: 1:40 - loss: 0.643 - ETA: 1:38 - loss: 0.641 - ETA: 1:36 - loss: 0.639 - ETA: 1:34 - loss: 0.636 - ETA: 1:33 - loss: 0.633 - ETA: 1:31 - loss: 0.631 - ETA: 1:30 - loss: 0.629 - ETA: 1:28 - loss: 0.627 - ETA: 1:26 - loss: 0.625 - ETA: 1:25 - loss: 0.623 - ETA: 1:23 - loss: 0.621 - ETA: 1:22 - loss: 0.619 - ETA: 1:20 - loss: 0.616 - ETA: 1:19 - loss: 0.614 - ETA: 1:18 - loss: 0.612 - ETA: 1:16 - loss: 0.610 - ETA: 1:15 - loss: 0.608 - ETA: 1:14 - loss: 0.606 - ETA: 1:12 - loss: 0.604 - ETA: 1:11 - loss: 0.602 - ETA: 1:10 - loss: 0.600 - ETA: 1:08 - loss: 0.599 - ETA: 1:07 - loss: 0.597 - ETA: 1:06 - loss: 0.595 - ETA: 1:05 - loss: 0.593 - ETA: 1:03 - loss: 0.591 - ETA: 1:02 - loss: 0.589 - ETA: 1:01 - loss: 0.587 - ETA: 1:00 - loss: 0.585 - ETA: 59s - loss: 0.583 - ETA: 58s - loss: 0.58 - ETA: 57s - loss: 0.58 - ETA: 56s - loss: 0.57 - ETA: 55s - loss: 0.57 - ETA: 54s - loss: 0.57 - ETA: 53s - loss: 0.57 - ETA: 52s - loss: 0.57 - ETA: 51s - loss: 0.57 - ETA: 50s - loss: 0.56 - ETA: 49s - loss: 0.56 - ETA: 47s - loss: 0.56 - ETA: 46s - loss: 0.56 - ETA: 45s - loss: 0.56 - ETA: 44s - loss: 0.56 - ETA: 43s - loss: 0.55 - ETA: 42s - loss: 0.55 - ETA: 41s - loss: 0.55 - ETA: 40s - loss: 0.55 - ETA: 39s - loss: 0.55 - ETA: 38s - loss: 0.55 - ETA: 37s - loss: 0.55 - ETA: 36s - loss: 0.54 - ETA: 35s - loss: 0.54 - ETA: 34s - loss: 0.54 - ETA: 33s - loss: 0.54 - ETA: 32s - loss: 0.54 - ETA: 31s - loss: 0.54 - ETA: 30s - loss: 0.54 - ETA: 29s - loss: 0.54 - ETA: 28s - loss: 0.54 - ETA: 27s - loss: 0.54 - ETA: 26s - loss: 0.54 - ETA: 26s - loss: 0.53 - ETA: 25s - loss: 0.53 - ETA: 24s - loss: 0.53 - ETA: 23s - loss: 0.53 - ETA: 22s - loss: 0.53 - ETA: 21s - loss: 0.53 - ETA: 20s - loss: 0.53 - ETA: 19s - loss: 0.53 - ETA: 18s - loss: 0.53 - ETA: 17s - loss: 0.53 - ETA: 16s - loss: 0.53 - ETA: 15s - loss: 0.53 - ETA: 14s - loss: 0.53 - ETA: 13s - loss: 0.53 - ETA: 12s - loss: 0.52 - ETA: 11s - loss: 0.52 - ETA: 10s - loss: 0.52 - ETA: 9s - loss: 0.5276 - ETA: 8s - loss: 0.526 - ETA: 7s - loss: 0.526 - ETA: 6s - loss: 0.525 - ETA: 5s - loss: 0.525 - ETA: 4s - loss: 0.524 - ETA: 3s - loss: 0.524 - ETA: 2s - loss: 0.523 - ETA: 1s - loss: 0.523 - ETA: 0s - loss: 0.522 - 115s 1s/step - loss: 0.5219 - val_loss: 0.1642\n",
      "\n",
      "Epoch 00004: val_loss improved from 0.17486 to 0.16422, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 5/30\n",
      "111/111 [==============================] - ETA: 1:27 - loss: 0.524 - ETA: 1:38 - loss: 0.597 - ETA: 1:38 - loss: 0.608 - ETA: 1:37 - loss: 0.586 - ETA: 1:36 - loss: 0.562 - ETA: 1:35 - loss: 0.549 - ETA: 1:35 - loss: 0.537 - ETA: 1:34 - loss: 0.526 - ETA: 1:34 - loss: 0.514 - ETA: 1:33 - loss: 0.504 - ETA: 1:33 - loss: 0.495 - ETA: 1:33 - loss: 0.488 - ETA: 1:32 - loss: 0.482 - ETA: 1:32 - loss: 0.476 - ETA: 1:33 - loss: 0.471 - ETA: 1:34 - loss: 0.467 - ETA: 1:34 - loss: 0.465 - ETA: 1:34 - loss: 0.463 - ETA: 1:34 - loss: 0.462 - ETA: 1:33 - loss: 0.461 - ETA: 1:33 - loss: 0.461 - ETA: 1:32 - loss: 0.461 - ETA: 1:32 - loss: 0.461 - ETA: 1:31 - loss: 0.460 - ETA: 1:30 - loss: 0.459 - ETA: 1:29 - loss: 0.458 - ETA: 1:28 - loss: 0.457 - ETA: 1:26 - loss: 0.456 - ETA: 1:25 - loss: 0.455 - ETA: 1:24 - loss: 0.454 - ETA: 1:23 - loss: 0.454 - ETA: 1:22 - loss: 0.454 - ETA: 1:21 - loss: 0.453 - ETA: 1:19 - loss: 0.454 - ETA: 1:18 - loss: 0.455 - ETA: 1:17 - loss: 0.455 - ETA: 1:16 - loss: 0.455 - ETA: 1:14 - loss: 0.456 - ETA: 1:13 - loss: 0.456 - ETA: 1:12 - loss: 0.456 - ETA: 1:11 - loss: 0.456 - ETA: 1:10 - loss: 0.455 - ETA: 1:09 - loss: 0.455 - ETA: 1:07 - loss: 0.455 - ETA: 1:06 - loss: 0.455 - ETA: 1:05 - loss: 0.455 - ETA: 1:04 - loss: 0.455 - ETA: 1:03 - loss: 0.455 - ETA: 1:03 - loss: 0.455 - ETA: 1:02 - loss: 0.455 - ETA: 1:01 - loss: 0.454 - ETA: 1:00 - loss: 0.454 - ETA: 59s - loss: 0.454 - ETA: 58s - loss: 0.45 - ETA: 57s - loss: 0.45 - ETA: 56s - loss: 0.45 - ETA: 55s - loss: 0.45 - ETA: 54s - loss: 0.45 - ETA: 54s - loss: 0.45 - ETA: 52s - loss: 0.45 - ETA: 52s - loss: 0.45 - ETA: 51s - loss: 0.45 - ETA: 50s - loss: 0.45 - ETA: 49s - loss: 0.45 - ETA: 48s - loss: 0.45 - ETA: 47s - loss: 0.45 - ETA: 46s - loss: 0.45 - ETA: 45s - loss: 0.45 - ETA: 43s - loss: 0.45 - ETA: 42s - loss: 0.45 - ETA: 41s - loss: 0.45 - ETA: 40s - loss: 0.45 - ETA: 39s - loss: 0.45 - ETA: 38s - loss: 0.45 - ETA: 37s - loss: 0.45 - ETA: 36s - loss: 0.45 - ETA: 35s - loss: 0.45 - ETA: 34s - loss: 0.45 - ETA: 33s - loss: 0.45 - ETA: 32s - loss: 0.45 - ETA: 31s - loss: 0.45 - ETA: 30s - loss: 0.45 - ETA: 29s - loss: 0.45 - ETA: 28s - loss: 0.45 - ETA: 27s - loss: 0.45 - ETA: 25s - loss: 0.45 - ETA: 24s - loss: 0.45 - ETA: 23s - loss: 0.45 - ETA: 22s - loss: 0.45 - ETA: 21s - loss: 0.45 - ETA: 20s - loss: 0.45 - ETA: 19s - loss: 0.45 - ETA: 18s - loss: 0.45 - ETA: 17s - loss: 0.45 - ETA: 16s - loss: 0.45 - ETA: 15s - loss: 0.45 - ETA: 14s - loss: 0.45 - ETA: 13s - loss: 0.45 - ETA: 12s - loss: 0.45 - ETA: 11s - loss: 0.45 - ETA: 10s - loss: 0.45 - ETA: 9s - loss: 0.4546 - ETA: 8s - loss: 0.454 - ETA: 7s - loss: 0.454 - ETA: 6s - loss: 0.454 - ETA: 5s - loss: 0.454 - ETA: 4s - loss: 0.453 - ETA: 3s - loss: 0.453 - ETA: 2s - loss: 0.453 - ETA: 1s - loss: 0.453 - ETA: 0s - loss: 0.453 - 115s 1s/step - loss: 0.4534 - val_loss: 0.1656\n",
      "\n",
      "Epoch 00005: val_loss did not improve from 0.16422\n",
      "Epoch 6/30\n",
      "111/111 [==============================] - ETA: 1:42 - loss: 0.198 - ETA: 1:38 - loss: 0.317 - ETA: 1:39 - loss: 0.366 - ETA: 1:37 - loss: 0.406 - ETA: 1:39 - loss: 0.414 - ETA: 1:39 - loss: 0.436 - ETA: 1:40 - loss: 0.456 - ETA: 1:39 - loss: 0.469 - ETA: 1:37 - loss: 0.475 - ETA: 1:36 - loss: 0.481 - ETA: 1:38 - loss: 0.483 - ETA: 1:38 - loss: 0.486 - ETA: 1:36 - loss: 0.488 - ETA: 1:36 - loss: 0.490 - ETA: 1:35 - loss: 0.492 - ETA: 1:33 - loss: 0.492 - ETA: 1:32 - loss: 0.492 - ETA: 1:31 - loss: 0.491 - ETA: 1:30 - loss: 0.490 - ETA: 1:29 - loss: 0.488 - ETA: 1:29 - loss: 0.487 - ETA: 1:29 - loss: 0.487 - ETA: 1:28 - loss: 0.486 - ETA: 1:28 - loss: 0.486 - ETA: 1:28 - loss: 0.485 - ETA: 1:27 - loss: 0.485 - ETA: 1:27 - loss: 0.485 - ETA: 1:26 - loss: 0.484 - ETA: 1:25 - loss: 0.483 - ETA: 1:24 - loss: 0.482 - ETA: 1:23 - loss: 0.481 - ETA: 1:22 - loss: 0.480 - ETA: 1:21 - loss: 0.479 - ETA: 1:19 - loss: 0.478 - ETA: 1:18 - loss: 0.478 - ETA: 1:18 - loss: 0.477 - ETA: 1:17 - loss: 0.477 - ETA: 1:15 - loss: 0.476 - ETA: 1:14 - loss: 0.475 - ETA: 1:13 - loss: 0.475 - ETA: 1:12 - loss: 0.474 - ETA: 1:11 - loss: 0.473 - ETA: 1:09 - loss: 0.472 - ETA: 1:08 - loss: 0.471 - ETA: 1:07 - loss: 0.471 - ETA: 1:07 - loss: 0.470 - ETA: 1:06 - loss: 0.469 - ETA: 1:05 - loss: 0.468 - ETA: 1:04 - loss: 0.467 - ETA: 1:03 - loss: 0.466 - ETA: 1:02 - loss: 0.465 - ETA: 1:00 - loss: 0.464 - ETA: 59s - loss: 0.463 - ETA: 58s - loss: 0.46 - ETA: 57s - loss: 0.46 - ETA: 56s - loss: 0.46 - ETA: 55s - loss: 0.46 - ETA: 54s - loss: 0.46 - ETA: 53s - loss: 0.46 - ETA: 52s - loss: 0.45 - ETA: 51s - loss: 0.45 - ETA: 50s - loss: 0.45 - ETA: 49s - loss: 0.45 - ETA: 47s - loss: 0.45 - ETA: 46s - loss: 0.45 - ETA: 45s - loss: 0.45 - ETA: 44s - loss: 0.45 - ETA: 43s - loss: 0.45 - ETA: 42s - loss: 0.45 - ETA: 41s - loss: 0.45 - ETA: 40s - loss: 0.45 - ETA: 39s - loss: 0.45 - ETA: 38s - loss: 0.45 - ETA: 37s - loss: 0.45 - ETA: 36s - loss: 0.45 - ETA: 35s - loss: 0.45 - ETA: 34s - loss: 0.45 - ETA: 33s - loss: 0.45 - ETA: 32s - loss: 0.45 - ETA: 31s - loss: 0.45 - ETA: 30s - loss: 0.45 - ETA: 29s - loss: 0.45 - ETA: 28s - loss: 0.44 - ETA: 27s - loss: 0.44 - ETA: 26s - loss: 0.44 - ETA: 25s - loss: 0.44 - ETA: 24s - loss: 0.44 - ETA: 23s - loss: 0.44 - ETA: 22s - loss: 0.44 - ETA: 21s - loss: 0.44 - ETA: 20s - loss: 0.44 - ETA: 19s - loss: 0.44 - ETA: 18s - loss: 0.44 - ETA: 17s - loss: 0.44 - ETA: 16s - loss: 0.44 - ETA: 15s - loss: 0.44 - ETA: 14s - loss: 0.44 - ETA: 13s - loss: 0.44 - ETA: 12s - loss: 0.44 - ETA: 11s - loss: 0.44 - ETA: 10s - loss: 0.44 - ETA: 9s - loss: 0.4456 - ETA: 8s - loss: 0.445 - ETA: 7s - loss: 0.445 - ETA: 6s - loss: 0.445 - ETA: 5s - loss: 0.444 - ETA: 4s - loss: 0.444 - ETA: 3s - loss: 0.444 - ETA: 2s - loss: 0.444 - ETA: 1s - loss: 0.444 - ETA: 0s - loss: 0.444 - 116s 1s/step - loss: 0.4438 - val_loss: 0.1563\n",
      "\n",
      "Epoch 00006: val_loss improved from 0.16422 to 0.15633, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 7/30\n",
      "111/111 [==============================] - ETA: 2:02 - loss: 0.356 - ETA: 2:21 - loss: 0.303 - ETA: 2:19 - loss: 0.283 - ETA: 2:16 - loss: 0.293 - ETA: 2:15 - loss: 0.292 - ETA: 2:15 - loss: 0.296 - ETA: 2:16 - loss: 0.297 - ETA: 2:17 - loss: 0.295 - ETA: 2:13 - loss: 0.298 - ETA: 2:09 - loss: 0.300 - ETA: 2:06 - loss: 0.307 - ETA: 2:03 - loss: 0.312 - ETA: 1:59 - loss: 0.322 - ETA: 1:56 - loss: 0.330 - ETA: 1:54 - loss: 0.335 - ETA: 1:52 - loss: 0.338 - ETA: 1:50 - loss: 0.340 - ETA: 1:48 - loss: 0.342 - ETA: 1:46 - loss: 0.343 - ETA: 1:44 - loss: 0.343 - ETA: 1:42 - loss: 0.343 - ETA: 1:41 - loss: 0.344 - ETA: 1:39 - loss: 0.344 - ETA: 1:38 - loss: 0.344 - ETA: 1:37 - loss: 0.344 - ETA: 1:35 - loss: 0.344 - ETA: 1:34 - loss: 0.345 - ETA: 1:33 - loss: 0.346 - ETA: 1:32 - loss: 0.348 - ETA: 1:31 - loss: 0.349 - ETA: 1:29 - loss: 0.350 - ETA: 1:28 - loss: 0.351 - ETA: 1:26 - loss: 0.352 - ETA: 1:25 - loss: 0.354 - ETA: 1:24 - loss: 0.356 - ETA: 1:22 - loss: 0.358 - ETA: 1:21 - loss: 0.361 - ETA: 1:19 - loss: 0.363 - ETA: 1:18 - loss: 0.366 - ETA: 1:17 - loss: 0.368 - ETA: 1:15 - loss: 0.370 - ETA: 1:14 - loss: 0.373 - ETA: 1:13 - loss: 0.375 - ETA: 1:12 - loss: 0.377 - ETA: 1:11 - loss: 0.379 - ETA: 1:09 - loss: 0.380 - ETA: 1:08 - loss: 0.382 - ETA: 1:07 - loss: 0.384 - ETA: 1:06 - loss: 0.385 - ETA: 1:05 - loss: 0.387 - ETA: 1:04 - loss: 0.388 - ETA: 1:03 - loss: 0.389 - ETA: 1:02 - loss: 0.391 - ETA: 1:01 - loss: 0.392 - ETA: 59s - loss: 0.393 - ETA: 58s - loss: 0.39 - ETA: 57s - loss: 0.39 - ETA: 56s - loss: 0.39 - ETA: 55s - loss: 0.39 - ETA: 54s - loss: 0.39 - ETA: 52s - loss: 0.39 - ETA: 51s - loss: 0.39 - ETA: 50s - loss: 0.39 - ETA: 49s - loss: 0.40 - ETA: 48s - loss: 0.40 - ETA: 47s - loss: 0.40 - ETA: 46s - loss: 0.40 - ETA: 45s - loss: 0.40 - ETA: 44s - loss: 0.40 - ETA: 43s - loss: 0.40 - ETA: 42s - loss: 0.40 - ETA: 41s - loss: 0.40 - ETA: 40s - loss: 0.40 - ETA: 39s - loss: 0.40 - ETA: 38s - loss: 0.40 - ETA: 37s - loss: 0.40 - ETA: 36s - loss: 0.40 - ETA: 35s - loss: 0.40 - ETA: 34s - loss: 0.40 - ETA: 33s - loss: 0.40 - ETA: 31s - loss: 0.40 - ETA: 30s - loss: 0.40 - ETA: 29s - loss: 0.40 - ETA: 28s - loss: 0.40 - ETA: 27s - loss: 0.40 - ETA: 26s - loss: 0.40 - ETA: 25s - loss: 0.41 - ETA: 24s - loss: 0.41 - ETA: 23s - loss: 0.41 - ETA: 22s - loss: 0.41 - ETA: 21s - loss: 0.41 - ETA: 19s - loss: 0.41 - ETA: 18s - loss: 0.41 - ETA: 17s - loss: 0.41 - ETA: 16s - loss: 0.41 - ETA: 15s - loss: 0.41 - ETA: 14s - loss: 0.41 - ETA: 13s - loss: 0.41 - ETA: 12s - loss: 0.41 - ETA: 11s - loss: 0.41 - ETA: 10s - loss: 0.41 - ETA: 9s - loss: 0.4125 - ETA: 8s - loss: 0.412 - ETA: 7s - loss: 0.412 - ETA: 6s - loss: 0.412 - ETA: 5s - loss: 0.412 - ETA: 4s - loss: 0.412 - ETA: 3s - loss: 0.412 - ETA: 2s - loss: 0.413 - ETA: 1s - loss: 0.413 - ETA: 0s - loss: 0.413 - 117s 1s/step - loss: 0.4132 - val_loss: 0.1500\n",
      "\n",
      "Epoch 00007: val_loss improved from 0.15633 to 0.15005, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 8/30\n",
      "111/111 [==============================] - ETA: 2:02 - loss: 0.153 - ETA: 1:37 - loss: 0.158 - ETA: 1:36 - loss: 0.174 - ETA: 1:37 - loss: 0.201 - ETA: 1:38 - loss: 0.217 - ETA: 1:38 - loss: 0.251 - ETA: 1:38 - loss: 0.273 - ETA: 1:37 - loss: 0.289 - ETA: 1:37 - loss: 0.299 - ETA: 1:35 - loss: 0.305 - ETA: 1:35 - loss: 0.310 - ETA: 1:34 - loss: 0.319 - ETA: 1:33 - loss: 0.325 - ETA: 1:32 - loss: 0.330 - ETA: 1:31 - loss: 0.334 - ETA: 1:30 - loss: 0.338 - ETA: 1:30 - loss: 0.344 - ETA: 1:29 - loss: 0.349 - ETA: 1:28 - loss: 0.353 - ETA: 1:27 - loss: 0.357 - ETA: 1:26 - loss: 0.361 - ETA: 1:26 - loss: 0.364 - ETA: 1:25 - loss: 0.366 - ETA: 1:24 - loss: 0.368 - ETA: 1:23 - loss: 0.370 - ETA: 1:22 - loss: 0.371 - ETA: 1:22 - loss: 0.372 - ETA: 1:21 - loss: 0.373 - ETA: 1:20 - loss: 0.374 - ETA: 1:20 - loss: 0.375 - ETA: 1:19 - loss: 0.375 - ETA: 1:19 - loss: 0.376 - ETA: 1:18 - loss: 0.376 - ETA: 1:18 - loss: 0.376 - ETA: 1:17 - loss: 0.376 - ETA: 1:16 - loss: 0.376 - ETA: 1:15 - loss: 0.376 - ETA: 1:14 - loss: 0.376 - ETA: 1:13 - loss: 0.377 - ETA: 1:12 - loss: 0.376 - ETA: 1:11 - loss: 0.376 - ETA: 1:10 - loss: 0.376 - ETA: 1:09 - loss: 0.376 - ETA: 1:08 - loss: 0.376 - ETA: 1:07 - loss: 0.376 - ETA: 1:06 - loss: 0.376 - ETA: 1:05 - loss: 0.376 - ETA: 1:03 - loss: 0.376 - ETA: 1:02 - loss: 0.376 - ETA: 1:01 - loss: 0.376 - ETA: 1:00 - loss: 0.376 - ETA: 59s - loss: 0.376 - ETA: 58s - loss: 0.37 - ETA: 57s - loss: 0.37 - ETA: 56s - loss: 0.37 - ETA: 55s - loss: 0.37 - ETA: 54s - loss: 0.37 - ETA: 53s - loss: 0.37 - ETA: 52s - loss: 0.38 - ETA: 51s - loss: 0.38 - ETA: 50s - loss: 0.38 - ETA: 48s - loss: 0.38 - ETA: 47s - loss: 0.38 - ETA: 46s - loss: 0.38 - ETA: 45s - loss: 0.38 - ETA: 44s - loss: 0.38 - ETA: 43s - loss: 0.38 - ETA: 42s - loss: 0.38 - ETA: 41s - loss: 0.38 - ETA: 40s - loss: 0.38 - ETA: 39s - loss: 0.38 - ETA: 38s - loss: 0.38 - ETA: 37s - loss: 0.38 - ETA: 36s - loss: 0.38 - ETA: 35s - loss: 0.38 - ETA: 34s - loss: 0.38 - ETA: 33s - loss: 0.38 - ETA: 32s - loss: 0.38 - ETA: 31s - loss: 0.38 - ETA: 30s - loss: 0.38 - ETA: 29s - loss: 0.38 - ETA: 28s - loss: 0.38 - ETA: 27s - loss: 0.38 - ETA: 26s - loss: 0.38 - ETA: 25s - loss: 0.38 - ETA: 24s - loss: 0.38 - ETA: 23s - loss: 0.38 - ETA: 22s - loss: 0.38 - ETA: 21s - loss: 0.38 - ETA: 20s - loss: 0.38 - ETA: 19s - loss: 0.38 - ETA: 18s - loss: 0.38 - ETA: 17s - loss: 0.38 - ETA: 16s - loss: 0.38 - ETA: 15s - loss: 0.38 - ETA: 14s - loss: 0.38 - ETA: 13s - loss: 0.38 - ETA: 12s - loss: 0.38 - ETA: 11s - loss: 0.38 - ETA: 10s - loss: 0.38 - ETA: 9s - loss: 0.3881 - ETA: 8s - loss: 0.388 - ETA: 7s - loss: 0.388 - ETA: 6s - loss: 0.388 - ETA: 5s - loss: 0.388 - ETA: 4s - loss: 0.389 - ETA: 3s - loss: 0.389 - ETA: 2s - loss: 0.389 - ETA: 1s - loss: 0.389 - ETA: 0s - loss: 0.389 - ETA: 0s - loss: 0.389 - 109s 983ms/step - loss: 0.3897 - val_loss: 0.1468\n",
      "\n",
      "Epoch 00008: val_loss improved from 0.15005 to 0.14682, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 9/30\n",
      "111/111 [==============================] - ETA: 1:30 - loss: 0.226 - ETA: 1:39 - loss: 0.249 - ETA: 1:39 - loss: 0.324 - ETA: 1:39 - loss: 0.338 - ETA: 1:40 - loss: 0.346 - ETA: 1:39 - loss: 0.344 - ETA: 1:38 - loss: 0.345 - ETA: 1:37 - loss: 0.344 - ETA: 1:35 - loss: 0.343 - ETA: 1:34 - loss: 0.341 - ETA: 1:33 - loss: 0.341 - ETA: 1:32 - loss: 0.344 - ETA: 1:31 - loss: 0.345 - ETA: 1:30 - loss: 0.346 - ETA: 1:30 - loss: 0.347 - ETA: 1:30 - loss: 0.347 - ETA: 1:29 - loss: 0.347 - ETA: 1:28 - loss: 0.348 - ETA: 1:28 - loss: 0.350 - ETA: 1:27 - loss: 0.351 - ETA: 1:26 - loss: 0.352 - ETA: 1:25 - loss: 0.353 - ETA: 1:24 - loss: 0.353 - ETA: 1:23 - loss: 0.353 - ETA: 1:22 - loss: 0.354 - ETA: 1:21 - loss: 0.354 - ETA: 1:20 - loss: 0.355 - ETA: 1:19 - loss: 0.356 - ETA: 1:18 - loss: 0.356 - ETA: 1:16 - loss: 0.356 - ETA: 1:15 - loss: 0.357 - ETA: 1:14 - loss: 0.357 - ETA: 1:13 - loss: 0.357 - ETA: 1:12 - loss: 0.357 - ETA: 1:11 - loss: 0.357 - ETA: 1:10 - loss: 0.358 - ETA: 1:09 - loss: 0.358 - ETA: 1:08 - loss: 0.358 - ETA: 1:08 - loss: 0.358 - ETA: 1:07 - loss: 0.358 - ETA: 1:06 - loss: 0.359 - ETA: 1:05 - loss: 0.359 - ETA: 1:04 - loss: 0.359 - ETA: 1:04 - loss: 0.360 - ETA: 1:03 - loss: 0.360 - ETA: 1:02 - loss: 0.360 - ETA: 1:01 - loss: 0.360 - ETA: 1:00 - loss: 0.361 - ETA: 1:00 - loss: 0.361 - ETA: 59s - loss: 0.361 - ETA: 58s - loss: 0.36 - ETA: 57s - loss: 0.36 - ETA: 56s - loss: 0.36 - ETA: 55s - loss: 0.36 - ETA: 54s - loss: 0.36 - ETA: 53s - loss: 0.36 - ETA: 52s - loss: 0.36 - ETA: 51s - loss: 0.36 - ETA: 50s - loss: 0.36 - ETA: 49s - loss: 0.36 - ETA: 48s - loss: 0.36 - ETA: 47s - loss: 0.36 - ETA: 46s - loss: 0.37 - ETA: 45s - loss: 0.37 - ETA: 44s - loss: 0.37 - ETA: 43s - loss: 0.37 - ETA: 42s - loss: 0.37 - ETA: 41s - loss: 0.37 - ETA: 40s - loss: 0.37 - ETA: 39s - loss: 0.37 - ETA: 38s - loss: 0.37 - ETA: 37s - loss: 0.37 - ETA: 37s - loss: 0.37 - ETA: 36s - loss: 0.37 - ETA: 35s - loss: 0.37 - ETA: 34s - loss: 0.37 - ETA: 33s - loss: 0.37 - ETA: 32s - loss: 0.37 - ETA: 31s - loss: 0.37 - ETA: 30s - loss: 0.37 - ETA: 29s - loss: 0.37 - ETA: 28s - loss: 0.37 - ETA: 27s - loss: 0.37 - ETA: 26s - loss: 0.37 - ETA: 25s - loss: 0.37 - ETA: 24s - loss: 0.37 - ETA: 23s - loss: 0.37 - ETA: 22s - loss: 0.37 - ETA: 21s - loss: 0.37 - ETA: 20s - loss: 0.38 - ETA: 19s - loss: 0.38 - ETA: 18s - loss: 0.38 - ETA: 17s - loss: 0.38 - ETA: 16s - loss: 0.38 - ETA: 15s - loss: 0.38 - ETA: 14s - loss: 0.38 - ETA: 13s - loss: 0.38 - ETA: 12s - loss: 0.38 - ETA: 11s - loss: 0.38 - ETA: 10s - loss: 0.38 - ETA: 9s - loss: 0.3831 - ETA: 8s - loss: 0.383 - ETA: 7s - loss: 0.383 - ETA: 6s - loss: 0.383 - ETA: 5s - loss: 0.384 - ETA: 4s - loss: 0.384 - ETA: 3s - loss: 0.384 - ETA: 2s - loss: 0.384 - ETA: 1s - loss: 0.384 - ETA: 0s - loss: 0.385 - ETA: 0s - loss: 0.385 - 109s 980ms/step - loss: 0.3855 - val_loss: 0.1417\n",
      "\n",
      "Epoch 00009: val_loss improved from 0.14682 to 0.14171, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 10/30\n",
      "111/111 [==============================] - ETA: 1:28 - loss: 0.464 - ETA: 1:38 - loss: 0.435 - ETA: 1:46 - loss: 0.390 - ETA: 1:43 - loss: 0.352 - ETA: 1:41 - loss: 0.332 - ETA: 1:39 - loss: 0.318 - ETA: 1:38 - loss: 0.315 - ETA: 1:37 - loss: 0.318 - ETA: 1:37 - loss: 0.318 - ETA: 1:37 - loss: 0.320 - ETA: 1:36 - loss: 0.325 - ETA: 1:35 - loss: 0.328 - ETA: 1:35 - loss: 0.329 - ETA: 1:34 - loss: 0.331 - ETA: 1:33 - loss: 0.333 - ETA: 1:32 - loss: 0.334 - ETA: 1:31 - loss: 0.334 - ETA: 1:30 - loss: 0.335 - ETA: 1:29 - loss: 0.336 - ETA: 1:28 - loss: 0.339 - ETA: 1:27 - loss: 0.341 - ETA: 1:27 - loss: 0.342 - ETA: 1:26 - loss: 0.343 - ETA: 1:25 - loss: 0.344 - ETA: 1:24 - loss: 0.345 - ETA: 1:24 - loss: 0.345 - ETA: 1:23 - loss: 0.346 - ETA: 1:22 - loss: 0.347 - ETA: 1:21 - loss: 0.348 - ETA: 1:20 - loss: 0.349 - ETA: 1:19 - loss: 0.350 - ETA: 1:18 - loss: 0.351 - ETA: 1:17 - loss: 0.353 - ETA: 1:16 - loss: 0.354 - ETA: 1:15 - loss: 0.354 - ETA: 1:14 - loss: 0.355 - ETA: 1:13 - loss: 0.356 - ETA: 1:12 - loss: 0.356 - ETA: 1:11 - loss: 0.357 - ETA: 1:10 - loss: 0.357 - ETA: 1:09 - loss: 0.358 - ETA: 1:08 - loss: 0.358 - ETA: 1:06 - loss: 0.358 - ETA: 1:05 - loss: 0.358 - ETA: 1:04 - loss: 0.359 - ETA: 1:03 - loss: 0.359 - ETA: 1:02 - loss: 0.359 - ETA: 1:01 - loss: 0.359 - ETA: 1:00 - loss: 0.359 - ETA: 59s - loss: 0.359 - ETA: 58s - loss: 0.36 - ETA: 57s - loss: 0.36 - ETA: 56s - loss: 0.36 - ETA: 55s - loss: 0.36 - ETA: 54s - loss: 0.36 - ETA: 53s - loss: 0.36 - ETA: 53s - loss: 0.36 - ETA: 52s - loss: 0.36 - ETA: 51s - loss: 0.36 - ETA: 50s - loss: 0.36 - ETA: 49s - loss: 0.36 - ETA: 48s - loss: 0.36 - ETA: 47s - loss: 0.36 - ETA: 46s - loss: 0.36 - ETA: 45s - loss: 0.36 - ETA: 44s - loss: 0.36 - ETA: 43s - loss: 0.36 - ETA: 42s - loss: 0.36 - ETA: 41s - loss: 0.36 - ETA: 40s - loss: 0.36 - ETA: 39s - loss: 0.36 - ETA: 38s - loss: 0.36 - ETA: 37s - loss: 0.36 - ETA: 36s - loss: 0.36 - ETA: 35s - loss: 0.36 - ETA: 34s - loss: 0.36 - ETA: 33s - loss: 0.36 - ETA: 32s - loss: 0.36 - ETA: 31s - loss: 0.36 - ETA: 30s - loss: 0.36 - ETA: 29s - loss: 0.36 - ETA: 28s - loss: 0.36 - ETA: 27s - loss: 0.36 - ETA: 26s - loss: 0.36 - ETA: 25s - loss: 0.36 - ETA: 24s - loss: 0.36 - ETA: 23s - loss: 0.36 - ETA: 22s - loss: 0.36 - ETA: 21s - loss: 0.36 - ETA: 20s - loss: 0.36 - ETA: 19s - loss: 0.36 - ETA: 18s - loss: 0.36 - ETA: 17s - loss: 0.36 - ETA: 16s - loss: 0.36 - ETA: 15s - loss: 0.36 - ETA: 14s - loss: 0.36 - ETA: 13s - loss: 0.36 - ETA: 12s - loss: 0.36 - ETA: 11s - loss: 0.36 - ETA: 10s - loss: 0.37 - ETA: 9s - loss: 0.3701 - ETA: 8s - loss: 0.370 - ETA: 7s - loss: 0.370 - ETA: 6s - loss: 0.370 - ETA: 5s - loss: 0.370 - ETA: 4s - loss: 0.370 - ETA: 3s - loss: 0.370 - ETA: 2s - loss: 0.370 - ETA: 1s - loss: 0.370 - ETA: 0s - loss: 0.370 - ETA: 0s - loss: 0.370 - 109s 981ms/step - loss: 0.3708 - val_loss: 0.1408\n",
      "\n",
      "Epoch 00010: val_loss improved from 0.14171 to 0.14079, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 11/30\n",
      "111/111 [==============================] - ETA: 1:30 - loss: 0.234 - ETA: 1:46 - loss: 0.203 - ETA: 1:55 - loss: 0.206 - ETA: 1:48 - loss: 0.214 - ETA: 1:44 - loss: 0.217 - ETA: 1:41 - loss: 0.216 - ETA: 1:40 - loss: 0.220 - ETA: 1:39 - loss: 0.221 - ETA: 1:41 - loss: 0.226 - ETA: 1:43 - loss: 0.231 - ETA: 1:45 - loss: 0.234 - ETA: 1:44 - loss: 0.237 - ETA: 1:43 - loss: 0.243 - ETA: 1:42 - loss: 0.248 - ETA: 1:41 - loss: 0.256 - ETA: 1:39 - loss: 0.264 - ETA: 1:38 - loss: 0.270 - ETA: 1:36 - loss: 0.276 - ETA: 1:35 - loss: 0.280 - ETA: 1:34 - loss: 0.284 - ETA: 1:33 - loss: 0.288 - ETA: 1:31 - loss: 0.291 - ETA: 1:30 - loss: 0.293 - ETA: 1:28 - loss: 0.295 - ETA: 1:27 - loss: 0.297 - ETA: 1:26 - loss: 0.298 - ETA: 1:25 - loss: 0.300 - ETA: 1:23 - loss: 0.302 - ETA: 1:22 - loss: 0.304 - ETA: 1:22 - loss: 0.305 - ETA: 1:21 - loss: 0.306 - ETA: 1:20 - loss: 0.308 - ETA: 1:19 - loss: 0.309 - ETA: 1:18 - loss: 0.310 - ETA: 1:16 - loss: 0.311 - ETA: 1:16 - loss: 0.312 - ETA: 1:14 - loss: 0.313 - ETA: 1:13 - loss: 0.313 - ETA: 1:12 - loss: 0.314 - ETA: 1:11 - loss: 0.315 - ETA: 1:10 - loss: 0.316 - ETA: 1:09 - loss: 0.317 - ETA: 1:08 - loss: 0.317 - ETA: 1:07 - loss: 0.318 - ETA: 1:06 - loss: 0.318 - ETA: 1:04 - loss: 0.319 - ETA: 1:03 - loss: 0.320 - ETA: 1:02 - loss: 0.320 - ETA: 1:01 - loss: 0.321 - ETA: 1:00 - loss: 0.322 - ETA: 59s - loss: 0.323 - ETA: 58s - loss: 0.32 - ETA: 57s - loss: 0.32 - ETA: 56s - loss: 0.32 - ETA: 55s - loss: 0.32 - ETA: 54s - loss: 0.32 - ETA: 53s - loss: 0.32 - ETA: 52s - loss: 0.32 - ETA: 51s - loss: 0.33 - ETA: 50s - loss: 0.33 - ETA: 49s - loss: 0.33 - ETA: 48s - loss: 0.33 - ETA: 47s - loss: 0.33 - ETA: 46s - loss: 0.33 - ETA: 45s - loss: 0.33 - ETA: 44s - loss: 0.33 - ETA: 43s - loss: 0.33 - ETA: 42s - loss: 0.33 - ETA: 41s - loss: 0.33 - ETA: 40s - loss: 0.34 - ETA: 39s - loss: 0.34 - ETA: 38s - loss: 0.34 - ETA: 37s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 35s - loss: 0.34 - ETA: 35s - loss: 0.34 - ETA: 34s - loss: 0.34 - ETA: 33s - loss: 0.34 - ETA: 32s - loss: 0.34 - ETA: 31s - loss: 0.34 - ETA: 30s - loss: 0.34 - ETA: 29s - loss: 0.34 - ETA: 28s - loss: 0.34 - ETA: 27s - loss: 0.35 - ETA: 26s - loss: 0.35 - ETA: 25s - loss: 0.35 - ETA: 24s - loss: 0.35 - ETA: 23s - loss: 0.35 - ETA: 22s - loss: 0.35 - ETA: 21s - loss: 0.35 - ETA: 20s - loss: 0.35 - ETA: 19s - loss: 0.35 - ETA: 18s - loss: 0.35 - ETA: 17s - loss: 0.35 - ETA: 16s - loss: 0.35 - ETA: 15s - loss: 0.35 - ETA: 14s - loss: 0.35 - ETA: 13s - loss: 0.35 - ETA: 12s - loss: 0.35 - ETA: 11s - loss: 0.35 - ETA: 10s - loss: 0.35 - ETA: 9s - loss: 0.3586 - ETA: 8s - loss: 0.358 - ETA: 7s - loss: 0.359 - ETA: 6s - loss: 0.359 - ETA: 5s - loss: 0.359 - ETA: 4s - loss: 0.360 - ETA: 3s - loss: 0.360 - ETA: 2s - loss: 0.360 - ETA: 1s - loss: 0.360 - ETA: 0s - loss: 0.360 - 116s 1s/step - loss: 0.3610 - val_loss: 0.1352\n",
      "\n",
      "Epoch 00011: val_loss improved from 0.14079 to 0.13517, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 12/30\n",
      "111/111 [==============================] - ETA: 1:59 - loss: 0.195 - ETA: 1:45 - loss: 0.363 - ETA: 1:47 - loss: 0.417 - ETA: 1:52 - loss: 0.421 - ETA: 1:49 - loss: 0.420 - ETA: 1:49 - loss: 0.421 - ETA: 1:48 - loss: 0.420 - ETA: 1:48 - loss: 0.417 - ETA: 1:49 - loss: 0.413 - ETA: 1:49 - loss: 0.412 - ETA: 1:47 - loss: 0.408 - ETA: 1:46 - loss: 0.404 - ETA: 1:45 - loss: 0.401 - ETA: 1:44 - loss: 0.397 - ETA: 1:44 - loss: 0.393 - ETA: 1:44 - loss: 0.390 - ETA: 1:43 - loss: 0.388 - ETA: 1:42 - loss: 0.387 - ETA: 1:40 - loss: 0.386 - ETA: 1:39 - loss: 0.385 - ETA: 1:38 - loss: 0.384 - ETA: 1:36 - loss: 0.382 - ETA: 1:35 - loss: 0.382 - ETA: 1:33 - loss: 0.380 - ETA: 1:32 - loss: 0.379 - ETA: 1:31 - loss: 0.378 - ETA: 1:30 - loss: 0.377 - ETA: 1:29 - loss: 0.376 - ETA: 1:28 - loss: 0.374 - ETA: 1:28 - loss: 0.373 - ETA: 1:27 - loss: 0.372 - ETA: 1:26 - loss: 0.371 - ETA: 1:25 - loss: 0.370 - ETA: 1:25 - loss: 0.369 - ETA: 1:23 - loss: 0.369 - ETA: 1:22 - loss: 0.368 - ETA: 1:21 - loss: 0.368 - ETA: 1:20 - loss: 0.367 - ETA: 1:19 - loss: 0.366 - ETA: 1:17 - loss: 0.366 - ETA: 1:16 - loss: 0.365 - ETA: 1:15 - loss: 0.364 - ETA: 1:13 - loss: 0.364 - ETA: 1:12 - loss: 0.363 - ETA: 1:11 - loss: 0.362 - ETA: 1:10 - loss: 0.361 - ETA: 1:09 - loss: 0.361 - ETA: 1:07 - loss: 0.360 - ETA: 1:06 - loss: 0.360 - ETA: 1:05 - loss: 0.360 - ETA: 1:04 - loss: 0.360 - ETA: 1:03 - loss: 0.359 - ETA: 1:02 - loss: 0.359 - ETA: 1:01 - loss: 0.359 - ETA: 1:00 - loss: 0.359 - ETA: 58s - loss: 0.359 - ETA: 57s - loss: 0.35 - ETA: 56s - loss: 0.35 - ETA: 55s - loss: 0.35 - ETA: 53s - loss: 0.35 - ETA: 52s - loss: 0.35 - ETA: 51s - loss: 0.35 - ETA: 50s - loss: 0.35 - ETA: 49s - loss: 0.35 - ETA: 48s - loss: 0.35 - ETA: 47s - loss: 0.35 - ETA: 46s - loss: 0.35 - ETA: 45s - loss: 0.35 - ETA: 44s - loss: 0.35 - ETA: 43s - loss: 0.36 - ETA: 42s - loss: 0.36 - ETA: 41s - loss: 0.36 - ETA: 40s - loss: 0.36 - ETA: 39s - loss: 0.36 - ETA: 38s - loss: 0.36 - ETA: 36s - loss: 0.36 - ETA: 35s - loss: 0.36 - ETA: 34s - loss: 0.36 - ETA: 33s - loss: 0.36 - ETA: 32s - loss: 0.36 - ETA: 31s - loss: 0.36 - ETA: 30s - loss: 0.36 - ETA: 29s - loss: 0.36 - ETA: 28s - loss: 0.36 - ETA: 27s - loss: 0.36 - ETA: 26s - loss: 0.36 - ETA: 25s - loss: 0.36 - ETA: 24s - loss: 0.36 - ETA: 23s - loss: 0.36 - ETA: 22s - loss: 0.36 - ETA: 21s - loss: 0.36 - ETA: 20s - loss: 0.36 - ETA: 19s - loss: 0.36 - ETA: 18s - loss: 0.37 - ETA: 17s - loss: 0.37 - ETA: 15s - loss: 0.37 - ETA: 14s - loss: 0.37 - ETA: 13s - loss: 0.37 - ETA: 12s - loss: 0.37 - ETA: 11s - loss: 0.37 - ETA: 10s - loss: 0.37 - ETA: 9s - loss: 0.3722 - ETA: 8s - loss: 0.372 - ETA: 7s - loss: 0.372 - ETA: 6s - loss: 0.372 - ETA: 5s - loss: 0.373 - ETA: 4s - loss: 0.373 - ETA: 3s - loss: 0.373 - ETA: 2s - loss: 0.373 - ETA: 1s - loss: 0.373 - ETA: 0s - loss: 0.373 - 119s 1s/step - loss: 0.3740 - val_loss: 0.1248\n",
      "\n",
      "Epoch 00012: val_loss improved from 0.13517 to 0.12484, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 13/30\n",
      "111/111 [==============================] - ETA: 1:32 - loss: 0.285 - ETA: 1:48 - loss: 0.323 - ETA: 1:45 - loss: 0.347 - ETA: 1:50 - loss: 0.341 - ETA: 1:48 - loss: 0.335 - ETA: 1:44 - loss: 0.333 - ETA: 1:42 - loss: 0.329 - ETA: 1:40 - loss: 0.330 - ETA: 1:40 - loss: 0.329 - ETA: 1:39 - loss: 0.329 - ETA: 1:37 - loss: 0.329 - ETA: 1:37 - loss: 0.330 - ETA: 1:37 - loss: 0.329 - ETA: 1:38 - loss: 0.329 - ETA: 1:38 - loss: 0.328 - ETA: 1:38 - loss: 0.329 - ETA: 1:38 - loss: 0.330 - ETA: 1:36 - loss: 0.329 - ETA: 1:35 - loss: 0.328 - ETA: 1:34 - loss: 0.327 - ETA: 1:32 - loss: 0.326 - ETA: 1:31 - loss: 0.325 - ETA: 1:30 - loss: 0.324 - ETA: 1:29 - loss: 0.323 - ETA: 1:28 - loss: 0.322 - ETA: 1:28 - loss: 0.322 - ETA: 1:27 - loss: 0.321 - ETA: 1:26 - loss: 0.322 - ETA: 1:25 - loss: 0.322 - ETA: 1:24 - loss: 0.322 - ETA: 1:23 - loss: 0.322 - ETA: 1:22 - loss: 0.322 - ETA: 1:21 - loss: 0.322 - ETA: 1:20 - loss: 0.323 - ETA: 1:18 - loss: 0.323 - ETA: 1:17 - loss: 0.324 - ETA: 1:16 - loss: 0.325 - ETA: 1:15 - loss: 0.325 - ETA: 1:14 - loss: 0.326 - ETA: 1:13 - loss: 0.327 - ETA: 1:12 - loss: 0.327 - ETA: 1:11 - loss: 0.328 - ETA: 1:10 - loss: 0.328 - ETA: 1:09 - loss: 0.328 - ETA: 1:08 - loss: 0.328 - ETA: 1:07 - loss: 0.329 - ETA: 1:06 - loss: 0.329 - ETA: 1:05 - loss: 0.330 - ETA: 1:04 - loss: 0.330 - ETA: 1:03 - loss: 0.330 - ETA: 1:02 - loss: 0.331 - ETA: 1:01 - loss: 0.331 - ETA: 1:00 - loss: 0.332 - ETA: 59s - loss: 0.332 - ETA: 58s - loss: 0.33 - ETA: 57s - loss: 0.33 - ETA: 56s - loss: 0.33 - ETA: 54s - loss: 0.33 - ETA: 54s - loss: 0.33 - ETA: 53s - loss: 0.33 - ETA: 52s - loss: 0.33 - ETA: 51s - loss: 0.33 - ETA: 50s - loss: 0.33 - ETA: 49s - loss: 0.33 - ETA: 47s - loss: 0.33 - ETA: 46s - loss: 0.33 - ETA: 45s - loss: 0.33 - ETA: 45s - loss: 0.33 - ETA: 44s - loss: 0.33 - ETA: 43s - loss: 0.33 - ETA: 42s - loss: 0.34 - ETA: 41s - loss: 0.34 - ETA: 40s - loss: 0.34 - ETA: 39s - loss: 0.34 - ETA: 38s - loss: 0.34 - ETA: 37s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 35s - loss: 0.34 - ETA: 34s - loss: 0.34 - ETA: 33s - loss: 0.34 - ETA: 32s - loss: 0.34 - ETA: 31s - loss: 0.34 - ETA: 30s - loss: 0.34 - ETA: 29s - loss: 0.34 - ETA: 28s - loss: 0.34 - ETA: 26s - loss: 0.34 - ETA: 25s - loss: 0.34 - ETA: 24s - loss: 0.34 - ETA: 23s - loss: 0.35 - ETA: 22s - loss: 0.35 - ETA: 21s - loss: 0.35 - ETA: 20s - loss: 0.35 - ETA: 19s - loss: 0.35 - ETA: 18s - loss: 0.35 - ETA: 17s - loss: 0.35 - ETA: 16s - loss: 0.35 - ETA: 15s - loss: 0.35 - ETA: 14s - loss: 0.35 - ETA: 12s - loss: 0.35 - ETA: 11s - loss: 0.35 - ETA: 10s - loss: 0.35 - ETA: 9s - loss: 0.3580 - ETA: 8s - loss: 0.358 - ETA: 7s - loss: 0.359 - ETA: 6s - loss: 0.359 - ETA: 5s - loss: 0.359 - ETA: 4s - loss: 0.360 - ETA: 3s - loss: 0.360 - ETA: 2s - loss: 0.361 - ETA: 1s - loss: 0.361 - ETA: 0s - loss: 0.361 - 120s 1s/step - loss: 0.3619 - val_loss: 0.1254\n",
      "\n",
      "Epoch 00013: val_loss did not improve from 0.12484\n",
      "Epoch 14/30\n",
      "111/111 [==============================] - ETA: 1:43 - loss: 0.250 - ETA: 1:31 - loss: 0.220 - ETA: 1:34 - loss: 0.218 - ETA: 1:38 - loss: 0.217 - ETA: 1:42 - loss: 0.232 - ETA: 1:41 - loss: 0.238 - ETA: 1:38 - loss: 0.246 - ETA: 1:36 - loss: 0.257 - ETA: 1:34 - loss: 0.266 - ETA: 1:34 - loss: 0.273 - ETA: 1:34 - loss: 0.282 - ETA: 1:32 - loss: 0.286 - ETA: 1:31 - loss: 0.289 - ETA: 1:31 - loss: 0.290 - ETA: 1:32 - loss: 0.291 - ETA: 1:31 - loss: 0.292 - ETA: 1:30 - loss: 0.293 - ETA: 1:29 - loss: 0.293 - ETA: 1:28 - loss: 0.292 - ETA: 1:27 - loss: 0.292 - ETA: 1:27 - loss: 0.292 - ETA: 1:26 - loss: 0.293 - ETA: 1:25 - loss: 0.293 - ETA: 1:24 - loss: 0.293 - ETA: 1:23 - loss: 0.295 - ETA: 1:22 - loss: 0.297 - ETA: 1:21 - loss: 0.298 - ETA: 1:20 - loss: 0.299 - ETA: 1:19 - loss: 0.300 - ETA: 1:18 - loss: 0.301 - ETA: 1:17 - loss: 0.302 - ETA: 1:16 - loss: 0.303 - ETA: 1:15 - loss: 0.304 - ETA: 1:15 - loss: 0.304 - ETA: 1:14 - loss: 0.305 - ETA: 1:13 - loss: 0.306 - ETA: 1:12 - loss: 0.306 - ETA: 1:10 - loss: 0.307 - ETA: 1:09 - loss: 0.307 - ETA: 1:08 - loss: 0.308 - ETA: 1:07 - loss: 0.308 - ETA: 1:06 - loss: 0.308 - ETA: 1:05 - loss: 0.309 - ETA: 1:04 - loss: 0.309 - ETA: 1:03 - loss: 0.309 - ETA: 1:02 - loss: 0.310 - ETA: 1:01 - loss: 0.310 - ETA: 1:00 - loss: 0.310 - ETA: 59s - loss: 0.311 - ETA: 58s - loss: 0.31 - ETA: 58s - loss: 0.31 - ETA: 57s - loss: 0.31 - ETA: 56s - loss: 0.31 - ETA: 55s - loss: 0.31 - ETA: 54s - loss: 0.31 - ETA: 53s - loss: 0.31 - ETA: 52s - loss: 0.31 - ETA: 51s - loss: 0.31 - ETA: 50s - loss: 0.31 - ETA: 49s - loss: 0.31 - ETA: 48s - loss: 0.31 - ETA: 47s - loss: 0.31 - ETA: 46s - loss: 0.31 - ETA: 45s - loss: 0.32 - ETA: 44s - loss: 0.32 - ETA: 43s - loss: 0.32 - ETA: 42s - loss: 0.32 - ETA: 41s - loss: 0.32 - ETA: 40s - loss: 0.32 - ETA: 39s - loss: 0.32 - ETA: 39s - loss: 0.32 - ETA: 38s - loss: 0.32 - ETA: 37s - loss: 0.32 - ETA: 36s - loss: 0.32 - ETA: 35s - loss: 0.32 - ETA: 34s - loss: 0.32 - ETA: 33s - loss: 0.32 - ETA: 32s - loss: 0.32 - ETA: 31s - loss: 0.32 - ETA: 30s - loss: 0.32 - ETA: 29s - loss: 0.32 - ETA: 28s - loss: 0.32 - ETA: 27s - loss: 0.32 - ETA: 26s - loss: 0.32 - ETA: 25s - loss: 0.32 - ETA: 24s - loss: 0.32 - ETA: 23s - loss: 0.32 - ETA: 22s - loss: 0.32 - ETA: 21s - loss: 0.32 - ETA: 20s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 17s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 13s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 9s - loss: 0.3337 - ETA: 8s - loss: 0.334 - ETA: 8s - loss: 0.334 - ETA: 7s - loss: 0.334 - ETA: 6s - loss: 0.335 - ETA: 5s - loss: 0.335 - ETA: 4s - loss: 0.336 - ETA: 3s - loss: 0.336 - ETA: 2s - loss: 0.336 - ETA: 1s - loss: 0.337 - ETA: 0s - loss: 0.337 - 113s 1s/step - loss: 0.3380 - val_loss: 0.1299\n",
      "\n",
      "Epoch 00014: val_loss did not improve from 0.12484\n",
      "Epoch 15/30\n",
      "111/111 [==============================] - ETA: 1:58 - loss: 0.087 - ETA: 1:50 - loss: 0.168 - ETA: 1:42 - loss: 0.177 - ETA: 1:37 - loss: 0.190 - ETA: 1:35 - loss: 0.201 - ETA: 1:33 - loss: 0.252 - ETA: 1:33 - loss: 0.282 - ETA: 1:33 - loss: 0.302 - ETA: 1:32 - loss: 0.321 - ETA: 1:31 - loss: 0.342 - ETA: 1:30 - loss: 0.356 - ETA: 1:30 - loss: 0.368 - ETA: 1:29 - loss: 0.376 - ETA: 1:28 - loss: 0.382 - ETA: 1:27 - loss: 0.395 - ETA: 1:26 - loss: 0.405 - ETA: 1:25 - loss: 0.413 - ETA: 1:25 - loss: 0.419 - ETA: 1:24 - loss: 0.425 - ETA: 1:24 - loss: 0.429 - ETA: 1:23 - loss: 0.432 - ETA: 1:22 - loss: 0.434 - ETA: 1:21 - loss: 0.436 - ETA: 1:20 - loss: 0.438 - ETA: 1:19 - loss: 0.440 - ETA: 1:19 - loss: 0.441 - ETA: 1:18 - loss: 0.443 - ETA: 1:17 - loss: 0.443 - ETA: 1:16 - loss: 0.444 - ETA: 1:15 - loss: 0.444 - ETA: 1:14 - loss: 0.445 - ETA: 1:13 - loss: 0.444 - ETA: 1:13 - loss: 0.444 - ETA: 1:12 - loss: 0.444 - ETA: 1:11 - loss: 0.444 - ETA: 1:10 - loss: 0.443 - ETA: 1:09 - loss: 0.444 - ETA: 1:08 - loss: 0.444 - ETA: 1:07 - loss: 0.444 - ETA: 1:06 - loss: 0.445 - ETA: 1:06 - loss: 0.445 - ETA: 1:05 - loss: 0.445 - ETA: 1:04 - loss: 0.445 - ETA: 1:03 - loss: 0.445 - ETA: 1:02 - loss: 0.445 - ETA: 1:02 - loss: 0.445 - ETA: 1:01 - loss: 0.445 - ETA: 1:00 - loss: 0.445 - ETA: 59s - loss: 0.444 - ETA: 58s - loss: 0.44 - ETA: 57s - loss: 0.44 - ETA: 56s - loss: 0.44 - ETA: 56s - loss: 0.44 - ETA: 55s - loss: 0.44 - ETA: 54s - loss: 0.44 - ETA: 53s - loss: 0.44 - ETA: 52s - loss: 0.44 - ETA: 51s - loss: 0.44 - ETA: 50s - loss: 0.44 - ETA: 49s - loss: 0.44 - ETA: 49s - loss: 0.44 - ETA: 48s - loss: 0.44 - ETA: 47s - loss: 0.44 - ETA: 46s - loss: 0.44 - ETA: 45s - loss: 0.44 - ETA: 44s - loss: 0.44 - ETA: 43s - loss: 0.43 - ETA: 42s - loss: 0.43 - ETA: 41s - loss: 0.43 - ETA: 40s - loss: 0.43 - ETA: 39s - loss: 0.43 - ETA: 38s - loss: 0.43 - ETA: 37s - loss: 0.43 - ETA: 36s - loss: 0.43 - ETA: 35s - loss: 0.43 - ETA: 34s - loss: 0.43 - ETA: 33s - loss: 0.43 - ETA: 32s - loss: 0.43 - ETA: 32s - loss: 0.43 - ETA: 31s - loss: 0.43 - ETA: 30s - loss: 0.43 - ETA: 29s - loss: 0.43 - ETA: 28s - loss: 0.43 - ETA: 27s - loss: 0.43 - ETA: 26s - loss: 0.43 - ETA: 25s - loss: 0.42 - ETA: 24s - loss: 0.42 - ETA: 23s - loss: 0.42 - ETA: 22s - loss: 0.42 - ETA: 21s - loss: 0.42 - ETA: 20s - loss: 0.42 - ETA: 19s - loss: 0.42 - ETA: 18s - loss: 0.42 - ETA: 17s - loss: 0.42 - ETA: 16s - loss: 0.42 - ETA: 15s - loss: 0.42 - ETA: 14s - loss: 0.42 - ETA: 13s - loss: 0.42 - ETA: 12s - loss: 0.42 - ETA: 11s - loss: 0.42 - ETA: 10s - loss: 0.42 - ETA: 9s - loss: 0.4225 - ETA: 8s - loss: 0.422 - ETA: 7s - loss: 0.421 - ETA: 6s - loss: 0.421 - ETA: 5s - loss: 0.420 - ETA: 4s - loss: 0.420 - ETA: 3s - loss: 0.420 - ETA: 2s - loss: 0.420 - ETA: 1s - loss: 0.420 - ETA: 0s - loss: 0.420 - 113s 1s/step - loss: 0.4199 - val_loss: 0.1226\n",
      "\n",
      "Epoch 00015: val_loss improved from 0.12484 to 0.12261, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 16/30\n",
      "111/111 [==============================] - ETA: 1:26 - loss: 0.387 - ETA: 1:41 - loss: 0.390 - ETA: 1:38 - loss: 0.390 - ETA: 1:38 - loss: 0.385 - ETA: 1:35 - loss: 0.386 - ETA: 1:35 - loss: 0.379 - ETA: 1:34 - loss: 0.398 - ETA: 1:34 - loss: 0.407 - ETA: 1:35 - loss: 0.413 - ETA: 1:37 - loss: 0.422 - ETA: 1:37 - loss: 0.428 - ETA: 1:37 - loss: 0.430 - ETA: 1:37 - loss: 0.431 - ETA: 1:35 - loss: 0.429 - ETA: 1:34 - loss: 0.428 - ETA: 1:33 - loss: 0.427 - ETA: 1:32 - loss: 0.425 - ETA: 1:31 - loss: 0.424 - ETA: 1:30 - loss: 0.424 - ETA: 1:29 - loss: 0.423 - ETA: 1:28 - loss: 0.422 - ETA: 1:28 - loss: 0.422 - ETA: 1:27 - loss: 0.421 - ETA: 1:26 - loss: 0.420 - ETA: 1:26 - loss: 0.420 - ETA: 1:26 - loss: 0.420 - ETA: 1:26 - loss: 0.420 - ETA: 1:25 - loss: 0.419 - ETA: 1:24 - loss: 0.420 - ETA: 1:23 - loss: 0.419 - ETA: 1:22 - loss: 0.419 - ETA: 1:21 - loss: 0.419 - ETA: 1:20 - loss: 0.418 - ETA: 1:19 - loss: 0.418 - ETA: 1:18 - loss: 0.418 - ETA: 1:17 - loss: 0.417 - ETA: 1:16 - loss: 0.417 - ETA: 1:15 - loss: 0.416 - ETA: 1:14 - loss: 0.415 - ETA: 1:14 - loss: 0.414 - ETA: 1:13 - loss: 0.413 - ETA: 1:12 - loss: 0.413 - ETA: 1:12 - loss: 0.412 - ETA: 1:11 - loss: 0.412 - ETA: 1:10 - loss: 0.412 - ETA: 1:09 - loss: 0.412 - ETA: 1:08 - loss: 0.411 - ETA: 1:07 - loss: 0.411 - ETA: 1:07 - loss: 0.411 - ETA: 1:06 - loss: 0.411 - ETA: 1:05 - loss: 0.410 - ETA: 1:04 - loss: 0.410 - ETA: 1:02 - loss: 0.410 - ETA: 1:01 - loss: 0.409 - ETA: 1:00 - loss: 0.409 - ETA: 59s - loss: 0.409 - ETA: 58s - loss: 0.40 - ETA: 57s - loss: 0.40 - ETA: 56s - loss: 0.40 - ETA: 55s - loss: 0.41 - ETA: 54s - loss: 0.41 - ETA: 52s - loss: 0.41 - ETA: 51s - loss: 0.41 - ETA: 50s - loss: 0.41 - ETA: 49s - loss: 0.41 - ETA: 48s - loss: 0.41 - ETA: 47s - loss: 0.41 - ETA: 46s - loss: 0.41 - ETA: 45s - loss: 0.41 - ETA: 43s - loss: 0.41 - ETA: 42s - loss: 0.41 - ETA: 41s - loss: 0.41 - ETA: 40s - loss: 0.41 - ETA: 39s - loss: 0.41 - ETA: 38s - loss: 0.41 - ETA: 37s - loss: 0.41 - ETA: 36s - loss: 0.41 - ETA: 34s - loss: 0.41 - ETA: 33s - loss: 0.41 - ETA: 32s - loss: 0.41 - ETA: 31s - loss: 0.40 - ETA: 30s - loss: 0.40 - ETA: 29s - loss: 0.40 - ETA: 28s - loss: 0.40 - ETA: 27s - loss: 0.40 - ETA: 26s - loss: 0.40 - ETA: 25s - loss: 0.40 - ETA: 24s - loss: 0.40 - ETA: 23s - loss: 0.40 - ETA: 22s - loss: 0.40 - ETA: 21s - loss: 0.40 - ETA: 20s - loss: 0.40 - ETA: 19s - loss: 0.40 - ETA: 17s - loss: 0.40 - ETA: 16s - loss: 0.40 - ETA: 15s - loss: 0.40 - ETA: 14s - loss: 0.40 - ETA: 13s - loss: 0.40 - ETA: 12s - loss: 0.40 - ETA: 11s - loss: 0.40 - ETA: 10s - loss: 0.40 - ETA: 9s - loss: 0.4063 - ETA: 8s - loss: 0.406 - ETA: 7s - loss: 0.406 - ETA: 6s - loss: 0.406 - ETA: 5s - loss: 0.405 - ETA: 4s - loss: 0.405 - ETA: 3s - loss: 0.405 - ETA: 2s - loss: 0.405 - ETA: 1s - loss: 0.405 - ETA: 0s - loss: 0.405 - 117s 1s/step - loss: 0.4059 - val_loss: 0.1255\n",
      "\n",
      "Epoch 00016: val_loss did not improve from 0.12261\n",
      "Epoch 17/30\n",
      "111/111 [==============================] - ETA: 1:40 - loss: 0.264 - ETA: 1:30 - loss: 0.229 - ETA: 1:35 - loss: 0.229 - ETA: 1:39 - loss: 0.244 - ETA: 1:43 - loss: 0.260 - ETA: 1:43 - loss: 0.266 - ETA: 1:44 - loss: 0.268 - ETA: 1:42 - loss: 0.268 - ETA: 1:42 - loss: 0.268 - ETA: 1:41 - loss: 0.273 - ETA: 1:39 - loss: 0.282 - ETA: 1:38 - loss: 0.287 - ETA: 1:37 - loss: 0.291 - ETA: 1:35 - loss: 0.293 - ETA: 1:35 - loss: 0.296 - ETA: 1:35 - loss: 0.298 - ETA: 1:34 - loss: 0.299 - ETA: 1:33 - loss: 0.300 - ETA: 1:32 - loss: 0.302 - ETA: 1:30 - loss: 0.303 - ETA: 1:29 - loss: 0.304 - ETA: 1:29 - loss: 0.304 - ETA: 1:28 - loss: 0.305 - ETA: 1:28 - loss: 0.305 - ETA: 1:27 - loss: 0.305 - ETA: 1:27 - loss: 0.305 - ETA: 1:26 - loss: 0.305 - ETA: 1:25 - loss: 0.305 - ETA: 1:25 - loss: 0.305 - ETA: 1:24 - loss: 0.305 - ETA: 1:23 - loss: 0.304 - ETA: 1:22 - loss: 0.304 - ETA: 1:21 - loss: 0.304 - ETA: 1:20 - loss: 0.303 - ETA: 1:19 - loss: 0.303 - ETA: 1:18 - loss: 0.303 - ETA: 1:17 - loss: 0.303 - ETA: 1:16 - loss: 0.303 - ETA: 1:15 - loss: 0.303 - ETA: 1:14 - loss: 0.303 - ETA: 1:13 - loss: 0.303 - ETA: 1:12 - loss: 0.303 - ETA: 1:11 - loss: 0.303 - ETA: 1:10 - loss: 0.303 - ETA: 1:09 - loss: 0.303 - ETA: 1:08 - loss: 0.304 - ETA: 1:06 - loss: 0.304 - ETA: 1:05 - loss: 0.304 - ETA: 1:04 - loss: 0.304 - ETA: 1:03 - loss: 0.304 - ETA: 1:02 - loss: 0.304 - ETA: 1:02 - loss: 0.304 - ETA: 1:01 - loss: 0.304 - ETA: 1:00 - loss: 0.304 - ETA: 59s - loss: 0.305 - ETA: 58s - loss: 0.30 - ETA: 58s - loss: 0.30 - ETA: 57s - loss: 0.30 - ETA: 56s - loss: 0.30 - ETA: 55s - loss: 0.30 - ETA: 54s - loss: 0.30 - ETA: 53s - loss: 0.30 - ETA: 52s - loss: 0.30 - ETA: 51s - loss: 0.30 - ETA: 50s - loss: 0.30 - ETA: 49s - loss: 0.30 - ETA: 48s - loss: 0.30 - ETA: 47s - loss: 0.30 - ETA: 46s - loss: 0.30 - ETA: 45s - loss: 0.30 - ETA: 44s - loss: 0.30 - ETA: 43s - loss: 0.30 - ETA: 42s - loss: 0.30 - ETA: 41s - loss: 0.30 - ETA: 40s - loss: 0.31 - ETA: 39s - loss: 0.31 - ETA: 38s - loss: 0.31 - ETA: 36s - loss: 0.31 - ETA: 35s - loss: 0.31 - ETA: 34s - loss: 0.31 - ETA: 33s - loss: 0.31 - ETA: 32s - loss: 0.31 - ETA: 30s - loss: 0.31 - ETA: 29s - loss: 0.31 - ETA: 28s - loss: 0.31 - ETA: 27s - loss: 0.31 - ETA: 26s - loss: 0.31 - ETA: 25s - loss: 0.31 - ETA: 24s - loss: 0.32 - ETA: 22s - loss: 0.32 - ETA: 21s - loss: 0.32 - ETA: 20s - loss: 0.32 - ETA: 19s - loss: 0.32 - ETA: 18s - loss: 0.32 - ETA: 17s - loss: 0.32 - ETA: 16s - loss: 0.32 - ETA: 15s - loss: 0.32 - ETA: 14s - loss: 0.32 - ETA: 13s - loss: 0.32 - ETA: 11s - loss: 0.32 - ETA: 10s - loss: 0.32 - ETA: 9s - loss: 0.3269 - ETA: 8s - loss: 0.327 - ETA: 7s - loss: 0.327 - ETA: 6s - loss: 0.328 - ETA: 5s - loss: 0.328 - ETA: 4s - loss: 0.328 - ETA: 3s - loss: 0.329 - ETA: 2s - loss: 0.329 - ETA: 1s - loss: 0.329 - ETA: 0s - loss: 0.329 - 120s 1s/step - loss: 0.3302 - val_loss: 0.1230\n",
      "\n",
      "Epoch 00017: val_loss did not improve from 0.12261\n",
      "Epoch 18/30\n",
      "111/111 [==============================] - ETA: 1:43 - loss: 0.182 - ETA: 1:40 - loss: 0.156 - ETA: 1:37 - loss: 0.159 - ETA: 1:35 - loss: 0.160 - ETA: 1:33 - loss: 0.169 - ETA: 1:31 - loss: 0.193 - ETA: 1:31 - loss: 0.211 - ETA: 1:30 - loss: 0.226 - ETA: 1:30 - loss: 0.235 - ETA: 1:31 - loss: 0.240 - ETA: 1:33 - loss: 0.244 - ETA: 1:34 - loss: 0.246 - ETA: 1:36 - loss: 0.259 - ETA: 1:39 - loss: 0.270 - ETA: 1:39 - loss: 0.280 - ETA: 1:40 - loss: 0.288 - ETA: 1:40 - loss: 0.295 - ETA: 1:39 - loss: 0.301 - ETA: 1:38 - loss: 0.306 - ETA: 1:37 - loss: 0.311 - ETA: 1:36 - loss: 0.315 - ETA: 1:36 - loss: 0.318 - ETA: 1:36 - loss: 0.323 - ETA: 1:36 - loss: 0.327 - ETA: 1:35 - loss: 0.330 - ETA: 1:34 - loss: 0.333 - ETA: 1:34 - loss: 0.336 - ETA: 1:33 - loss: 0.339 - ETA: 1:33 - loss: 0.341 - ETA: 1:33 - loss: 0.343 - ETA: 1:32 - loss: 0.345 - ETA: 1:31 - loss: 0.347 - ETA: 1:30 - loss: 0.348 - ETA: 1:29 - loss: 0.349 - ETA: 1:28 - loss: 0.350 - ETA: 1:27 - loss: 0.351 - ETA: 1:26 - loss: 0.352 - ETA: 1:25 - loss: 0.353 - ETA: 1:24 - loss: 0.354 - ETA: 1:23 - loss: 0.355 - ETA: 1:22 - loss: 0.355 - ETA: 1:21 - loss: 0.356 - ETA: 1:20 - loss: 0.357 - ETA: 1:19 - loss: 0.358 - ETA: 1:18 - loss: 0.359 - ETA: 1:17 - loss: 0.360 - ETA: 1:16 - loss: 0.361 - ETA: 1:15 - loss: 0.363 - ETA: 1:14 - loss: 0.364 - ETA: 1:13 - loss: 0.365 - ETA: 1:12 - loss: 0.366 - ETA: 1:11 - loss: 0.367 - ETA: 1:10 - loss: 0.368 - ETA: 1:09 - loss: 0.369 - ETA: 1:08 - loss: 0.369 - ETA: 1:07 - loss: 0.370 - ETA: 1:07 - loss: 0.370 - ETA: 1:06 - loss: 0.371 - ETA: 1:05 - loss: 0.371 - ETA: 1:04 - loss: 0.372 - ETA: 1:03 - loss: 0.372 - ETA: 1:02 - loss: 0.372 - ETA: 1:01 - loss: 0.372 - ETA: 1:00 - loss: 0.372 - ETA: 59s - loss: 0.372 - ETA: 57s - loss: 0.37 - ETA: 56s - loss: 0.37 - ETA: 55s - loss: 0.37 - ETA: 53s - loss: 0.37 - ETA: 52s - loss: 0.37 - ETA: 51s - loss: 0.37 - ETA: 49s - loss: 0.37 - ETA: 48s - loss: 0.37 - ETA: 47s - loss: 0.37 - ETA: 46s - loss: 0.37 - ETA: 44s - loss: 0.37 - ETA: 43s - loss: 0.37 - ETA: 42s - loss: 0.37 - ETA: 40s - loss: 0.37 - ETA: 39s - loss: 0.37 - ETA: 38s - loss: 0.37 - ETA: 37s - loss: 0.37 - ETA: 35s - loss: 0.37 - ETA: 34s - loss: 0.37 - ETA: 33s - loss: 0.37 - ETA: 31s - loss: 0.37 - ETA: 30s - loss: 0.37 - ETA: 29s - loss: 0.37 - ETA: 27s - loss: 0.37 - ETA: 26s - loss: 0.37 - ETA: 25s - loss: 0.37 - ETA: 23s - loss: 0.37 - ETA: 22s - loss: 0.37 - ETA: 21s - loss: 0.36 - ETA: 20s - loss: 0.36 - ETA: 18s - loss: 0.36 - ETA: 17s - loss: 0.36 - ETA: 16s - loss: 0.36 - ETA: 15s - loss: 0.36 - ETA: 13s - loss: 0.36 - ETA: 12s - loss: 0.36 - ETA: 11s - loss: 0.36 - ETA: 9s - loss: 0.3673 - ETA: 8s - loss: 0.367 - ETA: 7s - loss: 0.366 - ETA: 6s - loss: 0.366 - ETA: 4s - loss: 0.366 - ETA: 3s - loss: 0.366 - ETA: 2s - loss: 0.366 - ETA: 1s - loss: 0.365 - ETA: 0s - loss: 0.365 - 137s 1s/step - loss: 0.3656 - val_loss: 0.1203\n",
      "\n",
      "Epoch 00018: val_loss improved from 0.12261 to 0.12031, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 19/30\n",
      "111/111 [==============================] - ETA: 1:50 - loss: 0.355 - ETA: 2:05 - loss: 0.345 - ETA: 2:12 - loss: 0.319 - ETA: 2:15 - loss: 0.306 - ETA: 2:13 - loss: 0.300 - ETA: 2:11 - loss: 0.307 - ETA: 2:10 - loss: 0.313 - ETA: 2:09 - loss: 0.318 - ETA: 2:08 - loss: 0.319 - ETA: 2:09 - loss: 0.318 - ETA: 2:10 - loss: 0.317 - ETA: 2:10 - loss: 0.316 - ETA: 2:09 - loss: 0.314 - ETA: 2:08 - loss: 0.311 - ETA: 2:06 - loss: 0.309 - ETA: 2:04 - loss: 0.308 - ETA: 2:01 - loss: 0.307 - ETA: 1:59 - loss: 0.305 - ETA: 1:57 - loss: 0.304 - ETA: 1:55 - loss: 0.302 - ETA: 1:53 - loss: 0.301 - ETA: 1:51 - loss: 0.301 - ETA: 1:48 - loss: 0.300 - ETA: 1:46 - loss: 0.299 - ETA: 1:44 - loss: 0.298 - ETA: 1:42 - loss: 0.297 - ETA: 1:40 - loss: 0.297 - ETA: 1:38 - loss: 0.297 - ETA: 1:36 - loss: 0.297 - ETA: 1:35 - loss: 0.298 - ETA: 1:33 - loss: 0.298 - ETA: 1:32 - loss: 0.298 - ETA: 1:31 - loss: 0.299 - ETA: 1:30 - loss: 0.298 - ETA: 1:29 - loss: 0.298 - ETA: 1:28 - loss: 0.298 - ETA: 1:27 - loss: 0.299 - ETA: 1:26 - loss: 0.300 - ETA: 1:25 - loss: 0.301 - ETA: 1:23 - loss: 0.301 - ETA: 1:22 - loss: 0.302 - ETA: 1:21 - loss: 0.302 - ETA: 1:19 - loss: 0.303 - ETA: 1:18 - loss: 0.303 - ETA: 1:17 - loss: 0.304 - ETA: 1:15 - loss: 0.304 - ETA: 1:14 - loss: 0.305 - ETA: 1:12 - loss: 0.305 - ETA: 1:11 - loss: 0.306 - ETA: 1:10 - loss: 0.306 - ETA: 1:08 - loss: 0.306 - ETA: 1:07 - loss: 0.307 - ETA: 1:06 - loss: 0.307 - ETA: 1:05 - loss: 0.308 - ETA: 1:03 - loss: 0.308 - ETA: 1:02 - loss: 0.309 - ETA: 1:01 - loss: 0.309 - ETA: 1:00 - loss: 0.310 - ETA: 59s - loss: 0.310 - ETA: 57s - loss: 0.31 - ETA: 56s - loss: 0.31 - ETA: 55s - loss: 0.31 - ETA: 54s - loss: 0.31 - ETA: 52s - loss: 0.31 - ETA: 51s - loss: 0.31 - ETA: 50s - loss: 0.31 - ETA: 49s - loss: 0.31 - ETA: 48s - loss: 0.31 - ETA: 46s - loss: 0.31 - ETA: 45s - loss: 0.31 - ETA: 44s - loss: 0.31 - ETA: 43s - loss: 0.31 - ETA: 42s - loss: 0.31 - ETA: 41s - loss: 0.31 - ETA: 40s - loss: 0.31 - ETA: 39s - loss: 0.31 - ETA: 37s - loss: 0.31 - ETA: 36s - loss: 0.31 - ETA: 35s - loss: 0.31 - ETA: 34s - loss: 0.31 - ETA: 33s - loss: 0.31 - ETA: 32s - loss: 0.31 - ETA: 31s - loss: 0.31 - ETA: 29s - loss: 0.32 - ETA: 28s - loss: 0.32 - ETA: 27s - loss: 0.32 - ETA: 26s - loss: 0.32 - ETA: 25s - loss: 0.32 - ETA: 24s - loss: 0.32 - ETA: 23s - loss: 0.32 - ETA: 22s - loss: 0.32 - ETA: 21s - loss: 0.32 - ETA: 20s - loss: 0.32 - ETA: 18s - loss: 0.32 - ETA: 17s - loss: 0.32 - ETA: 16s - loss: 0.32 - ETA: 15s - loss: 0.32 - ETA: 14s - loss: 0.32 - ETA: 13s - loss: 0.32 - ETA: 12s - loss: 0.32 - ETA: 11s - loss: 0.32 - ETA: 9s - loss: 0.3242 - ETA: 8s - loss: 0.324 - ETA: 7s - loss: 0.324 - ETA: 6s - loss: 0.324 - ETA: 5s - loss: 0.324 - ETA: 4s - loss: 0.325 - ETA: 3s - loss: 0.325 - ETA: 2s - loss: 0.325 - ETA: 1s - loss: 0.325 - ETA: 0s - loss: 0.325 - 124s 1s/step - loss: 0.3256 - val_loss: 0.1181\n",
      "\n",
      "Epoch 00019: val_loss improved from 0.12031 to 0.11808, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 20/30\n",
      "111/111 [==============================] - ETA: 1:31 - loss: 0.079 - ETA: 1:42 - loss: 0.084 - ETA: 1:39 - loss: 0.101 - ETA: 1:39 - loss: 0.109 - ETA: 1:40 - loss: 0.144 - ETA: 1:40 - loss: 0.171 - ETA: 1:41 - loss: 0.186 - ETA: 1:41 - loss: 0.197 - ETA: 1:41 - loss: 0.203 - ETA: 1:39 - loss: 0.206 - ETA: 1:39 - loss: 0.209 - ETA: 1:39 - loss: 0.211 - ETA: 1:38 - loss: 0.218 - ETA: 1:37 - loss: 0.229 - ETA: 1:37 - loss: 0.240 - ETA: 1:37 - loss: 0.248 - ETA: 1:37 - loss: 0.255 - ETA: 1:36 - loss: 0.261 - ETA: 1:36 - loss: 0.266 - ETA: 1:35 - loss: 0.271 - ETA: 1:34 - loss: 0.275 - ETA: 1:33 - loss: 0.279 - ETA: 1:32 - loss: 0.282 - ETA: 1:31 - loss: 0.284 - ETA: 1:30 - loss: 0.287 - ETA: 1:29 - loss: 0.290 - ETA: 1:29 - loss: 0.292 - ETA: 1:28 - loss: 0.295 - ETA: 1:27 - loss: 0.297 - ETA: 1:25 - loss: 0.298 - ETA: 1:24 - loss: 0.299 - ETA: 1:23 - loss: 0.301 - ETA: 1:21 - loss: 0.302 - ETA: 1:20 - loss: 0.302 - ETA: 1:19 - loss: 0.303 - ETA: 1:18 - loss: 0.303 - ETA: 1:17 - loss: 0.304 - ETA: 1:16 - loss: 0.304 - ETA: 1:15 - loss: 0.304 - ETA: 1:14 - loss: 0.304 - ETA: 1:13 - loss: 0.304 - ETA: 1:11 - loss: 0.304 - ETA: 1:10 - loss: 0.305 - ETA: 1:10 - loss: 0.305 - ETA: 1:09 - loss: 0.305 - ETA: 1:07 - loss: 0.305 - ETA: 1:06 - loss: 0.305 - ETA: 1:06 - loss: 0.305 - ETA: 1:05 - loss: 0.305 - ETA: 1:04 - loss: 0.305 - ETA: 1:03 - loss: 0.305 - ETA: 1:02 - loss: 0.305 - ETA: 1:01 - loss: 0.305 - ETA: 59s - loss: 0.305 - ETA: 58s - loss: 0.30 - ETA: 58s - loss: 0.30 - ETA: 56s - loss: 0.30 - ETA: 55s - loss: 0.30 - ETA: 54s - loss: 0.30 - ETA: 53s - loss: 0.30 - ETA: 52s - loss: 0.30 - ETA: 51s - loss: 0.30 - ETA: 50s - loss: 0.30 - ETA: 49s - loss: 0.30 - ETA: 48s - loss: 0.30 - ETA: 47s - loss: 0.30 - ETA: 46s - loss: 0.30 - ETA: 45s - loss: 0.30 - ETA: 44s - loss: 0.30 - ETA: 43s - loss: 0.30 - ETA: 42s - loss: 0.30 - ETA: 41s - loss: 0.30 - ETA: 40s - loss: 0.30 - ETA: 39s - loss: 0.30 - ETA: 38s - loss: 0.30 - ETA: 37s - loss: 0.30 - ETA: 36s - loss: 0.30 - ETA: 34s - loss: 0.30 - ETA: 33s - loss: 0.30 - ETA: 32s - loss: 0.30 - ETA: 31s - loss: 0.30 - ETA: 30s - loss: 0.30 - ETA: 29s - loss: 0.30 - ETA: 28s - loss: 0.30 - ETA: 27s - loss: 0.30 - ETA: 26s - loss: 0.30 - ETA: 25s - loss: 0.31 - ETA: 24s - loss: 0.31 - ETA: 23s - loss: 0.31 - ETA: 22s - loss: 0.31 - ETA: 21s - loss: 0.31 - ETA: 19s - loss: 0.31 - ETA: 18s - loss: 0.31 - ETA: 17s - loss: 0.31 - ETA: 16s - loss: 0.31 - ETA: 15s - loss: 0.31 - ETA: 14s - loss: 0.31 - ETA: 13s - loss: 0.31 - ETA: 12s - loss: 0.31 - ETA: 11s - loss: 0.31 - ETA: 10s - loss: 0.31 - ETA: 9s - loss: 0.3139 - ETA: 8s - loss: 0.314 - ETA: 7s - loss: 0.314 - ETA: 6s - loss: 0.314 - ETA: 5s - loss: 0.314 - ETA: 4s - loss: 0.314 - ETA: 3s - loss: 0.314 - ETA: 2s - loss: 0.314 - ETA: 1s - loss: 0.314 - ETA: 0s - loss: 0.315 - 118s 1s/step - loss: 0.3151 - val_loss: 0.1171\n",
      "\n",
      "Epoch 00020: val_loss improved from 0.11808 to 0.11709, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 21/30\n",
      "111/111 [==============================] - ETA: 2:10 - loss: 0.084 - ETA: 2:16 - loss: 0.092 - ETA: 2:13 - loss: 0.093 - ETA: 2:05 - loss: 0.102 - ETA: 2:04 - loss: 0.111 - ETA: 2:04 - loss: 0.128 - ETA: 2:03 - loss: 0.140 - ETA: 2:03 - loss: 0.150 - ETA: 2:03 - loss: 0.158 - ETA: 2:02 - loss: 0.162 - ETA: 2:02 - loss: 0.169 - ETA: 2:02 - loss: 0.174 - ETA: 2:04 - loss: 0.181 - ETA: 2:05 - loss: 0.186 - ETA: 2:05 - loss: 0.191 - ETA: 2:05 - loss: 0.195 - ETA: 2:05 - loss: 0.199 - ETA: 2:04 - loss: 0.201 - ETA: 2:03 - loss: 0.204 - ETA: 2:00 - loss: 0.206 - ETA: 1:57 - loss: 0.207 - ETA: 1:55 - loss: 0.210 - ETA: 1:52 - loss: 0.212 - ETA: 1:51 - loss: 0.214 - ETA: 1:49 - loss: 0.216 - ETA: 1:46 - loss: 0.218 - ETA: 1:44 - loss: 0.220 - ETA: 1:42 - loss: 0.223 - ETA: 1:40 - loss: 0.226 - ETA: 1:38 - loss: 0.228 - ETA: 1:37 - loss: 0.231 - ETA: 1:35 - loss: 0.234 - ETA: 1:34 - loss: 0.237 - ETA: 1:32 - loss: 0.239 - ETA: 1:31 - loss: 0.241 - ETA: 1:29 - loss: 0.243 - ETA: 1:27 - loss: 0.246 - ETA: 1:26 - loss: 0.247 - ETA: 1:24 - loss: 0.249 - ETA: 1:23 - loss: 0.251 - ETA: 1:21 - loss: 0.253 - ETA: 1:20 - loss: 0.254 - ETA: 1:18 - loss: 0.256 - ETA: 1:17 - loss: 0.257 - ETA: 1:15 - loss: 0.259 - ETA: 1:14 - loss: 0.260 - ETA: 1:13 - loss: 0.262 - ETA: 1:11 - loss: 0.263 - ETA: 1:10 - loss: 0.264 - ETA: 1:09 - loss: 0.265 - ETA: 1:07 - loss: 0.266 - ETA: 1:06 - loss: 0.268 - ETA: 1:04 - loss: 0.269 - ETA: 1:03 - loss: 0.270 - ETA: 1:02 - loss: 0.271 - ETA: 1:00 - loss: 0.272 - ETA: 59s - loss: 0.273 - ETA: 58s - loss: 0.27 - ETA: 57s - loss: 0.27 - ETA: 55s - loss: 0.27 - ETA: 54s - loss: 0.27 - ETA: 53s - loss: 0.27 - ETA: 52s - loss: 0.27 - ETA: 51s - loss: 0.28 - ETA: 49s - loss: 0.28 - ETA: 48s - loss: 0.28 - ETA: 47s - loss: 0.28 - ETA: 46s - loss: 0.28 - ETA: 45s - loss: 0.28 - ETA: 44s - loss: 0.28 - ETA: 43s - loss: 0.28 - ETA: 41s - loss: 0.28 - ETA: 40s - loss: 0.28 - ETA: 39s - loss: 0.28 - ETA: 38s - loss: 0.29 - ETA: 37s - loss: 0.29 - ETA: 36s - loss: 0.29 - ETA: 35s - loss: 0.29 - ETA: 34s - loss: 0.29 - ETA: 33s - loss: 0.29 - ETA: 32s - loss: 0.29 - ETA: 30s - loss: 0.29 - ETA: 29s - loss: 0.29 - ETA: 28s - loss: 0.29 - ETA: 27s - loss: 0.29 - ETA: 26s - loss: 0.30 - ETA: 25s - loss: 0.30 - ETA: 24s - loss: 0.30 - ETA: 23s - loss: 0.30 - ETA: 22s - loss: 0.30 - ETA: 21s - loss: 0.30 - ETA: 20s - loss: 0.30 - ETA: 19s - loss: 0.30 - ETA: 17s - loss: 0.30 - ETA: 16s - loss: 0.30 - ETA: 15s - loss: 0.30 - ETA: 14s - loss: 0.30 - ETA: 13s - loss: 0.31 - ETA: 12s - loss: 0.31 - ETA: 11s - loss: 0.31 - ETA: 10s - loss: 0.31 - ETA: 9s - loss: 0.3126 - ETA: 8s - loss: 0.313 - ETA: 7s - loss: 0.313 - ETA: 6s - loss: 0.314 - ETA: 5s - loss: 0.314 - ETA: 4s - loss: 0.315 - ETA: 3s - loss: 0.315 - ETA: 2s - loss: 0.316 - ETA: 1s - loss: 0.316 - ETA: 0s - loss: 0.317 - 118s 1s/step - loss: 0.3179 - val_loss: 0.1149\n",
      "\n",
      "Epoch 00021: val_loss improved from 0.11709 to 0.11495, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 22/30\n",
      "111/111 [==============================] - ETA: 1:23 - loss: 0.211 - ETA: 1:42 - loss: 0.260 - ETA: 1:38 - loss: 0.267 - ETA: 1:38 - loss: 0.267 - ETA: 1:39 - loss: 0.266 - ETA: 1:40 - loss: 0.272 - ETA: 1:38 - loss: 0.272 - ETA: 1:38 - loss: 0.277 - ETA: 1:39 - loss: 0.282 - ETA: 1:39 - loss: 0.283 - ETA: 1:38 - loss: 0.284 - ETA: 1:38 - loss: 0.284 - ETA: 1:36 - loss: 0.283 - ETA: 1:36 - loss: 0.283 - ETA: 1:35 - loss: 0.284 - ETA: 1:35 - loss: 0.284 - ETA: 1:35 - loss: 0.284 - ETA: 1:36 - loss: 0.284 - ETA: 1:35 - loss: 0.284 - ETA: 1:35 - loss: 0.284 - ETA: 1:35 - loss: 0.283 - ETA: 1:34 - loss: 0.284 - ETA: 1:33 - loss: 0.285 - ETA: 1:33 - loss: 0.285 - ETA: 1:33 - loss: 0.286 - ETA: 1:32 - loss: 0.287 - ETA: 1:31 - loss: 0.287 - ETA: 1:30 - loss: 0.288 - ETA: 1:29 - loss: 0.289 - ETA: 1:28 - loss: 0.289 - ETA: 1:27 - loss: 0.290 - ETA: 1:26 - loss: 0.291 - ETA: 1:25 - loss: 0.293 - ETA: 1:23 - loss: 0.294 - ETA: 1:22 - loss: 0.295 - ETA: 1:21 - loss: 0.296 - ETA: 1:20 - loss: 0.296 - ETA: 1:18 - loss: 0.297 - ETA: 1:17 - loss: 0.297 - ETA: 1:16 - loss: 0.298 - ETA: 1:15 - loss: 0.299 - ETA: 1:13 - loss: 0.299 - ETA: 1:12 - loss: 0.300 - ETA: 1:11 - loss: 0.301 - ETA: 1:10 - loss: 0.301 - ETA: 1:08 - loss: 0.301 - ETA: 1:07 - loss: 0.302 - ETA: 1:06 - loss: 0.302 - ETA: 1:05 - loss: 0.303 - ETA: 1:04 - loss: 0.303 - ETA: 1:02 - loss: 0.303 - ETA: 1:01 - loss: 0.303 - ETA: 1:00 - loss: 0.303 - ETA: 59s - loss: 0.304 - ETA: 58s - loss: 0.30 - ETA: 57s - loss: 0.30 - ETA: 56s - loss: 0.30 - ETA: 55s - loss: 0.30 - ETA: 53s - loss: 0.30 - ETA: 52s - loss: 0.30 - ETA: 51s - loss: 0.30 - ETA: 50s - loss: 0.30 - ETA: 49s - loss: 0.30 - ETA: 48s - loss: 0.30 - ETA: 47s - loss: 0.30 - ETA: 46s - loss: 0.30 - ETA: 45s - loss: 0.30 - ETA: 44s - loss: 0.30 - ETA: 43s - loss: 0.30 - ETA: 42s - loss: 0.30 - ETA: 41s - loss: 0.30 - ETA: 40s - loss: 0.30 - ETA: 39s - loss: 0.30 - ETA: 38s - loss: 0.30 - ETA: 37s - loss: 0.30 - ETA: 36s - loss: 0.30 - ETA: 35s - loss: 0.30 - ETA: 34s - loss: 0.30 - ETA: 33s - loss: 0.30 - ETA: 32s - loss: 0.30 - ETA: 31s - loss: 0.30 - ETA: 30s - loss: 0.30 - ETA: 29s - loss: 0.30 - ETA: 28s - loss: 0.30 - ETA: 27s - loss: 0.30 - ETA: 26s - loss: 0.30 - ETA: 25s - loss: 0.30 - ETA: 24s - loss: 0.30 - ETA: 22s - loss: 0.30 - ETA: 21s - loss: 0.30 - ETA: 20s - loss: 0.30 - ETA: 19s - loss: 0.30 - ETA: 18s - loss: 0.30 - ETA: 17s - loss: 0.30 - ETA: 16s - loss: 0.30 - ETA: 15s - loss: 0.30 - ETA: 14s - loss: 0.31 - ETA: 13s - loss: 0.31 - ETA: 12s - loss: 0.31 - ETA: 11s - loss: 0.31 - ETA: 10s - loss: 0.31 - ETA: 9s - loss: 0.3107 - ETA: 8s - loss: 0.310 - ETA: 7s - loss: 0.311 - ETA: 6s - loss: 0.311 - ETA: 5s - loss: 0.311 - ETA: 4s - loss: 0.311 - ETA: 3s - loss: 0.311 - ETA: 2s - loss: 0.311 - ETA: 1s - loss: 0.312 - ETA: 0s - loss: 0.312 - 116s 1s/step - loss: 0.3122 - val_loss: 0.1123\n",
      "\n",
      "Epoch 00022: val_loss improved from 0.11495 to 0.11226, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 23/30\n",
      "111/111 [==============================] - ETA: 1:25 - loss: 0.227 - ETA: 1:37 - loss: 0.240 - ETA: 1:36 - loss: 0.279 - ETA: 1:35 - loss: 0.285 - ETA: 1:34 - loss: 0.289 - ETA: 1:34 - loss: 0.291 - ETA: 1:32 - loss: 0.290 - ETA: 1:31 - loss: 0.286 - ETA: 1:30 - loss: 0.283 - ETA: 1:29 - loss: 0.283 - ETA: 1:28 - loss: 0.285 - ETA: 1:28 - loss: 0.289 - ETA: 1:27 - loss: 0.298 - ETA: 1:26 - loss: 0.304 - ETA: 1:26 - loss: 0.310 - ETA: 1:25 - loss: 0.318 - ETA: 1:23 - loss: 0.325 - ETA: 1:23 - loss: 0.329 - ETA: 1:22 - loss: 0.333 - ETA: 1:21 - loss: 0.336 - ETA: 1:20 - loss: 0.338 - ETA: 1:19 - loss: 0.340 - ETA: 1:19 - loss: 0.341 - ETA: 1:18 - loss: 0.343 - ETA: 1:17 - loss: 0.344 - ETA: 1:17 - loss: 0.345 - ETA: 1:17 - loss: 0.347 - ETA: 1:17 - loss: 0.347 - ETA: 1:17 - loss: 0.348 - ETA: 1:16 - loss: 0.348 - ETA: 1:16 - loss: 0.348 - ETA: 1:16 - loss: 0.348 - ETA: 1:15 - loss: 0.349 - ETA: 1:15 - loss: 0.349 - ETA: 1:15 - loss: 0.350 - ETA: 1:15 - loss: 0.350 - ETA: 1:14 - loss: 0.351 - ETA: 1:14 - loss: 0.351 - ETA: 1:13 - loss: 0.351 - ETA: 1:12 - loss: 0.351 - ETA: 1:11 - loss: 0.352 - ETA: 1:11 - loss: 0.353 - ETA: 1:10 - loss: 0.354 - ETA: 1:09 - loss: 0.355 - ETA: 1:08 - loss: 0.356 - ETA: 1:07 - loss: 0.356 - ETA: 1:07 - loss: 0.357 - ETA: 1:06 - loss: 0.358 - ETA: 1:06 - loss: 0.358 - ETA: 1:05 - loss: 0.359 - ETA: 1:04 - loss: 0.360 - ETA: 1:04 - loss: 0.361 - ETA: 1:03 - loss: 0.361 - ETA: 1:02 - loss: 0.362 - ETA: 1:01 - loss: 0.362 - ETA: 1:00 - loss: 0.363 - ETA: 59s - loss: 0.363 - ETA: 59s - loss: 0.36 - ETA: 58s - loss: 0.36 - ETA: 57s - loss: 0.36 - ETA: 56s - loss: 0.36 - ETA: 55s - loss: 0.36 - ETA: 54s - loss: 0.36 - ETA: 53s - loss: 0.36 - ETA: 52s - loss: 0.36 - ETA: 50s - loss: 0.36 - ETA: 49s - loss: 0.36 - ETA: 48s - loss: 0.36 - ETA: 47s - loss: 0.36 - ETA: 46s - loss: 0.36 - ETA: 45s - loss: 0.36 - ETA: 43s - loss: 0.36 - ETA: 42s - loss: 0.36 - ETA: 41s - loss: 0.36 - ETA: 40s - loss: 0.36 - ETA: 39s - loss: 0.36 - ETA: 38s - loss: 0.36 - ETA: 36s - loss: 0.36 - ETA: 35s - loss: 0.36 - ETA: 34s - loss: 0.36 - ETA: 33s - loss: 0.36 - ETA: 32s - loss: 0.36 - ETA: 31s - loss: 0.36 - ETA: 30s - loss: 0.36 - ETA: 28s - loss: 0.36 - ETA: 27s - loss: 0.36 - ETA: 26s - loss: 0.36 - ETA: 25s - loss: 0.36 - ETA: 24s - loss: 0.36 - ETA: 23s - loss: 0.36 - ETA: 22s - loss: 0.36 - ETA: 20s - loss: 0.36 - ETA: 19s - loss: 0.36 - ETA: 18s - loss: 0.36 - ETA: 17s - loss: 0.36 - ETA: 16s - loss: 0.36 - ETA: 15s - loss: 0.36 - ETA: 14s - loss: 0.36 - ETA: 13s - loss: 0.36 - ETA: 12s - loss: 0.36 - ETA: 10s - loss: 0.36 - ETA: 9s - loss: 0.3626 - ETA: 8s - loss: 0.362 - ETA: 7s - loss: 0.362 - ETA: 6s - loss: 0.362 - ETA: 5s - loss: 0.362 - ETA: 4s - loss: 0.362 - ETA: 3s - loss: 0.362 - ETA: 2s - loss: 0.362 - ETA: 1s - loss: 0.361 - ETA: 0s - loss: 0.361 - 123s 1s/step - loss: 0.3617 - val_loss: 0.1150\n",
      "\n",
      "Epoch 00023: val_loss did not improve from 0.11226\n",
      "Epoch 24/30\n",
      "111/111 [==============================] - ETA: 1:45 - loss: 0.771 - ETA: 1:42 - loss: 0.730 - ETA: 1:48 - loss: 0.675 - ETA: 1:46 - loss: 0.630 - ETA: 1:47 - loss: 0.588 - ETA: 1:47 - loss: 0.560 - ETA: 1:48 - loss: 0.543 - ETA: 1:48 - loss: 0.538 - ETA: 1:46 - loss: 0.536 - ETA: 1:43 - loss: 0.534 - ETA: 1:44 - loss: 0.531 - ETA: 1:42 - loss: 0.526 - ETA: 1:41 - loss: 0.522 - ETA: 1:40 - loss: 0.518 - ETA: 1:39 - loss: 0.512 - ETA: 1:37 - loss: 0.508 - ETA: 1:37 - loss: 0.504 - ETA: 1:35 - loss: 0.499 - ETA: 1:34 - loss: 0.495 - ETA: 1:33 - loss: 0.491 - ETA: 1:31 - loss: 0.487 - ETA: 1:30 - loss: 0.483 - ETA: 1:29 - loss: 0.480 - ETA: 1:28 - loss: 0.478 - ETA: 1:27 - loss: 0.475 - ETA: 1:26 - loss: 0.473 - ETA: 1:25 - loss: 0.472 - ETA: 1:24 - loss: 0.470 - ETA: 1:23 - loss: 0.469 - ETA: 1:22 - loss: 0.469 - ETA: 1:21 - loss: 0.468 - ETA: 1:19 - loss: 0.467 - ETA: 1:18 - loss: 0.467 - ETA: 1:17 - loss: 0.466 - ETA: 1:16 - loss: 0.465 - ETA: 1:15 - loss: 0.464 - ETA: 1:14 - loss: 0.463 - ETA: 1:14 - loss: 0.462 - ETA: 1:13 - loss: 0.461 - ETA: 1:11 - loss: 0.460 - ETA: 1:10 - loss: 0.459 - ETA: 1:09 - loss: 0.458 - ETA: 1:08 - loss: 0.457 - ETA: 1:07 - loss: 0.456 - ETA: 1:06 - loss: 0.455 - ETA: 1:05 - loss: 0.454 - ETA: 1:04 - loss: 0.453 - ETA: 1:03 - loss: 0.451 - ETA: 1:02 - loss: 0.450 - ETA: 1:01 - loss: 0.449 - ETA: 1:00 - loss: 0.448 - ETA: 59s - loss: 0.447 - ETA: 58s - loss: 0.44 - ETA: 57s - loss: 0.44 - ETA: 56s - loss: 0.44 - ETA: 55s - loss: 0.44 - ETA: 54s - loss: 0.44 - ETA: 53s - loss: 0.44 - ETA: 52s - loss: 0.44 - ETA: 51s - loss: 0.43 - ETA: 50s - loss: 0.43 - ETA: 49s - loss: 0.43 - ETA: 48s - loss: 0.43 - ETA: 47s - loss: 0.43 - ETA: 46s - loss: 0.43 - ETA: 45s - loss: 0.43 - ETA: 44s - loss: 0.43 - ETA: 43s - loss: 0.43 - ETA: 43s - loss: 0.43 - ETA: 42s - loss: 0.42 - ETA: 41s - loss: 0.42 - ETA: 40s - loss: 0.42 - ETA: 39s - loss: 0.42 - ETA: 38s - loss: 0.42 - ETA: 37s - loss: 0.42 - ETA: 36s - loss: 0.42 - ETA: 35s - loss: 0.42 - ETA: 34s - loss: 0.42 - ETA: 33s - loss: 0.42 - ETA: 32s - loss: 0.41 - ETA: 31s - loss: 0.41 - ETA: 30s - loss: 0.41 - ETA: 29s - loss: 0.41 - ETA: 28s - loss: 0.41 - ETA: 27s - loss: 0.41 - ETA: 26s - loss: 0.41 - ETA: 25s - loss: 0.41 - ETA: 24s - loss: 0.41 - ETA: 23s - loss: 0.41 - ETA: 22s - loss: 0.41 - ETA: 20s - loss: 0.40 - ETA: 19s - loss: 0.40 - ETA: 18s - loss: 0.40 - ETA: 17s - loss: 0.40 - ETA: 16s - loss: 0.40 - ETA: 15s - loss: 0.40 - ETA: 14s - loss: 0.40 - ETA: 13s - loss: 0.40 - ETA: 12s - loss: 0.40 - ETA: 11s - loss: 0.40 - ETA: 10s - loss: 0.40 - ETA: 9s - loss: 0.4004 - ETA: 8s - loss: 0.399 - ETA: 7s - loss: 0.398 - ETA: 6s - loss: 0.398 - ETA: 5s - loss: 0.397 - ETA: 4s - loss: 0.396 - ETA: 3s - loss: 0.396 - ETA: 2s - loss: 0.395 - ETA: 1s - loss: 0.395 - ETA: 0s - loss: 0.394 - 116s 1s/step - loss: 0.3941 - val_loss: 0.1203\n",
      "\n",
      "Epoch 00024: val_loss did not improve from 0.11226\n",
      "Epoch 25/30\n",
      "111/111 [==============================] - ETA: 2:00 - loss: 0.114 - ETA: 1:44 - loss: 0.176 - ETA: 1:39 - loss: 0.193 - ETA: 1:39 - loss: 0.219 - ETA: 1:40 - loss: 0.230 - ETA: 1:42 - loss: 0.235 - ETA: 1:40 - loss: 0.240 - ETA: 1:39 - loss: 0.244 - ETA: 1:38 - loss: 0.249 - ETA: 1:36 - loss: 0.263 - ETA: 1:35 - loss: 0.272 - ETA: 1:35 - loss: 0.279 - ETA: 1:34 - loss: 0.284 - ETA: 1:35 - loss: 0.287 - ETA: 1:34 - loss: 0.290 - ETA: 1:34 - loss: 0.291 - ETA: 1:33 - loss: 0.292 - ETA: 1:32 - loss: 0.292 - ETA: 1:31 - loss: 0.292 - ETA: 1:30 - loss: 0.293 - ETA: 1:30 - loss: 0.295 - ETA: 1:30 - loss: 0.298 - ETA: 1:29 - loss: 0.299 - ETA: 1:29 - loss: 0.302 - ETA: 1:28 - loss: 0.304 - ETA: 1:27 - loss: 0.305 - ETA: 1:26 - loss: 0.306 - ETA: 1:25 - loss: 0.307 - ETA: 1:24 - loss: 0.308 - ETA: 1:22 - loss: 0.308 - ETA: 1:22 - loss: 0.308 - ETA: 1:21 - loss: 0.309 - ETA: 1:20 - loss: 0.309 - ETA: 1:19 - loss: 0.309 - ETA: 1:18 - loss: 0.311 - ETA: 1:17 - loss: 0.313 - ETA: 1:17 - loss: 0.314 - ETA: 1:16 - loss: 0.316 - ETA: 1:15 - loss: 0.317 - ETA: 1:14 - loss: 0.318 - ETA: 1:13 - loss: 0.320 - ETA: 1:12 - loss: 0.320 - ETA: 1:11 - loss: 0.321 - ETA: 1:10 - loss: 0.322 - ETA: 1:09 - loss: 0.322 - ETA: 1:07 - loss: 0.323 - ETA: 1:06 - loss: 0.323 - ETA: 1:05 - loss: 0.324 - ETA: 1:03 - loss: 0.324 - ETA: 1:02 - loss: 0.325 - ETA: 1:01 - loss: 0.325 - ETA: 1:00 - loss: 0.325 - ETA: 59s - loss: 0.326 - ETA: 57s - loss: 0.32 - ETA: 56s - loss: 0.32 - ETA: 55s - loss: 0.32 - ETA: 54s - loss: 0.32 - ETA: 53s - loss: 0.32 - ETA: 52s - loss: 0.32 - ETA: 51s - loss: 0.32 - ETA: 49s - loss: 0.32 - ETA: 48s - loss: 0.32 - ETA: 47s - loss: 0.32 - ETA: 46s - loss: 0.32 - ETA: 45s - loss: 0.32 - ETA: 44s - loss: 0.32 - ETA: 43s - loss: 0.32 - ETA: 42s - loss: 0.32 - ETA: 41s - loss: 0.32 - ETA: 40s - loss: 0.32 - ETA: 39s - loss: 0.32 - ETA: 38s - loss: 0.32 - ETA: 37s - loss: 0.32 - ETA: 36s - loss: 0.32 - ETA: 35s - loss: 0.32 - ETA: 34s - loss: 0.32 - ETA: 33s - loss: 0.32 - ETA: 32s - loss: 0.32 - ETA: 31s - loss: 0.32 - ETA: 30s - loss: 0.32 - ETA: 29s - loss: 0.32 - ETA: 28s - loss: 0.32 - ETA: 27s - loss: 0.32 - ETA: 26s - loss: 0.32 - ETA: 25s - loss: 0.32 - ETA: 24s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 20s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 17s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 13s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 9s - loss: 0.3354 - ETA: 8s - loss: 0.335 - ETA: 7s - loss: 0.336 - ETA: 6s - loss: 0.336 - ETA: 5s - loss: 0.336 - ETA: 4s - loss: 0.337 - ETA: 3s - loss: 0.337 - ETA: 2s - loss: 0.337 - ETA: 1s - loss: 0.338 - ETA: 0s - loss: 0.338 - ETA: 0s - loss: 0.338 - 109s 982ms/step - loss: 0.3390 - val_loss: 0.1084\n",
      "\n",
      "Epoch 00025: val_loss improved from 0.11226 to 0.10843, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 26/30\n",
      "111/111 [==============================] - ETA: 2:03 - loss: 0.326 - ETA: 2:11 - loss: 0.271 - ETA: 2:08 - loss: 0.254 - ETA: 2:07 - loss: 0.236 - ETA: 2:02 - loss: 0.229 - ETA: 1:54 - loss: 0.228 - ETA: 1:49 - loss: 0.228 - ETA: 1:45 - loss: 0.227 - ETA: 1:42 - loss: 0.233 - ETA: 1:39 - loss: 0.240 - ETA: 1:37 - loss: 0.245 - ETA: 1:35 - loss: 0.251 - ETA: 1:33 - loss: 0.256 - ETA: 1:31 - loss: 0.260 - ETA: 1:29 - loss: 0.264 - ETA: 1:28 - loss: 0.267 - ETA: 1:27 - loss: 0.273 - ETA: 1:25 - loss: 0.277 - ETA: 1:24 - loss: 0.281 - ETA: 1:23 - loss: 0.285 - ETA: 1:22 - loss: 0.290 - ETA: 1:21 - loss: 0.294 - ETA: 1:19 - loss: 0.298 - ETA: 1:18 - loss: 0.301 - ETA: 1:17 - loss: 0.304 - ETA: 1:16 - loss: 0.306 - ETA: 1:15 - loss: 0.308 - ETA: 1:14 - loss: 0.310 - ETA: 1:13 - loss: 0.312 - ETA: 1:12 - loss: 0.314 - ETA: 1:11 - loss: 0.316 - ETA: 1:10 - loss: 0.318 - ETA: 1:09 - loss: 0.319 - ETA: 1:09 - loss: 0.321 - ETA: 1:08 - loss: 0.323 - ETA: 1:07 - loss: 0.324 - ETA: 1:06 - loss: 0.325 - ETA: 1:05 - loss: 0.326 - ETA: 1:04 - loss: 0.327 - ETA: 1:03 - loss: 0.328 - ETA: 1:02 - loss: 0.328 - ETA: 1:01 - loss: 0.329 - ETA: 1:00 - loss: 0.329 - ETA: 59s - loss: 0.330 - ETA: 58s - loss: 0.33 - ETA: 58s - loss: 0.33 - ETA: 57s - loss: 0.33 - ETA: 56s - loss: 0.33 - ETA: 56s - loss: 0.33 - ETA: 55s - loss: 0.33 - ETA: 55s - loss: 0.33 - ETA: 54s - loss: 0.33 - ETA: 54s - loss: 0.33 - ETA: 53s - loss: 0.33 - ETA: 52s - loss: 0.33 - ETA: 52s - loss: 0.33 - ETA: 51s - loss: 0.33 - ETA: 50s - loss: 0.33 - ETA: 49s - loss: 0.33 - ETA: 49s - loss: 0.33 - ETA: 48s - loss: 0.33 - ETA: 47s - loss: 0.33 - ETA: 46s - loss: 0.33 - ETA: 45s - loss: 0.33 - ETA: 44s - loss: 0.33 - ETA: 44s - loss: 0.33 - ETA: 43s - loss: 0.33 - ETA: 42s - loss: 0.33 - ETA: 41s - loss: 0.33 - ETA: 40s - loss: 0.33 - ETA: 39s - loss: 0.33 - ETA: 38s - loss: 0.33 - ETA: 37s - loss: 0.33 - ETA: 36s - loss: 0.33 - ETA: 35s - loss: 0.33 - ETA: 34s - loss: 0.33 - ETA: 33s - loss: 0.33 - ETA: 32s - loss: 0.33 - ETA: 31s - loss: 0.33 - ETA: 30s - loss: 0.33 - ETA: 29s - loss: 0.33 - ETA: 28s - loss: 0.33 - ETA: 27s - loss: 0.33 - ETA: 26s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 24s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 20s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 17s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 13s - loss: 0.32 - ETA: 11s - loss: 0.32 - ETA: 10s - loss: 0.32 - ETA: 9s - loss: 0.3300 - ETA: 8s - loss: 0.330 - ETA: 7s - loss: 0.330 - ETA: 6s - loss: 0.330 - ETA: 5s - loss: 0.330 - ETA: 4s - loss: 0.330 - ETA: 3s - loss: 0.330 - ETA: 2s - loss: 0.330 - ETA: 1s - loss: 0.330 - ETA: 0s - loss: 0.330 - ETA: 0s - loss: 0.330 - 112s 1s/step - loss: 0.3310 - val_loss: 0.1058\n",
      "\n",
      "Epoch 00026: val_loss improved from 0.10843 to 0.10577, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 27/30\n",
      "111/111 [==============================] - ETA: 1:32 - loss: 0.461 - ETA: 1:44 - loss: 0.434 - ETA: 1:45 - loss: 0.397 - ETA: 1:43 - loss: 0.372 - ETA: 1:40 - loss: 0.350 - ETA: 1:41 - loss: 0.335 - ETA: 1:39 - loss: 0.320 - ETA: 1:39 - loss: 0.312 - ETA: 1:39 - loss: 0.308 - ETA: 1:38 - loss: 0.305 - ETA: 1:38 - loss: 0.300 - ETA: 1:37 - loss: 0.296 - ETA: 1:36 - loss: 0.294 - ETA: 1:34 - loss: 0.292 - ETA: 1:33 - loss: 0.292 - ETA: 1:31 - loss: 0.292 - ETA: 1:29 - loss: 0.291 - ETA: 1:28 - loss: 0.290 - ETA: 1:26 - loss: 0.290 - ETA: 1:25 - loss: 0.290 - ETA: 1:24 - loss: 0.289 - ETA: 1:22 - loss: 0.289 - ETA: 1:21 - loss: 0.288 - ETA: 1:20 - loss: 0.288 - ETA: 1:19 - loss: 0.288 - ETA: 1:18 - loss: 0.288 - ETA: 1:16 - loss: 0.287 - ETA: 1:15 - loss: 0.287 - ETA: 1:14 - loss: 0.287 - ETA: 1:13 - loss: 0.287 - ETA: 1:12 - loss: 0.286 - ETA: 1:11 - loss: 0.286 - ETA: 1:10 - loss: 0.286 - ETA: 1:10 - loss: 0.286 - ETA: 1:08 - loss: 0.286 - ETA: 1:08 - loss: 0.286 - ETA: 1:07 - loss: 0.287 - ETA: 1:06 - loss: 0.288 - ETA: 1:05 - loss: 0.288 - ETA: 1:04 - loss: 0.289 - ETA: 1:03 - loss: 0.289 - ETA: 1:02 - loss: 0.290 - ETA: 1:01 - loss: 0.290 - ETA: 1:00 - loss: 0.290 - ETA: 59s - loss: 0.291 - ETA: 58s - loss: 0.29 - ETA: 57s - loss: 0.29 - ETA: 56s - loss: 0.29 - ETA: 55s - loss: 0.29 - ETA: 54s - loss: 0.29 - ETA: 53s - loss: 0.29 - ETA: 52s - loss: 0.29 - ETA: 51s - loss: 0.29 - ETA: 50s - loss: 0.29 - ETA: 50s - loss: 0.29 - ETA: 49s - loss: 0.29 - ETA: 48s - loss: 0.29 - ETA: 47s - loss: 0.29 - ETA: 46s - loss: 0.29 - ETA: 45s - loss: 0.29 - ETA: 44s - loss: 0.29 - ETA: 44s - loss: 0.29 - ETA: 43s - loss: 0.29 - ETA: 42s - loss: 0.29 - ETA: 41s - loss: 0.29 - ETA: 40s - loss: 0.29 - ETA: 39s - loss: 0.29 - ETA: 38s - loss: 0.29 - ETA: 38s - loss: 0.29 - ETA: 37s - loss: 0.29 - ETA: 36s - loss: 0.29 - ETA: 35s - loss: 0.29 - ETA: 34s - loss: 0.29 - ETA: 33s - loss: 0.29 - ETA: 32s - loss: 0.29 - ETA: 31s - loss: 0.29 - ETA: 30s - loss: 0.29 - ETA: 29s - loss: 0.29 - ETA: 28s - loss: 0.30 - ETA: 28s - loss: 0.30 - ETA: 27s - loss: 0.30 - ETA: 26s - loss: 0.30 - ETA: 25s - loss: 0.30 - ETA: 24s - loss: 0.30 - ETA: 23s - loss: 0.30 - ETA: 22s - loss: 0.30 - ETA: 21s - loss: 0.30 - ETA: 20s - loss: 0.30 - ETA: 19s - loss: 0.30 - ETA: 18s - loss: 0.30 - ETA: 17s - loss: 0.30 - ETA: 17s - loss: 0.30 - ETA: 16s - loss: 0.30 - ETA: 15s - loss: 0.30 - ETA: 14s - loss: 0.30 - ETA: 13s - loss: 0.30 - ETA: 12s - loss: 0.30 - ETA: 11s - loss: 0.30 - ETA: 10s - loss: 0.30 - ETA: 9s - loss: 0.3061 - ETA: 8s - loss: 0.306 - ETA: 8s - loss: 0.306 - ETA: 7s - loss: 0.306 - ETA: 6s - loss: 0.307 - ETA: 5s - loss: 0.307 - ETA: 4s - loss: 0.308 - ETA: 3s - loss: 0.308 - ETA: 2s - loss: 0.308 - ETA: 1s - loss: 0.309 - ETA: 0s - loss: 0.309 - ETA: 0s - loss: 0.310 - 101s 906ms/step - loss: 0.3103 - val_loss: 0.1166\n",
      "\n",
      "Epoch 00027: val_loss did not improve from 0.10577\n",
      "Epoch 28/30\n",
      "111/111 [==============================] - ETA: 1:32 - loss: 0.388 - ETA: 1:33 - loss: 0.406 - ETA: 1:33 - loss: 0.396 - ETA: 1:31 - loss: 0.379 - ETA: 1:31 - loss: 0.358 - ETA: 1:30 - loss: 0.340 - ETA: 1:29 - loss: 0.328 - ETA: 1:28 - loss: 0.324 - ETA: 1:27 - loss: 0.328 - ETA: 1:27 - loss: 0.332 - ETA: 1:27 - loss: 0.332 - ETA: 1:27 - loss: 0.338 - ETA: 1:27 - loss: 0.341 - ETA: 1:26 - loss: 0.344 - ETA: 1:26 - loss: 0.345 - ETA: 1:25 - loss: 0.345 - ETA: 1:24 - loss: 0.346 - ETA: 1:24 - loss: 0.348 - ETA: 1:24 - loss: 0.349 - ETA: 1:23 - loss: 0.350 - ETA: 1:22 - loss: 0.352 - ETA: 1:22 - loss: 0.353 - ETA: 1:21 - loss: 0.354 - ETA: 1:20 - loss: 0.355 - ETA: 1:19 - loss: 0.355 - ETA: 1:18 - loss: 0.355 - ETA: 1:17 - loss: 0.355 - ETA: 1:17 - loss: 0.356 - ETA: 1:16 - loss: 0.357 - ETA: 1:15 - loss: 0.357 - ETA: 1:14 - loss: 0.357 - ETA: 1:14 - loss: 0.357 - ETA: 1:13 - loss: 0.357 - ETA: 1:12 - loss: 0.357 - ETA: 1:11 - loss: 0.357 - ETA: 1:10 - loss: 0.356 - ETA: 1:09 - loss: 0.356 - ETA: 1:08 - loss: 0.355 - ETA: 1:07 - loss: 0.355 - ETA: 1:06 - loss: 0.355 - ETA: 1:05 - loss: 0.354 - ETA: 1:04 - loss: 0.354 - ETA: 1:03 - loss: 0.353 - ETA: 1:03 - loss: 0.352 - ETA: 1:02 - loss: 0.352 - ETA: 1:01 - loss: 0.351 - ETA: 1:00 - loss: 0.350 - ETA: 59s - loss: 0.350 - ETA: 58s - loss: 0.34 - ETA: 58s - loss: 0.34 - ETA: 57s - loss: 0.34 - ETA: 56s - loss: 0.34 - ETA: 55s - loss: 0.34 - ETA: 54s - loss: 0.34 - ETA: 53s - loss: 0.34 - ETA: 52s - loss: 0.34 - ETA: 51s - loss: 0.34 - ETA: 50s - loss: 0.34 - ETA: 49s - loss: 0.34 - ETA: 48s - loss: 0.34 - ETA: 47s - loss: 0.34 - ETA: 46s - loss: 0.34 - ETA: 45s - loss: 0.34 - ETA: 45s - loss: 0.34 - ETA: 44s - loss: 0.34 - ETA: 43s - loss: 0.34 - ETA: 42s - loss: 0.34 - ETA: 41s - loss: 0.34 - ETA: 40s - loss: 0.34 - ETA: 39s - loss: 0.34 - ETA: 38s - loss: 0.34 - ETA: 37s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 35s - loss: 0.34 - ETA: 34s - loss: 0.34 - ETA: 33s - loss: 0.34 - ETA: 32s - loss: 0.34 - ETA: 31s - loss: 0.34 - ETA: 30s - loss: 0.34 - ETA: 29s - loss: 0.34 - ETA: 28s - loss: 0.34 - ETA: 27s - loss: 0.34 - ETA: 26s - loss: 0.34 - ETA: 25s - loss: 0.34 - ETA: 24s - loss: 0.34 - ETA: 23s - loss: 0.34 - ETA: 22s - loss: 0.34 - ETA: 21s - loss: 0.34 - ETA: 20s - loss: 0.34 - ETA: 19s - loss: 0.34 - ETA: 18s - loss: 0.34 - ETA: 17s - loss: 0.34 - ETA: 16s - loss: 0.34 - ETA: 15s - loss: 0.34 - ETA: 14s - loss: 0.34 - ETA: 13s - loss: 0.34 - ETA: 12s - loss: 0.34 - ETA: 11s - loss: 0.34 - ETA: 10s - loss: 0.34 - ETA: 9s - loss: 0.3406 - ETA: 8s - loss: 0.340 - ETA: 7s - loss: 0.340 - ETA: 6s - loss: 0.340 - ETA: 5s - loss: 0.340 - ETA: 5s - loss: 0.340 - ETA: 3s - loss: 0.340 - ETA: 2s - loss: 0.340 - ETA: 1s - loss: 0.340 - ETA: 0s - loss: 0.340 - ETA: 0s - loss: 0.340 - 112s 1s/step - loss: 0.3399 - val_loss: 0.1144\n",
      "\n",
      "Epoch 00028: val_loss did not improve from 0.10577\n",
      "Epoch 29/30\n",
      "111/111 [==============================] - ETA: 2:03 - loss: 0.265 - ETA: 1:45 - loss: 0.496 - ETA: 1:44 - loss: 0.513 - ETA: 1:46 - loss: 0.507 - ETA: 1:44 - loss: 0.494 - ETA: 1:42 - loss: 0.485 - ETA: 1:40 - loss: 0.473 - ETA: 1:39 - loss: 0.461 - ETA: 1:38 - loss: 0.451 - ETA: 1:38 - loss: 0.443 - ETA: 1:36 - loss: 0.434 - ETA: 1:35 - loss: 0.426 - ETA: 1:35 - loss: 0.422 - ETA: 1:36 - loss: 0.417 - ETA: 1:35 - loss: 0.414 - ETA: 1:34 - loss: 0.409 - ETA: 1:33 - loss: 0.405 - ETA: 1:32 - loss: 0.400 - ETA: 1:31 - loss: 0.397 - ETA: 1:31 - loss: 0.395 - ETA: 1:31 - loss: 0.393 - ETA: 1:30 - loss: 0.390 - ETA: 1:29 - loss: 0.389 - ETA: 1:28 - loss: 0.387 - ETA: 1:27 - loss: 0.385 - ETA: 1:26 - loss: 0.383 - ETA: 1:25 - loss: 0.382 - ETA: 1:23 - loss: 0.381 - ETA: 1:22 - loss: 0.379 - ETA: 1:20 - loss: 0.378 - ETA: 1:19 - loss: 0.378 - ETA: 1:19 - loss: 0.377 - ETA: 1:18 - loss: 0.377 - ETA: 1:17 - loss: 0.376 - ETA: 1:16 - loss: 0.375 - ETA: 1:15 - loss: 0.375 - ETA: 1:14 - loss: 0.375 - ETA: 1:13 - loss: 0.375 - ETA: 1:12 - loss: 0.375 - ETA: 1:10 - loss: 0.375 - ETA: 1:09 - loss: 0.374 - ETA: 1:08 - loss: 0.374 - ETA: 1:07 - loss: 0.374 - ETA: 1:07 - loss: 0.373 - ETA: 1:06 - loss: 0.373 - ETA: 1:05 - loss: 0.373 - ETA: 1:04 - loss: 0.373 - ETA: 1:03 - loss: 0.373 - ETA: 1:03 - loss: 0.372 - ETA: 1:02 - loss: 0.372 - ETA: 1:01 - loss: 0.372 - ETA: 1:00 - loss: 0.371 - ETA: 59s - loss: 0.371 - ETA: 59s - loss: 0.37 - ETA: 58s - loss: 0.37 - ETA: 57s - loss: 0.36 - ETA: 56s - loss: 0.36 - ETA: 56s - loss: 0.36 - ETA: 55s - loss: 0.36 - ETA: 54s - loss: 0.36 - ETA: 53s - loss: 0.36 - ETA: 52s - loss: 0.36 - ETA: 51s - loss: 0.36 - ETA: 50s - loss: 0.36 - ETA: 49s - loss: 0.36 - ETA: 48s - loss: 0.36 - ETA: 47s - loss: 0.36 - ETA: 46s - loss: 0.36 - ETA: 45s - loss: 0.36 - ETA: 44s - loss: 0.36 - ETA: 42s - loss: 0.36 - ETA: 41s - loss: 0.36 - ETA: 40s - loss: 0.36 - ETA: 39s - loss: 0.36 - ETA: 38s - loss: 0.36 - ETA: 37s - loss: 0.36 - ETA: 36s - loss: 0.36 - ETA: 34s - loss: 0.36 - ETA: 33s - loss: 0.35 - ETA: 32s - loss: 0.35 - ETA: 31s - loss: 0.35 - ETA: 30s - loss: 0.35 - ETA: 29s - loss: 0.35 - ETA: 28s - loss: 0.35 - ETA: 27s - loss: 0.35 - ETA: 26s - loss: 0.35 - ETA: 25s - loss: 0.35 - ETA: 24s - loss: 0.35 - ETA: 23s - loss: 0.35 - ETA: 21s - loss: 0.35 - ETA: 20s - loss: 0.35 - ETA: 19s - loss: 0.35 - ETA: 18s - loss: 0.35 - ETA: 17s - loss: 0.35 - ETA: 16s - loss: 0.35 - ETA: 15s - loss: 0.35 - ETA: 14s - loss: 0.35 - ETA: 13s - loss: 0.35 - ETA: 12s - loss: 0.35 - ETA: 11s - loss: 0.35 - ETA: 10s - loss: 0.35 - ETA: 9s - loss: 0.3511 - ETA: 8s - loss: 0.350 - ETA: 7s - loss: 0.350 - ETA: 6s - loss: 0.350 - ETA: 5s - loss: 0.349 - ETA: 4s - loss: 0.349 - ETA: 3s - loss: 0.349 - ETA: 2s - loss: 0.349 - ETA: 1s - loss: 0.348 - ETA: 0s - loss: 0.348 - 117s 1s/step - loss: 0.3483 - val_loss: 0.1019\n",
      "\n",
      "Epoch 00029: val_loss improved from 0.10577 to 0.10186, saving model to clean_notebooks\\cnn_inj_collage_1C.h5\n",
      "Epoch 30/30\n",
      "111/111 [==============================] - ETA: 1:16 - loss: 0.214 - ETA: 1:34 - loss: 0.188 - ETA: 1:36 - loss: 0.180 - ETA: 1:34 - loss: 0.177 - ETA: 1:32 - loss: 0.179 - ETA: 1:31 - loss: 0.186 - ETA: 1:30 - loss: 0.188 - ETA: 1:29 - loss: 0.189 - ETA: 1:29 - loss: 0.192 - ETA: 1:28 - loss: 0.195 - ETA: 1:27 - loss: 0.197 - ETA: 1:26 - loss: 0.198 - ETA: 1:24 - loss: 0.198 - ETA: 1:24 - loss: 0.199 - ETA: 1:23 - loss: 0.199 - ETA: 1:22 - loss: 0.199 - ETA: 1:21 - loss: 0.200 - ETA: 1:20 - loss: 0.200 - ETA: 1:19 - loss: 0.201 - ETA: 1:18 - loss: 0.202 - ETA: 1:17 - loss: 0.203 - ETA: 1:16 - loss: 0.203 - ETA: 1:16 - loss: 0.204 - ETA: 1:15 - loss: 0.204 - ETA: 1:14 - loss: 0.205 - ETA: 1:14 - loss: 0.206 - ETA: 1:13 - loss: 0.207 - ETA: 1:12 - loss: 0.208 - ETA: 1:12 - loss: 0.209 - ETA: 1:11 - loss: 0.209 - ETA: 1:10 - loss: 0.210 - ETA: 1:10 - loss: 0.210 - ETA: 1:09 - loss: 0.211 - ETA: 1:08 - loss: 0.212 - ETA: 1:08 - loss: 0.213 - ETA: 1:07 - loss: 0.214 - ETA: 1:06 - loss: 0.214 - ETA: 1:05 - loss: 0.215 - ETA: 1:04 - loss: 0.216 - ETA: 1:03 - loss: 0.217 - ETA: 1:02 - loss: 0.218 - ETA: 1:01 - loss: 0.219 - ETA: 1:00 - loss: 0.220 - ETA: 59s - loss: 0.221 - ETA: 58s - loss: 0.22 - ETA: 57s - loss: 0.22 - ETA: 56s - loss: 0.22 - ETA: 55s - loss: 0.22 - ETA: 54s - loss: 0.22 - ETA: 53s - loss: 0.22 - ETA: 52s - loss: 0.22 - ETA: 52s - loss: 0.22 - ETA: 51s - loss: 0.22 - ETA: 50s - loss: 0.22 - ETA: 49s - loss: 0.22 - ETA: 48s - loss: 0.22 - ETA: 47s - loss: 0.23 - ETA: 46s - loss: 0.23 - ETA: 45s - loss: 0.23 - ETA: 44s - loss: 0.23 - ETA: 43s - loss: 0.23 - ETA: 43s - loss: 0.23 - ETA: 42s - loss: 0.23 - ETA: 41s - loss: 0.23 - ETA: 40s - loss: 0.23 - ETA: 39s - loss: 0.23 - ETA: 38s - loss: 0.23 - ETA: 37s - loss: 0.23 - ETA: 36s - loss: 0.23 - ETA: 35s - loss: 0.23 - ETA: 35s - loss: 0.24 - ETA: 34s - loss: 0.24 - ETA: 33s - loss: 0.24 - ETA: 32s - loss: 0.24 - ETA: 31s - loss: 0.24 - ETA: 30s - loss: 0.24 - ETA: 29s - loss: 0.24 - ETA: 28s - loss: 0.24 - ETA: 28s - loss: 0.24 - ETA: 27s - loss: 0.24 - ETA: 26s - loss: 0.24 - ETA: 25s - loss: 0.24 - ETA: 24s - loss: 0.24 - ETA: 23s - loss: 0.24 - ETA: 22s - loss: 0.24 - ETA: 21s - loss: 0.24 - ETA: 20s - loss: 0.24 - ETA: 20s - loss: 0.24 - ETA: 19s - loss: 0.25 - ETA: 18s - loss: 0.25 - ETA: 17s - loss: 0.25 - ETA: 16s - loss: 0.25 - ETA: 15s - loss: 0.25 - ETA: 14s - loss: 0.25 - ETA: 13s - loss: 0.25 - ETA: 13s - loss: 0.25 - ETA: 12s - loss: 0.25 - ETA: 11s - loss: 0.25 - ETA: 10s - loss: 0.25 - ETA: 9s - loss: 0.2554 - ETA: 8s - loss: 0.255 - ETA: 7s - loss: 0.256 - ETA: 7s - loss: 0.256 - ETA: 6s - loss: 0.257 - ETA: 5s - loss: 0.257 - ETA: 4s - loss: 0.257 - ETA: 3s - loss: 0.258 - ETA: 2s - loss: 0.258 - ETA: 1s - loss: 0.259 - ETA: 0s - loss: 0.259 - ETA: 0s - loss: 0.259 - 99s 892ms/step - loss: 0.2603 - val_loss: 0.1044\n",
      "\n",
      "Epoch 00030: val_loss did not improve from 0.10186\n",
      "4/4 [==============================] - ETA: 1s - loss: 0.266 - ETA: 1s - loss: 0.175 - ETA: 0s - loss: 0.161 - ETA: 0s - loss: 0.189 - 2s 454ms/step - loss: 0.1899\n",
      "Val Score:  0.18992075324058533\n",
      "====================================================================================\n",
      "\n",
      "\n",
      "Computation time :  115.852 min\n"
     ]
    }
   ],
   "source": [
    "from processing.models import fit_and_evaluate\n",
    "from time import time\n",
    "t0 = time()\n",
    "n_folds = 2\n",
    "epochs = 30\n",
    "batch_size=8\n",
    "\n",
    "\n",
    "#save the model history in a list after fitting so that we can plot later\n",
    "model_history = [] \n",
    "\n",
    "for i in range(n_folds):\n",
    "    print(\"Training on Fold: \",i+1)\n",
    "    model = None\n",
    "    model = create_hybrid(trainAttrX.shape[1], shape = (240,240,1))\n",
    "    model.compile(loss=\"mean_squared_error\", optimizer=opt)\n",
    "    t_x, val_x, t_y, val_y = custom_shuffle_split(trainAttrX,train_dataset,trainY,test_size = 0.1)    \n",
    "    model_history.append(fit_and_evaluate(t_x, val_x, t_y, val_y, epochs, batch_size,model,es,cp))\n",
    "    print(\"=======\"*12, end=\"\\n\\n\\n\")\n",
    "\n",
    "print(\"Computation time : \", round((time() - t0)/60,3), \"min\")"
   ]
  },
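  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Note: `fit_and_evaluate` lives in `processing.models` and its code is not shown in this notebook. Judging from the logs above (per-fold training with a validation split, the `es`/`cp` callbacks, and the final `Val Score` print), a minimal sketch of what it is assumed to do:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Illustrative sketch only -- the real implementation is in processing.models.\n",
    "# Assumptions: t_x/val_x are [attributes, images] input pairs for the hybrid\n",
    "# model; es/cp are the EarlyStopping/ModelCheckpoint callbacks used above.\n",
    "def fit_and_evaluate_sketch(t_x, val_x, t_y, val_y, epochs, batch_size, model, es, cp):\n",
    "    # train with the held-out split as validation data\n",
    "    results = model.fit(t_x, t_y,\n",
    "                        epochs=epochs,\n",
    "                        batch_size=batch_size,\n",
    "                        callbacks=[es, cp],\n",
    "                        validation_data=(val_x, val_y))\n",
    "    # report the validation loss of the final weights\n",
    "    print(\"Val Score: \", model.evaluate(val_x, val_y))\n",
    "    # the returned History is what gets appended to model_history above\n",
    "    return results"
   ]
  },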
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXUAAAEICAYAAACgQWTXAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAA1PElEQVR4nO3deXxU1f3/8dcne8gOSYAkhICsYQskgGyKogguICoqddcqtlrctbW1RVv7U2utdf/iUndUqBuKIhYUBAWCrGFfJSGQlezbZM7vjzuBiNmZZDKTz/PxmEcmd+7cey6j7zk59yxijEEppZRn8HJ1AZRSSjmPhrpSSnkQDXWllPIgGupKKeVBNNSVUsqDaKgrpZQH0VBXqp0RketF5DtXl0O5Jw111epE5ICInOPqcrSEiEwUEbuIFJ/0GOPqsilVFx9XF0ApN3DYGBPn6kIo1RRaU1cuIyL+IvK0iBx2PJ4WEX/Ha5Ei8pmIHBORPBFZKSJejtceEJEMESkSkZ0iMqmOY48WkSMi4l1r2wwR2ex4PkpEUkWkUESOishTLbyGb0Tk/4nIWsexPhGRzrVenyYiaY7r+EZEBtZ6rYeIfCgi2SKSKyLPnXTsJ0UkX0T2i8jUWtuvF5F9juvfLyJXtaTsyjNpqCtX+iNwOpAEDANGAX9yvHYPkA5EAV2BBwEjIv2B24GRxpgQ4DzgwMkHNsasAUqAs2tt/hXwruP5v4F/G2NCgdOAD07hOq4FbgS6AzbgGQAR6QfMB+50XMdiYJGI+Dm+bD4DDgIJQCzwXq1jjgZ2ApHAE8CrYglyHH+q4/rHAhtPoezKw2ioK1e6CnjEGJNljMkGHgaucbxWhRWSPY0xVcaYlcaaqKga8AcSRcTXGHPAGLO3nuPPB2YBiEgIcL5jW83x+4hIpDGm2BjzQwPljHHUtGs/gmq9/pYxZqsxpgR4CLjcEdpXAJ8bY5YaY6qAJ4FArCAeBcQA9xljSowx5caY2jdHDxpjXjbGVANvOP4tujpeswODRSTQGJNpjElroOyqg9FQV64Ug1VTrXHQsQ3gH8Ae4CtHU8PvAYwxe7BqvnOBLBF5T0RiqNu7wCWOJp1LgB+NMTXnuwnoB+wQkXUicmED5TxsjAk/6VFS6/VDJ12DL1YN+2fXZ4yxO/aNBXpgBbetnnMeqfW+UsfTYMd5rwBuBTJF5HMRGdBA2VUHo6GuXOkw0LPW7/GObRhjiowx9xhjegPTgLtr2s6NMe8aY8Y73muAx+s6uDFmG1aoTuXnTS8YY3YbY2YB0Y73Lzyp9t0cPU66hiog5+TrExFx7JuBFe7xItLszgrGmCXGmHOxau87gJdbWG7lgTTUVVvxFZGAWg8frKaQP4lIlIhEAn8G3gYQkQtFpI8jCAuwml3sItJfRM521L7LgTKs5oj6vAvcAZwBLKjZKCJXi0iUo/Z8zLG5oeM05GoRSRSRTsAjwEJHs8kHwAUiMklEfLHuE1QAq4G1QCbwmIgEOf5NxjV2IhHpKiLTHV9AFUDxKZRbeSANddVWFmMFcM1jLvA3IBXYDGwBfnRsA+gLfI0VWt8DLxhjlmO1pz+GVRM+glXT/kMD550PnAksM8bk1No+BUgTkWKsm6ZXGmPK6jlGTB391C+t9fpbwOuO8gQAcwCMMTuBq4FnHeW9CLjIGFPpCP2LgD7AT1g3ha9o4DpqeAF3Y/0VkOe4tt804X2qgxBdJEOplhORb4C3jTGvuLosSoHW1JVSyqNoqCullAfR5hellPIgWlNXSikP4rIJvSIjI01CQoKrTq+UUm5p/fr1OcaYqPped1moJyQkkJqa6qrTK6WUWxKRgw29rs0vSinlQTTUlVLKgzQa6iLymohkicjWel6/SkQ2i8gWEVktIsOcX0yllFJN0ZQ29deB54A363l9P3CmMSbfMZH/PKy5oJVS7UhVVRXp6emUl5e7uiiqCQICAoiLi8PX17dZ72s01I0xK0QkoYHXV9f69QdAl/1Sqh1KT08nJCSEhIQErHnSVHtljCE3N5f09HR69erVrPc6u039JuALJx9TKeUE5eXldOnSRQPdDYgIXbp0adFfVU7r0igiZ2GF+vgG9rkFuAUgPj7eWadWSjWRBrr7aOln5ZSauogMBV4BphtjcuvbzxgzzxiTYoxJiYqqt+98g3YcKeSJL3dQUFrVwtIqpZTnOuVQF5F44EPgGmPMrlMvUsMO5pbywjd7OZhX0vjOSql2Izc3l6SkJJKSkujWrRuxsbHHf6+srGzwvampqcyZM6fRc4wdO9YpZf3mm28ICws7Xr5zzjmnwf0TEhLIycn5xfa5c+fy5JNP/mL7ihUrGDFiBD4+PixcuNApZa7RaPOLiMwHJgKRIpIO/AVrDUaMMS9hrVbTBXjB8eeCzRiT4tRS1hIbHghARn4ZQ+PCW+s0Sikn69KlCxs3bgSssAsODubee+89/rrNZsPHp+5ISklJISWl8VhZvXp1o/s01YQJE/jss8+cdrza4uPjef311+sM/FPVaE3dGDPLGNPdGONrjIkzxrxqjHnJEegYY35tjIkwxiQ5Hq0W6ABxEY5QP1bfIjVKKXdx/fXXc+uttzJ69Gjuv/9+1q5dy5gxYxg+fDhjx45l586dgFVzvvBCa23wuXPncuONNzJx4kR69+7NM888c/x4wcHBx/efOHEil112GQMGDOCqq66iZkbaxYsXM2DAAJKTk5kzZ87x4zbF/PnzGTJkCIMHD+aBBx6oc59HH32Ufv36MX78+OPlP1lCQgJDhw7Fy8v54z9dNvdLS4UF+hLk5016voa6Ui318KI0th0udOoxE2NC+ctFg5r9vvT0dFavXo23tzeFhYWsXLkSHx8fvv76ax588EH++9///uI9O3bsYPny5RQVFdG/f39+85vf/KI/94YNG0hLSyMmJoZx48axatUqUlJSmD17NitWrKBXr17MmjWr3nKtXLmSpKQkAGbOnMkNN9zAAw88wPr164mIiGDy5Ml8/PHHXHzxxcffs379et577z02btyIzWZjxIgRJCcnN/vf5FS4XaiLCDHhgVpTV8pDzJw5E29vbwAKCgq47rrr2L17NyJCVVXdHSIuuOAC/P398ff3Jzo6mqNHjxIX9/MhMqNGjTq+LSkpiQMHDhAcHEzv3r2P9/2eNWsW8+bNq/McJze/fPLJJ0ycOJGaTh5XXXUVK1as+Fmor1y5khkzZtCpUycApk2b1oJ/kVPjdqEOEBsRyGENdaVarCU16tYSFBR0/PlDDz3EWWedxUcffcSBAweYOHFine/x9/c//tzb2xubzdaifTyRW07oFas1daU8UkFBAbGxsQC8/vrrTj9+//792bdvHwcOHADg/fffb/J7R40axbfffktOTg7V1dXMnz+fM88882f7nHHGGXz88ceUlZVRVFTEokWLnFn8JnHPUI8I5FhpFSUVHeObV6mO4v777+cPf/gDw4cPb5WadWBgIC+88AJTpkwhOTmZkJAQwsLCmvTe7t2789hjj3HWWWcxbNgwkpO
TmT59+s/2GTFiBFdccQXDhg1j6tSpjBw5ss5jrVu3jri4OBYsWMDs2bMZNMh5fzm5bI3SlJQU09JFMj7ZmMEd723kq7vOoF/XECeXTCnPtH37dgYOHOjqYrhccXExwcHBGGO47bbb6Nu3L3fddZeri1Wnuj4zEVnfUC9D96yp1+qrrpRSzfHyyy+TlJTEoEGDKCgoYPbs2a4uklO57Y1S0L7qSqnmu+uuu9ptzdwZ3LKmHh0SgI+XaKgrpdRJ3DLUvb2E7uEB2vyilFIncctQB+3WqJRSdXHbUI8JD9SaulJKncRtQz0uPJCjReVUVdtdXRSlVBPo1LsnPPXUUyQmJjJ06FAmTZrEwYMHnVJucNPeL2D1gDEGjhSU06NzJ1cXRynVCJ1694Thw4eTmppKp06dePHFF7n//vubNbq1IW5bU48Nt4JcZ2tUyn111Kl3zzrrrOOTfp1++umkp6c3uQyNceuaOmhfdaVa5Ivfw5Etzj1mtyEw9bFmv62jT7376quvMnXq1Kb/gzXCbUO9e1gAoKNKlXJ3HXnq3bfffpvU1FS+/fbbBvdrDrcN9QBfbyKD/XUKXqVaogU16tbSUafe/frrr3n00Uf59ttvf1bWU+W2bepgNcFo84tSnqOjTL27YcMGZs+ezaeffkp0dHSLr6cubh3qcToASSmP0lGm3r3vvvsoLi5m5syZJCUlOXWFJLecerfG3xdv5/XVB9jxyBS8vMRJJVPKM+nUuxadercdiwkLoNJmJ6ekwtVFUUq5CZ16tx2LjbDuMB8+Vk50SICLS6OUcgc69W47potlKNU8rmpuVc3X0s/KvUP9+ACkUheXRKn2LyAggNzcXA12N2CMITc3l4CA5rdAuHXzS1igLyH+PlpTV6oJ4uLiSE9PJzs729VFUU0QEBDwiwFVTeHWoQ7aV12ppvL19T0+klJ5LrdufgHHvOrHyl1dDKWUahfcPtRjwwPJyNc2daWUAk8I9YhACsttFJXXPfGPUkp1JO4f6uE6Ba9SStVw/1CP0L7qSilVw/1D3VFT1yl4lVLKA0I9KtgfP28v0jXUlVLK/UPdy0voHh6gzS9KKYUHhDo4ujVqTV0ppRoPdRF5TUSyRGRrPa+LiDwjIntEZLOIjHB+MRtm9VXXUFdKqabU1F8HpjTw+lSgr+NxC/DiqRereWLCA8kqqqDCVt3Wp1ZKqXal0VA3xqwA8hrYZTrwprH8AISLSHdnFbAparo1HinQ6QKUUh2bM9rUY4FDtX5Pd2z7BRG5RURSRSTVmTPFxem86kopBbTxjVJjzDxjTIoxJiUqKsppx62pqWu3RqVUR+eMUM8AetT6Pc6xrc10CwtARGvqSinljFD/FLjW0QvmdKDAGJPphOM2mb+PN1HB/jqqVCnV4TW6SIaIzAcmApEikg78BfAFMMa8BCwGzgf2AKXADa1V2IboYhlKKdWEUDfGzGrkdQPc5rQStVBseCBbMgpcXQyllHIpjxhRClZNPfNYOXa7LqqrlOq4PCbU48IDqay2k11c4eqiKKWUy3hMqMfoYhlKKeU5oa6LZSillCeFutbUlVLKc0I9JMCX0AAfrakrpTo0jwl1gNiITlpTV0p1aJ4V6uEBOqpUKdWheVio62IZSqmOzbNCPSKQogobBWVVri6KUkq5hGeFengnQLs1KqU6Ls8K9Qjt1qiU6tg8K9QdfdX1ZqlSqqPyqFDvEuSHn4+X1tSVUh2WR4W6l5doDxilVIfmUaEOVhOMrlWqlOqoPDLUtU1dKdVReV6oRwSSXVRBeVW1q4uilFJtzuNCvWZe9cyCcheXRCml2p7HhfrxKXj1ZqlSqgPyuFCPOz4AqdTFJVFKqbbncaHeLSwAL4GMY9r8opTqeDwu1H29vegaGqDNL0qpDsnjQh2sm6Xa/KKU6og8MtRjwwN1qgClVIfkmaEeEUjmsXKq7cbVRVFKqTblmaEeHojNbsguqnB1UZRSqk15Zqhrt0alVAflmaHuGICUrj1glFIdjEeHut4sVUp1NB4Z6kH+PoR38tW+6kqpDscjQx10Cl6lVMfk0aGuzS9KqY7GY0M9xrGsnTHaV10p1XF4bKjHRQRSUllNQVmVq4uilFJtxmNDXbs1KqU6oiaFuohMEZGdIrJHRH5fx+vxIrJcRDaIyGYROd/5RW2emgFIerNUKdWRNBrqIuINPA9MBRKBWSKSeNJufwI+MMYMB64EXnB2QZtL+6orpTqiptTURwF7jDH7jDGVwHvA9JP2MUCo43kYcNh5RWyZzkF+BPh6aV91pVSH0pRQjwUO1fo93bGttrnA1SKSDiwGflfXgUTkFhFJFZHU7OzsFhS36USEhC5BrDuQpz1glFIdhrNulM4CXjfGxAHnA2+JyC+ObYyZZ4xJMcakREVFOenU9btubAKb0gtYuu1oq59LKaXag6aEegbQo9bvcY5ttd0EfABgjPkeCAAinVHAUzEzOY7ekUH8Y8lOnVtdKdUhNCXU1wF9RaSXiPhh3Qj99KR9fgImAYjIQKxQb932lSbw8fbi3vP6szurmI82nPw9pJRSnqfRUDfG2IDbgSXAdqxeLmki8oiITHPsdg9ws4hsAuYD15t20pA9dXA3hsSG8a+lu6iwVbu6OEop1arEVdmbkpJiUlNT2+Rc3+3O4epX1/DnCxO5cXyvNjmnUkq1BhFZb4xJqe91jx1RWtv4vpGM69OF55bvobjC5uriKKVUq+kQoQ5w/3kDyCup5JWV+1xdFKWUajUdJtSH9Qhn6uBuvLxiH7nFuiC1UsozdZhQB7hncn/Kqqp5fvleVxdFKaVaRYcK9T7RwcxM7sHbPxwkPb/U1cVRSimn61ChDnDHOX1B4F9Ld7u6KEop5XQdLtRjwgO5bkxPPtyQzs4jRa4ujlJKOVWHC3WA307sQ7CfD09+tdPVRVFKKafqkKEeEeTH7DN7s3TbUdYfzHd1cZRSymncM9SLjsIpjoS9YVwvIoP9efzLHTo1r1LKY7hfqG96H/7ZD/JObRBRkL8Pcyb1Ye3+PL7d5fK5x5RSyincL9S7DbZ+pp/6vDFXjoynR+dAnvhyJ3admlcp5QHcL9SjBoBfMKSvO+VD+fl4cc+5/dmWWchnWzKdUDillHIt9wt1L2+IGQ4ZzpnhcdqwGAZ0C+GfX+2kqtrulGMqpZSruF+oA8SNhCNboOrUF5X28hLundyfg7mlLNrk8vWylVLqlLhvqNttkLnJKYebNDCavtHBzFuxT3vCKKXcmpuGumN+eCfcLAUQEW4+ozc7jhSxYneOU46plFKu4J6hHhwN4fFOuVlaY3pSDNEh/sxboTM4KqXcl3uGOlhNME6qqQP4+3hzw7herNqTy9aMAqcdVyml2pJ7h3phOhQ6ryvir0bHE+TnzbwVujqSUso9uW+oxzra1Z3UtREgLNCXWaPi+XxLps63rpRyS+4b6t2HgrefU9vVAW4c3wsBXv1uv1OPq5RSbcF9Q93HH7oNdWq7OljzrV80LIb31x2ioLTKqcdWSqnW5r6hDlbXxsMboNrm1MPePKE3pZXVvL
3moFOPq5RSrc3NQ30kVJVC1janHjYxJpQJfSP5z6oDlFdVO/XYSinVmtw81GsGITm3XR1g9hmnkVNcwccbMpx+bKWUai3uHerhPaFTJGSsd/qhx/XpQmL3UOat3KfT8iql3IZ7h7qIYxCS82vqIsLsM3uzL7uE/+3IcvrxlVKqNbh3qIPVBJOzC8qcv9bo+UO6ExseqFMHKKXchmeEOkDGj04/tK+3FzeO78W6A/n8+JMuUK2Uav/cP9RjRgDi9P7qNa4c2YPQAB/mfatTByil2j/3D/WAUIge2Crt6mAtUH316T1Zsu0I+3NKWuUcSinlLO4f6gCxydYcMK20wMX1YxPw9fLilZVaW1dKtW+eEepxI60bpXmtE7rRoQHMGB7LwvXp5BRXtMo5lFLKGTwn1KHVmmAAbj6jFxU2O29+r1MHKKXaL88I9aj+4BfcajdLAfpEh3DOwGje+v4AZZU6dYBSqn1qUqiLyBQR2Skie0Tk9/Xsc7mIbBORNBF517nFbISXN8SOaNWaOsAtZ5xGfmkV7637qVXPo5RSLdVoqIuIN/A8MBVIBGaJSOJJ+/QF/gCMM8YMAu50flEbETcSjm6FytZb3GJkQgRjenfhsS92sHZ/XqudRymlWqopNfVRwB5jzD5jTCXwHjD9pH1uBp43xuQDGGPaflx9bArYbZC5qdVOISI8f9UI4iICuemNdWzPLGy1cymlVEs0JdRjgUO1fk93bKutH9BPRFaJyA8iMqWuA4nILSKSKiKp2dnZLStxfeKcv7xdXToH+fHmTaMJ9vfh2tfWcihPl71TSrUfzrpR6gP0BSYCs4CXRST85J2MMfOMMSnGmJSoqCgnndohONqatbGV29UBYsMDefPGUVTa7Fzz6hrt5qiUajeaEuoZQI9av8c5ttWWDnxqjKkyxuwHdmGFfNuKS2nVHjC19e0awmvXj+RIYTk3/GcdxRXOXX1JKaVaoimhvg7oKyK9RMQPuBL49KR9PsaqpSMikVjNMW0//DJuJBRmQOHhNjldcs8IXrhqBNsyC5n9VioVNu3qqJRyrUZD3RhjA24HlgDbgQ+MMWki8oiITHPstgTIFZFtwHLgPmNMbmsVul7HByG1TW0d4OwBXXni0qGs2pPL3R9soloX1FBKuZBPU3YyxiwGFp+07c+1nhvgbsfDdboNAW8/q109cVrj+zvJpclx5JZU8PfFO+gS5MfD0wYhIm12fqWUqtGkUHcbPv7QbWirLG/XmFvOOI2c4krmrdhHlyB/7jin7W8pKKWUZ4U6WE0w61+Haht4t+3l/X7KAHKKK/jX17voEuzH1af3bNPzK6WUZ8z9UltcCtjKICutzU/t5SU8fulQzh4QzUOfbOWLLZltXgalVMfmmaEObXqztDZfby+e/9UIRsRHcMd7G9maUeCSciilOibPC/XwnhAU5bJQBwj08+aVa1OICPJlznsbdFZHpVSb8bxQF7HmgWmDkaUNiQjy46nLk9iXXcLfPt/m0rIopToOzwt1sJpgcndbqyG50Lg+kdw8oRfvrPmJr7cddWlZlFIdg4eGumMQkgu6Np7s3vP6M7B7KA/8dzNZReWuLo5SysN5ZqjHDAfEpe3qNfx9vPn3lUkUV9i4b8FmTCstjq2UUuCpoR4QCtED20WoA/TrGsKD5w/k213ZusapUqpVeWaog2PGxnXQTmrG147pycT+Ufx98XZ2HS1ydXGUUh7Kc0M9NgXKj0HuXleXBLBWTXrisqEE+/swZ/4GndFRKdUqPDfUj98sbR9NMADRIQE8cdlQdhwp4sklO11dHKWUB/LcUI/qD34hsPl9KG8/ozonDezKVaPjeXnlfr7bnePq4iilPIznhrqXN0y4G/Yuh+dGwdYP2037+p8uSKR3VBD3LNhIfkmlq4ujlPIgnhvqYIX6zcsgpCssvAHeuQzy9ru6VAT6efPMlcPJK6nkwY+2aDdHpZTTeHaoA8SOgF8vgymPwU8/wAunw4onwebaGvLg2DDumdyfL7YeYcH6dJeWRSnlOTw/1MGaV/3038Bta6HvubDsr/B/E+Dg9y4t1s0TenN6787M/TSNrRkFWmNXSp0ycVWQpKSkmNRUF/VM2fklLL4PCn6C4dfAuY9Ap84uKcrhY2VMeXoFheU2QgJ8SOweSmJMKINiwkjsHkqf6GD8fDrGd69SqnEist4Yk1Lv6x0y1AEqS+Dbx2H1cxAYbgX70CvA27fNi3Ior5QVu7PZdriQbZmF7MgsoqzK6sfu6y30jQ5hUIwV9kPjwhkRH65roCrVQWmoN+ZoGiy6E9LXQlA0DLsSRlwLka5bY7TabtifU8K2zMLjQb/tcAE5xdZ9gGE9wrljUh/O6h+t4a5UB6Oh3hR2O+z+Cja8Bbu+BLsNepwOI66BQTPAL8jVJQQgq6icr7dl8cI3e0jPL2NoXBhzzu7LpIEa7kp1FBrqzVV0FDbNtwI+d481gGnwJVbtPTbZWoTDxaqq7Xz4YzrPLd/DobwyBsWEMmdSXyYndm12uBtjqLYbfLy13V4pd6Ch3lLGWF0gN7wFaR9BVSlEDbRq78NmuezGam1V1XY+3pDBc8v3cDC3lIHdQ7ljUh8mJ3bDy+uX4W63Gw7mlZJ2uIC0w4XWI6OA4gob907uz03je9X5PqVU+6Gh7gzlhZD2Ifz4ljWXjG8nq+Y+5jYIj3d16bBV2/lk42GeW76H/TklDOgWwpxJfUnoEnQ8wGva5osrbAD4eAl9u1o3YHOLK1i+M5vTe3fmn5cnERse6OIrUkrVR0Pd2Y6mWT1mtnxg1eYHXwrj7oBug11dMmzVdhZtPsyzy/awL7vk+PZOft4M7B7KoJiaRxh9uwbj7+MNWE0wC1LTeXhRGl4iPHLxIC5OitV2eqXaIQ311lKQDj+8COtfh8pi6HMujL8Teo5zebt7td2wdNtRKmzVDI4NI6FLEN5NaFb5KbeUuz/YSOrBfC4Y0p1HZwwmvJNfG5RYKdVUGuqtrSwf1r0CP7wEpTnWzdRxd8KAC6xJxdxMtd3w0rd7+dfSXXQO8uPJmcM4o1+Uq4ullHLQUG8rVWWw8V1Y/Szk74cufWDM7TD08nbTJbI5tmYUcOf7G9mTVcx1Y3ry+6kDCfRzvy8ppTyNhnpbs1fDtk9g1dOQualWl8jrrMnF3Kiduryqmse/3MF/Vh3gtKggnr5iOEPiwlxdLKU6NA11VzEGDq2xesykfWh1iYxOtOaaGXoFBHVxdQmb7LvdOdy7YBM5xRWMOa0L4Z38CAv0ITTAl7BAX0IDrZ9hgb7Ht0WG+NHJz8fVRVfK42iotwfHu0S+CRnrwdvPanMffg30Pgu82v/An4LSKh5fsoO0jAIKyqooLLdRUFZFtb3u/378vL24e3I/bp7Qu0k3aZVSTaOh3t4c3WYNaNo037rJGhYPw6+CXmdYc88ERUJAmFs00xhjKK2spqCsygp6x8+CsiqWbjvKV9uOMjIhgn/OTCK+SydXF1cpj6Ch3l7ZKmDH51bA710O1PocvP0gKMp6BEf//HlYnNV90q99h6Qxhg9/z
GDup2nYjeGhCxO5YmQP7fuu1CnSUHcHBRmQsxOKs6EkG0qy6n5ur7L2DwizpipIvgGiB7i27I3IOFbGfQs2sXpvLmcPiOaxS4cQHRLQ7OMYY6iqNk6dW/617/bzzLLdnN0/munDYxl3WhedA0e1exrqnsIYKD8GR7bA+jesHjb2KogfCyk3QuI08PF3dSnrZLcb3vj+AI99sYNOft48OmMI5w/p3uj7qqrtrN2fx1dpR1i67Si5JZU8fUUSU5vw3sa8u+YnHvxoC4NiQjmUV0phuY3IYH8uHNqdi4fHMiwuTP+qUO2SU0JdRKYA/wa8gVeMMY/Vs9+lwEJgpDGmwcTWUD9FJTmw8R1I/Y/VLz6ws9U2n3wDdDmt4fdWFFvvydsPefuspqDRt0BgRKsWeU9WEXd/sInN6QVcnBTDw9MGE9bp54uSFFfYWLErm6/SjrBsRxaF5TYCfL2Y0DeKrKIKtmYU8NTlw5ieFNvicny8IYO7PtjIWf2jeenqZAyG5Tuy+WRjBv/bkUWlzU6vyCCmJ8VwcVIsCZGNjzOoqraTXVRBVlEFQX7e9O0a0uLyKdWQUw51EfEGdgHnAunAOmCWMWbbSfuFAJ8DfsDtGuptxG6H/d9C6mtWG72phl5nQsoN1mRjNcFd8zN/PxQf/eVxQmNhxkvWDdtWVFVt5/nle3h22R6igv15cuYw+nUL5n/bs/gq7Qir9uZSabMT0cmXSQO7MjmxKxP6RhHo501JhY2b3ljHmv15PH7pUC5P6dHs8y9JO8Jv3/mRUQmdeX1GV/z3LgFbGVTbwF5FeUUFB7KOsf/oMbILS/Ax1UQHe5MQ4U9+rwtJCzqdLEd4ZxVVkFVYTlZRBXklP1/I/KWrRzBl8Kn/RaHUyZwR6mOAucaY8xy//wHAGPP/TtrvaWApcB9wr4a6CxQdsW68rn8DCg79/LWQGOjcGzonOH72hohe0LkX5O6FD2+2fo79HZz9p1Zvytmcfoy73t/I3uwSRKzWpR6dA5mc2I3JiV1J7hlxon3bXg3ZO6Eok7KYMdwyfwsrd+fwt4sHc/XpPZt8zhW7svn1G6lM6lrCM3HL8N36vrUgSm1ePuDlC96+2MWHcrsXxVWCl72CzhTxsO1a3jFTiArxJzrEn6iQAKJDrefRIQFEh/jz7LLd7MsuYdHvxjeplq9Uczgj1C8Dphhjfu34/RpgtDHm9lr7jAD+aIy5VES+oZ5QF5FbgFsA4uPjkw8ePNiCS1KNsldbtfeqciu0IxLAt5HpdCtL4Ks/WTX+bkPgklda/SZseVU1r363n2q7YfKgrvTvGoIAFB62pjjOWA8ZP8LhDdakaQCBnbENuZxH0pN5c18QD12YyE3jezV6rrX783jotY+5N/Azzqn6BvHygeTrYcxvra6k3r5WoNfTjr4/M5vIJbcRcmAJZswc5NyH6x1fkJ5fygXPfEdseCAf/nYsAb46vYJynlYPdRHxApYB1xtjDjQU6rVpTb2d2vkFfHK7FaLn/hVG3dy6febL8iFzsyPEf4T0VCg+Yr3m7Wd9wcQmQ2yK1etn83tWM1N1Jfv9B/J/xeM47axrufmcYfVf0tb17FrwF86XVXj5+CEpN1rTJYd0a15Z7dXwxf3WBG6DL4OLX6j3L5plO45y4+upXDmyB49dOrR551GqAa3e/CIiYcBewFGVohuQB0xrKNg11Nux4iz45DZr3dY+58L05yGka8uOVV1lNQXlH6j7UV5wYt/Op0FcyokQ7za47tAsyYXN72N+fAPJ3kGJ8Wd/1/MYdOFtSI/RJ76EsnZQ+NXfCd7zKRX4UZ18A8ET7275tYDVTvTdv+B/D1v3H6542/qyqcM/luzg+eV7eXLmMC5Ljmv5OZXLHMwtoXOQHyEBvo3v3EacEeo+WDdKJwEZWDdKf2WMSatn/2/Qmrr7M8aqkX71J2uWyWnPwYDz697XVgnHDlprutY88vZZoV2QDsZ+Yl9vP+sGbkSC9QjvCV0TIWZE85cINIbqQ+tY9+G/GZK/lCCpwET2R5J+BZkbMWkfU4Y/C72mcPYNfyWuhxNXqdr0nvXFFzUArloAoTG/2MVWbeeaV9ey4VA+H982jgHdQp13/nboaGE56w7kccGQ7h7RHXTt/jyueuUHwjv58ZeLEtvNdTmrS+P5wNNYXRpfM8Y8KiKPAKnGmE9P2vcbNNQ9R/ZO+O+v4chmqw06cbp1QzV3ryO890L+QavXTY3Azla3yoheJ8K75hHS3elz3djthr9+uJaSDQv4XfhqepSkYfcN4i37ebxhLmTerZPpE90KXQz3LoP3r4GAcLh6IUQP/MUuWUXlXPDMd4T4+/DJ7eNaXOOz2027Xj9246Fj3PxmKtlFFcw5uw93T+7v6iKdkp9yS5n+/HeEd/Ij2N+HLRkFnD0gmkemDyIuwrWjuXXwkTp1tkpY/iis+jfHpzPwDYIuva1542s/Ovd2yaLcxhge+Wwb/1l1gFuGeLE6w8bBUj/m33w6g2NbcbrgzE3wzkywlcOV8yFh3C92WbMvl1+9soYpg7vx3KzhzartZRwr4y+fpPHNzizGnNaFC4d257xB3drVilSfbjrMfQs2ER3qz5DYMBZvOcJfLx7MNc3omVQvu90adNeG/00VlldxyQuryS6q4OPbxtEjIpDXVx/gqaW7MAbumdyP68cmuGz0sYa6cp6jaVCaZ4V3SLd2N+mYMYYnluzkxW/20snPm7duGk1yz9YdUAVYf6m8c5nV3HTJPBg04xe7vPjNXh7/cgdzL0rk+nGN99axVdv5z6oD/OtrK0guGtadH/bl8VNeKT5ewvi+kVw4NIZzE7sSFtiC2v+ur6x1dif/rfk3jB3sdsPTX+/imWV7GJXQmZeuSSY0wIfZb61n2c4sXrzqFPvqZ+2AT39n9YKacDeccT/4tO6Xma3azo1vpLJ6Tw5v3TSaMaedmCI7Pb+UP3+SxrIdWQyODeX/zRjqkvUFNNRVh2KM4aMNGfSOCiapR3jbnbg0D+bPsubQP2eu1Wuo1opXdrvhlrdS+XZXNh/MHsPw+Pq/bDb8lM+DH21le2YhkwZE87DjT35jDFszCvls82E+25xJxrEy/Ly9OKNfJBcM7c45A7s2rXln3auw+F7rXkd4T7jmo8ZHIZ98uZU27vlgE19sPcLM5DgenTHk+Lw8ZZXV/OqVH0g7XMjbN41mVK9m1rJtldbN6JVPWv+G8WNg52KIHgQXPw8xw5t3vGaY+2kar68+wGOXDOHKUb+8B2OMYfGWI8xdlEZucQXXj+3FPZP7EeTfdmsHaKgr1Vaqyqz7Dzs+A29/6DUB+k2BvpMhoicFpVVc8OxK7HbD53MmEBH081pnQVkV/1iyg3fW/ETXkADmTkvkvEHd6myuMcaw8dAxPt+cyedbMsksKMfPx4uJ/aIYHBtGaIAPoY5FS0IDfQkN9CHU35vItY/j9/2/rXKNuQ0WXA+IdbM3dkSTLjOzoIyb30wl7XAhD04dyK8n9PpFGfNLKrn0pdXkFFWw4Nax9O/WxHsah9ZZtfPs7Va30SmPQXCU
1dV20Z3WxHYT7oYz7nP6ALm3fjjIQx9v5dfje/GnCxMb3LegrIonvrQ+q9jwQB6ZPohJA0+hV1UzaKgr1ZbsdjiwEnYtgd1LrJvJAFEDod957IkYx0UfVTK6TzSvXTcSLy/BGMOizZn89bNt5BZXcN3YBO6Z3J/gJtb+7HbDhkP5LNqUyZdbj3CksPwX+/hi4wnf/2OG9yrerZ7EP31upktoJ6b3KOOm/ffgX5mPXPEW9JnU4Lk2HjrGLW+mUlJh45lZwxsMsvT8Ui55YTVeInz427HEhDcwAK6iGJb9Dda8ZPUkuuAp6D/l5/uU5cOXf7DWIohOtMYJOKnWvnJ3Ntf/Zx0T+0Ux79qUJi/sknogjz98uIXdWcWM7xNJQmSn46uAHV8N7KTfg/19TqkXjYa6Uq6Us8cK911fwsHVYLdR4RvKF+WD6TTofAaOm8aDXx1h5e4chsSG8fcZQ065nbaq2k5RuY3CsioKy6soKcij7ze3Epm9hrW9b2dF9DUUVtg4kFvKD3tzCavO5e2AJ+hDOhtTHqPPpBvqbKevuSEaFeLPq9eNbFLte3tmIZe/9D1dwwJYeOuYum/w7vkaFt0FBT/ByJth0p8hoIHunzu/hM/utMZTjL8Lzry/ebV2Y6ymJy9rpO+erGJmvLCK2PBAFv5mbJO/TGtU2uzMW7GXD3/M4JhjkZj6VgQD8PYSfjvxNO5pYQ8hDXWl2ovyAti7HLPrS4q3fkFI9TEADppuVMWMpHfyOXjFj4bI/s7r9lmQDm9fZv3FMP15GHbFz14uqbCxcncO323dy8U77iWFbfzddjVb4q9h0sBozhnYlfjOnXj6f7t55n+7GZkQwUtXJ9MluFaIGmP9dbL6OTi4yhqH0KUPRPaFLn3YUh7NDYvySYiL5e1fjz4xbUJJLix50BolHNkPpj0L8ac37brK8mHJH62ZSqMTrWurq/moqgyytls3+bO2wdGt1vPKEkicTtHAWVy4yFBSWc3Ht41zSndFY6zjFZRVUVBaVefKYMkJEZzVP7pFx9dQV6odKimr4NGX32Wk13amhh0kIHMdlOZaLwaEQ49R0GO0FXIxI1q20tWRLVZ3y8oSa+Rr7zMb3L26sozCd24g4uAXfOB3CQ8UXoLBi6gQf7KLKpiZHMffZgzG38cRytVVkPYRfP+c1bWzUyQMvBCKjkLubqs3UK0J03JMKPmB8Zw2YDheod2teYbKj8H4u2HCPeDb/MVT2PUVLJrjqLXfaY1EPpp2Irzz9p4Y/ObbyRpL0HUQIJi0j5CKQg6arvgkX0vsxBvrHETW3mioK+UOjLEGdB1aA4d+gJ/WWKthgTXRWLehEDfS8Ui2BnY11C67dxm8f63VjHHVAkeQNUGt+W1KBlzGgtgH+G5fIeP7dOG6sQlWW3DZMfjxDVjzf1CYYdWyx9wGQ6/4+cRx1VVWd8/c3ZCzm13bfiT/0HYS/bIIseVZ00FMe7bpZatP2TFHrf3tE9sielnH7TrYGrHcdbA1+M3R5GKM4Y8L1lG68SP+GJNKVM5aEC9rWowR11g3kr3bz9QAtWmoK+WuSvPg0FpH0K+xZqusKrVe69TFCvjYFMd8OSNOzEGz8V2rB0nUAPjVBxDWzAVFjIEVT8Lyv1khd/kbVtfC/IPWjcwf37QmfEuYYE3V3OfcJjcXPfHlDl74Zi/3nxXHbycPxW4gp6SCzGPlHD5WxuEC62dmQRmHHdsAbhjXi+vG9qSTXwPt3Yc3WPPiRw8A/4bb++et2MvfF+84Mfo1d6/VlLPxXSjKtNYEHnYlDL8Wovo1+Z+uLWioK+Upqm1WV7/0ddZslumpJ2rzCET1t2qou76wFkq54q16JxtrkvVvWDckY4Zb7eTbPrFqs4MugbG3Q/f6Z8asjzGG+xZuZuH6dOIiAskqrKCy2v6zfQJ9vekeHkBMWCAx4QEcKaxgxa5sIoP9uPXM07j69J4tns74YG4J7679iXkr9nH+4O48O2v4z6dfqLbB3v9ZX1y7vrSajyISrGvtngQxSdZPF4yarqGhrpQnKzsGhx1TFqevs6Yx7ncenP+kc0Zf7vgcFt5o9btPvg5G39r8mv9Jqqrt/GPJTo4UlNM9PIDY8EC6OwI8JiyQ8E6+v+jyt/5gHk8t3cWqPbl0DfXntrP6cMXIHifa9xtQabPz1bYjvLf2EN/tycFLYOqQ7jx52TAC/Rp4f3EWbFlo/ZWUudG6R1AjPP6koB8OQV3qPo6TaagrpU5N4WGrOaORJo228P3eXJ5aupN1B/KJCQvgd5P6cllyHL51zMOyL7uY99cdYuH6dHJLKokND+TKkT2YmdKDbmEtuClblm/dED680Qr5wxut5SFrhPeEPudYX6q9zmh8YZoW0lBXSnkUYwwrd+fwz6W72HToGPGdOzFnUl8uToqh2hi+3HqE+Wt/4od9eXh7CecMjGbWqHgm9I1q8qCiJis7Zs1gengj/PQ97PsWqkrAJ8AK9r6TrZAPd960zxrqSimPZIxh2Y4snlq6i7TDhfTs0onCsiryS6vo0TmQK0fGMzM5jujQFtTKW8pWAQe+sxaY2bXkRE0+aiD0mwx9z7O6qnq3fK4YDXWllEczxrAk7QivfrefqBB/Zo2KZ9xpka6ff94Ya9BXzZQRjhHF+IdZo2DH3t74MerQWKi33dRiSinVCkSEKYO7n9o0v61BxBpVG9nXCvDyQti33BowFdp6ZdVQV0qpthAQaq0clji9VU/jmqU7lFJKtQoNdaWU8iAa6kop5UE01JVSyoNoqCullAfRUFdKKQ+ioa6UUh5EQ10ppTyIy6YJEJFs4GAL3x4J5DixOO2Bp12Tp10PeN41edr1gOddU13X09MYE1XfG1wW6qdCRFIbmvvAHXnaNXna9YDnXZOnXQ943jW15Hq0+UUppTyIhrpSSnkQdw31ea4uQCvwtGvytOsBz7smT7se8Lxravb1uGWbulJKqbq5a01dKaVUHTTUlVLKg7hdqIvIFBHZKSJ7ROT3ri6PM4jIARHZIiIbRcTt1vgTkddEJEtEttba1llElorIbsfPCFeWsbnquaa5IpLh+Jw2isj5rixjc4hIDxFZLiLbRCRNRO5wbHfLz6mB63HnzyhARNaKyCbHNT3s2N5LRNY4Mu99EfFr8Dju1KYuIt7ALuBcIB1YB8wyxmxzacFOkYgcAFKMMW45aEJEzgCKgTeNMYMd254A8owxjzm+fCOMMQ+4spzNUc81zQWKjTFPurJsLSEi3YHuxpgfRSQEWA9cDFyPG35ODVzP5bjvZyRAkDGmWER8ge+AO4C7gQ+NMe+JyEvAJmPMi/Udx91q6qOAPcaYfcaYSuA9oHXXhlKNMsasAPJO2jwdeMPx/A2s/+HcRj3X5LaMMZnGmB8dz4uA7UAsbvo5NXA9bstYih2/+joeBjgbWOjY3uhn5G6hHgscqvV7Om7+QToY4CsRWS8it7i6ME7S1RiT6Xh+BOjqysI40e0istnRPOMWTRUnE5EEYDiwBg/4nE66HnDjz0hEvEVkI5AFLAX2AseMMTbHLo1mnru
Fuqcab4wZAUwFbnP86e8xjNXG5z7tfPV7ETgNSAIygX+6tDQtICLBwH+BO40xhbVfc8fPqY7rcevPyBhTbYxJAuKwWiYGNPcY7hbqGUCPWr/HOba5NWNMhuNnFvAR1ofp7o462j1r2j+zXFyeU2aMOer4n84OvIybfU6Odtr/Au8YYz50bHbbz6mu63H3z6iGMeYYsBwYA4SLiI/jpUYzz91CfR3Q13E32A+4EvjUxWU6JSIS5LjRg4gEAZOBrQ2/yy18ClzneH4d8IkLy+IUNeHnMAM3+pwcN+FeBbYbY56q9ZJbfk71XY+bf0ZRIhLueB6I1SFkO1a4X+bYrdHPyK16vwA4uig9DXgDrxljHnVtiU6NiPTGqp0D+ADvuts1ich8YCLWNKFHgb8AHwMfAPFYUyxfboxxmxuP9VzTRKw/6w1wAJhdqz26XROR8cBKYAtgd2x+EKsd2u0+pwauZxbu+xkNxboR6o1V4f7AGPOIIyPeAzoDG4CrjTEV9R7H3UJdKaVU/dyt+UUppVQDNNSVUsqDaKgrpZQH0VBXSikPoqGulFIeRENdKaU8iIa6Ukp5kP8PHlWaGAnMUB8AAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "import matplotlib.pyplot as plt \n",
    "\n",
    "plt.title('Loss vs Epochs')\n",
    "plt.plot(model_history[0].history['loss'], label='Training Fold 1')\n",
    "plt.plot(model_history[1].history['loss'], label='Training Fold 2')\n",
    "#plt.plot(model_history[2].history['loss'], label='Training Fold 3')\n",
    "plt.legend()\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:Error in loading the saved optimizer state. As a result, your model is starting with a freshly initialized optimizer.\n"
     ]
    }
   ],
   "source": [
    "from keras.models import load_model\n",
    "\n",
    "model = None\n",
    "model = load_model('clean_notebooks/cnn_inj_collage_1C.h5')"
   ]
  },
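  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The warning above only means the optimizer state could not be restored from the HDF5 file; the weights themselves load fine, so the evaluation below is unaffected. If training had to be resumed, the model would first need re-compiling, e.g. (sketch, reusing the `opt` defined for training):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Only needed to resume training; prediction/evaluation works without it.\n",
    "# `opt` is the optimizer instance created in the training section above.\n",
    "model.compile(loss=\"mean_squared_error\", optimizer=opt)"
   ]
  },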
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model: \"model_3\"\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "input_2 (InputLayer)            [(None, 240, 240, 1) 0                                            \n",
      "__________________________________________________________________________________________________\n",
      "conv2d_3 (Conv2D)               (None, 240, 240, 32) 320         input_2[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "activation_5 (Activation)       (None, 240, 240, 32) 0           conv2d_3[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "batch_normalization_4 (BatchNor (None, 240, 240, 32) 128         activation_5[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "max_pooling2d_3 (MaxPooling2D)  (None, 120, 120, 32) 0           batch_normalization_4[0][0]      \n",
      "__________________________________________________________________________________________________\n",
      "conv2d_4 (Conv2D)               (None, 120, 120, 64) 18496       max_pooling2d_3[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "activation_6 (Activation)       (None, 120, 120, 64) 0           conv2d_4[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "batch_normalization_5 (BatchNor (None, 120, 120, 64) 256         activation_6[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "max_pooling2d_4 (MaxPooling2D)  (None, 60, 60, 64)   0           batch_normalization_5[0][0]      \n",
      "__________________________________________________________________________________________________\n",
      "conv2d_5 (Conv2D)               (None, 60, 60, 128)  73856       max_pooling2d_4[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "activation_7 (Activation)       (None, 60, 60, 128)  0           conv2d_5[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "batch_normalization_6 (BatchNor (None, 60, 60, 128)  512         activation_7[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "max_pooling2d_5 (MaxPooling2D)  (None, 30, 30, 128)  0           batch_normalization_6[0][0]      \n",
      "__________________________________________________________________________________________________\n",
      "flatten_1 (Flatten)             (None, 115200)       0           max_pooling2d_5[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "dense_8 (Dense)                 (None, 16)           1843216     flatten_1[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "activation_8 (Activation)       (None, 16)           0           dense_8[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "dense_6_input (InputLayer)      [(None, 6)]          0                                            \n",
      "__________________________________________________________________________________________________\n",
      "batch_normalization_7 (BatchNor (None, 16)           64          activation_8[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "dense_6 (Dense)                 (None, 8)            56          dense_6_input[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_4 (Dropout)             (None, 16)           0           batch_normalization_7[0][0]      \n",
      "__________________________________________________________________________________________________\n",
      "dropout_3 (Dropout)             (None, 8)            0           dense_6[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "dense_9 (Dense)                 (None, 4)            68          dropout_4[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "dense_7 (Dense)                 (None, 4)            36          dropout_3[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "activation_9 (Activation)       (None, 4)            0           dense_9[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_1 (Concatenate)     (None, 8)            0           dense_7[0][0]                    \n",
      "                                                                 activation_9[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "dense_10 (Dense)                (None, 4)            36          concatenate_1[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_5 (Dropout)             (None, 4)            0           dense_10[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "dense_11 (Dense)                (None, 1)            5           dropout_5[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 1,937,049\n",
      "Trainable params: 1,936,569\n",
      "Non-trainable params: 480\n",
      "__________________________________________________________________________________________________\n"
     ]
    }
   ],
   "source": [
    "model.summary()"
   ]
  },
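  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The summary makes the two branches of `create_hybrid` explicit: a small MLP on the 6 tabular attributes and a 3-stage CNN on the 240x240x1 collage, concatenated into a joint regression head. A sketch that reproduces the layer and parameter structure above (activation choices and dropout rates are assumptions, since they are not visible in the summary):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from keras.layers import (Input, Conv2D, Activation, BatchNormalization,\n",
    "                          MaxPooling2D, Flatten, Dense, Dropout, concatenate)\n",
    "from keras.models import Model, Sequential\n",
    "\n",
    "def create_hybrid_sketch(n_attributes, shape=(240, 240, 1)):\n",
    "    # MLP branch for the tabular attributes (dense_6 / dense_7 above)\n",
    "    mlp = Sequential([Dense(8, activation=\"relu\", input_dim=n_attributes),\n",
    "                      Dropout(0.2),  # rate is an assumption\n",
    "                      Dense(4, activation=\"relu\")])\n",
    "    # CNN branch: three (conv -> relu -> batchnorm -> maxpool) stages\n",
    "    img_in = Input(shape=shape)\n",
    "    x = img_in\n",
    "    for n_filters in (32, 64, 128):\n",
    "        x = Conv2D(n_filters, (3, 3), padding=\"same\")(x)\n",
    "        x = Activation(\"relu\")(x)\n",
    "        x = BatchNormalization()(x)\n",
    "        x = MaxPooling2D(pool_size=(2, 2))(x)\n",
    "    x = Flatten()(x)            # 30*30*128 = 115200, matching the summary\n",
    "    x = Dense(16)(x)\n",
    "    x = Activation(\"relu\")(x)\n",
    "    x = BatchNormalization()(x)\n",
    "    x = Dropout(0.5)(x)         # rate is an assumption\n",
    "    x = Dense(4)(x)\n",
    "    x = Activation(\"relu\")(x)\n",
    "    # concatenate the two 4-unit heads and regress a single scaled FVC value\n",
    "    merged = concatenate([mlp.output, x])\n",
    "    z = Dense(4, activation=\"relu\")(merged)\n",
    "    z = Dropout(0.2)(z)         # rate is an assumption\n",
    "    z = Dense(1, activation=\"linear\")(z)\n",
    "    return Model(inputs=[mlp.input, img_in], outputs=z)"
   ]
  },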
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# F - Postprocessing : Evaluation"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Training set"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:predicting ...\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "avg. FVC: 2690.479018721756, std FVC 832.7709592986739\n",
      "mean difference : 146.40%, std: 1064.23%\n"
     ]
    }
   ],
   "source": [
    "from postprocessing.evaluate import evaluate_hybrid\n",
    "preds = evaluate_hybrid(model, df, trainAttrX, train_dataset, trainY, sc)"
   ]
  },
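  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`evaluate_hybrid` prints the average and spread of the true FVC plus the relative error of the predictions. The cell below is a hedged sketch of the kind of computation behind those numbers, assuming the helper inverse-scales the model output with `sc` before comparing; the function name and details are assumptions, not the implementation in `postprocessing.evaluate`."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Assumed sketch of the reported statistics (not the real evaluate_hybrid)\n",
    "def report_difference(model, attrs, images, y_scaled, sc):\n",
    "    preds = model.predict([attrs, images]).reshape(-1, 1)\n",
    "    fvc_pred = sc.inverse_transform(preds).reshape(-1)\n",
    "    fvc_true = sc.inverse_transform(np.asarray(y_scaled).reshape(-1, 1)).reshape(-1)\n",
    "    print('avg. FVC: {}, std FVC {}'.format(fvc_true.mean(), fvc_true.std()))\n",
    "    diff_pct = 100 * (fvc_pred - fvc_true) / fvc_true\n",
    "    print('mean difference : {:.2f}%, std: {:.2f}%'.format(diff_pct.mean(), diff_pct.std()))\n",
    "    return fvc_pred"
   ]
  },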
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "35/35 [==============================] - ETA: 20s - loss: 0.13 - ETA: 17s - loss: 0.09 - ETA: 16s - loss: 0.10 - ETA: 16s - loss: 0.11 - ETA: 15s - loss: 0.11 - ETA: 14s - loss: 0.10 - ETA: 14s - loss: 0.13 - ETA: 13s - loss: 0.12 - ETA: 13s - loss: 0.11 - ETA: 13s - loss: 0.11 - ETA: 12s - loss: 0.10 - ETA: 12s - loss: 0.10 - ETA: 11s - loss: 0.10 - ETA: 11s - loss: 0.10 - ETA: 10s - loss: 0.10 - ETA: 10s - loss: 0.11 - ETA: 9s - loss: 0.1154 - ETA: 9s - loss: 0.145 - ETA: 8s - loss: 0.173 - ETA: 8s - loss: 0.170 - ETA: 7s - loss: 0.165 - ETA: 7s - loss: 0.165 - ETA: 6s - loss: 0.163 - ETA: 6s - loss: 0.158 - ETA: 5s - loss: 0.154 - ETA: 5s - loss: 0.153 - ETA: 4s - loss: 0.149 - ETA: 3s - loss: 0.146 - ETA: 3s - loss: 0.148 - ETA: 2s - loss: 0.145 - ETA: 2s - loss: 0.143 - ETA: 1s - loss: 0.143 - ETA: 1s - loss: 0.139 - ETA: 0s - loss: 0.138 - ETA: 0s - loss: 0.138 - 19s 548ms/step - loss: 0.1381\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "0.13809700310230255"
      ]
     },
     "execution_count": 23,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model.evaluate([trainAttrX, train_dataset], trainY)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Test set"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:predicting ...\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "avg. FVC: 2690.479018721756, std FVC 832.7709592986739\n",
      "mean difference : 80.62%, std: 228.64%\n"
     ]
    }
   ],
   "source": [
    "preds = evaluate_hybrid(model, df, testAttrX, test_dataset, testY, sc)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "9/9 [==============================] - ETA: 5s - loss: 0.081 - ETA: 4s - loss: 0.070 - ETA: 3s - loss: 0.098 - ETA: 3s - loss: 0.136 - ETA: 2s - loss: 0.119 - ETA: 1s - loss: 0.105 - ETA: 1s - loss: 0.100 - ETA: 0s - loss: 0.092 - ETA: 0s - loss: 0.093 - 6s 633ms/step - loss: 0.0939\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "0.09393472969532013"
      ]
     },
     "execution_count": 25,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model.evaluate([testAttrX, test_dataset], testY)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# G - Postprocessing : Competition score"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "from postprocessing.dropout_predictions import create_dropout_predict_function"
   ]
  },
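  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`create_dropout_predict_function` builds a predictor that keeps dropout active at inference time, which is what turns the 20 repeated predictions below into a Monte Carlo estimate of the predictive distribution. The sketch below shows one common way to implement this with the TF1-compat backend (consistent with the `disable_eager_execution()` calls in this notebook); it is an assumption about the helper, not its actual source."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Assumed MC-dropout construction (the real helper lives in\n",
    "# postprocessing/dropout_predictions.py)\n",
    "import keras.backend as K\n",
    "from keras.models import Model\n",
    "\n",
    "def mc_dropout_predict_function(model, dropout):\n",
    "    conf = model.get_config()\n",
    "    # force the requested rate on every Dropout layer\n",
    "    for layer in conf['layers']:\n",
    "        if layer['class_name'] == 'Dropout':\n",
    "            layer['config']['rate'] = dropout\n",
    "    # rebuild the graph with the trained weights\n",
    "    new_model = Model.from_config(conf)\n",
    "    new_model.set_weights(model.get_weights())\n",
    "    # learning_phase=1 keeps dropout on during the forward pass\n",
    "    return K.function(new_model.inputs + [K.learning_phase()], new_model.outputs)"
   ]
  },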
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(1093, 240, 240)"
      ]
     },
     "execution_count": 13,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "train_dataset.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:Error in loading the saved optimizer state. As a result, your model is starting with a freshly initialized optimizer.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[<tf.Tensor 'dense_6_input_1:0' shape=(None, 6) dtype=float32>, <tf.Tensor 'input_2_1:0' shape=(None, 240, 240, 1) dtype=float32>]\n"
     ]
    }
   ],
   "source": [
    "from keras.models import load_model\n",
    "import tensorflow as tf\n",
    "tf.compat.v1.disable_eager_execution()\n",
    "\n",
    "dropout = 0.5\n",
    "num_iter = 20\n",
    " \n",
    "input_data=[trainAttrX, train_dataset]\n",
    "input_data[1] = np.asarray(input_data[1]).reshape(1093,240,240,1)\n",
    "num_samples = input_data[0].shape[0]\n",
    "\n",
    "\n",
    "model = load_model('clean_notebooks/cnn_inj_collage_1C.h5')\n",
    "\n",
    "predict_with_dropout = create_dropout_predict_function(model, dropout)\n",
    "\n",
    "predictions = np.zeros((num_samples, num_iter))\n",
    "for i in range(num_iter):\n",
    "    predictions[:,i] = predict_with_dropout([input_data,1])[0].reshape(-1)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Ne fonctionne pas, trop d'images pour la mémoire"
   ]
  },
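  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "One way around that memory limit is to feed the training images through the dropout predictor in chunks rather than in a single call. The cell below is a minimal sketch of that idea, reusing the names from the failing cell above; the batch size of 64 is an arbitrary assumption."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch: the same MC-dropout loop, chunked so that the full training\n",
    "# set never goes through the graph in one batch (batch size assumed)\n",
    "batch = 64\n",
    "predictions = np.zeros((num_samples, num_iter))\n",
    "for i in range(num_iter):\n",
    "    for start in range(0, num_samples, batch):\n",
    "        end = min(start + batch, num_samples)\n",
    "        chunk = [input_data[0][start:end], input_data[1][start:end]]\n",
    "        predictions[start:end, i] = predict_with_dropout([chunk, 1])[0].reshape(-1)"
   ]
  },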
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Test score"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(280, 240, 240)"
      ]
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "test_dataset.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [],
   "source": [
    "from postprocessing.evaluate import compute_score\n",
    "import matplotlib.pyplot as plt"
   ]
  },
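  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`compute_score` returns the competition metric reported below (values around -10 to -13). For this competition that metric is a modified Laplace log likelihood, sketched in the next cell with sigma floored at 70 and the absolute error capped at 1000; the exact signature and any internal rescaling in `postprocessing.evaluate.compute_score` are assumptions here."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Published competition metric; presumably what compute_score implements,\n",
    "# modulo how the scaled targets are converted back to FVC units\n",
    "def laplace_log_likelihood(fvc_true, fvc_pred, sigma):\n",
    "    sigma_c = np.maximum(sigma, 70)\n",
    "    delta = np.minimum(np.abs(fvc_true - fvc_pred), 1000)\n",
    "    return np.mean(-np.sqrt(2) * delta / sigma_c - np.log(np.sqrt(2) * sigma_c))"
   ]
  },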
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Dropout 0.3"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 44,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[<tf.Tensor 'dense_6_input_4:0' shape=(None, 6) dtype=float32>, <tf.Tensor 'input_2_4:0' shape=(None, 240, 240, 1) dtype=float32>]\n"
     ]
    }
   ],
   "source": [
    "tf.compat.v1.disable_eager_execution()\n",
    "\n",
    "dropout = 0.3\n",
    "num_iter = 20\n",
    " \n",
    "input_data=[testAttrX, test_dataset]\n",
    "input_data[1] = np.asarray(input_data[1]).reshape(test_dataset.shape[0],240,240,1)\n",
    "num_samples = input_data[0].shape[0]\n",
    "\n",
    "predict_with_dropout = create_dropout_predict_function(model, dropout)\n",
    "\n",
    "predictions = np.zeros((num_samples, num_iter))\n",
    "for i in range(num_iter):\n",
    "    predictions[:,i] = predict_with_dropout([input_data,1])[0].reshape(-1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 45,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXAAAAD4CAYAAAD1jb0+AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAOPklEQVR4nO3dbYxc5XnG8f8dm5eopAXD1rIw7joFNUJVY9CWEhFVLZSW4Ch2JYSI0tYfLFlqG4korRLTSlUitZKp1JBUihq5geI2L5iSRLZAfaHGUVSpNbHDSwwOxRCjYhnsJJDAF1rD3Q/zLEzXs7vj3ZmdufH/J632nGfOzlx6rL185pkzs5GZSJLqeceoA0iSFsYCl6SiLHBJKsoCl6SiLHBJKmr5Uj7YRRddlJOTk0v5kJJU3oEDB36QmRMzx5e0wCcnJ9m/f/9SPqQklRcRz/UadwlFkorq6ww8Io4ArwCvAyczcyoiVgA7gUngCHBzZr40nJiSpJlO5wz81zNzXWZOtf2twJ7MvAzY0/YlSUtkMUsoG4AdbXsHsHHRaSRJfeu3wBP414g4EBFb2tjKzDzWtl8AVvb6wYjYEhH7I2L/iRMnFhlXkjSt36tQ3p+ZRyPiZ4EHI+J73TdmZkZEz0/FysztwHaAqakpPzlLkgakrzPwzDzavh8HvgFcBbwYEasA2vfjwwopSTrVvAUeET8VEe+a3gZ+EzgI7AY2tcM2AbuGFVKSdKp+llBWAt+IiOnjv5KZ/xwR3wbujYjNwHPAzcOLKUmaad4Cz8xngff2GP8hcN0wQlU1ufWBgd7fkW3rB3p/kt5efCemJBVlgUtSURa4JBVlgUtSURa4JBVlgUtSURa4JBVlgUtSURa4JBVlgUtSURa4JBVlgUtSURa4JBVlgUtSURa4JBVlgUtSURa4JBVlgUtSURa4JBVlgUtSURa4JBVlgUtSURa4JBVlgUtSURa4JBVlgUtSURa4JBVlgUtSURa4JBVlgUtSURa4JBVlgUtSURa4JBVlgUtSUX0XeEQsi4hHIuL+tr82IvZFxOGI2BkRZw8vpiRpptM5A78VONS1fztwR2ZeCrwEbB5kMEnS3Poq8IhYDawHvtj2A7gWuK8dsgPYOIR8kqRZ9HsG/lngE8Abbf9C4OXMPNn2nwcu7vWDEbElIvZHxP4TJ04sJqskqcu8BR4RHwSOZ+aBhTxAZm7PzKnMnJqYmFjIXUiSeljexzHXAB+KiBuBc4GfBj4HnB8Ry9tZ+Grg6PBiSpJmmvcMPDNvy8zVmTkJ3AI8lJkfAfYCN7XDNgG7hpZSknSKxVwH/kng4xFxmM6a+J2DiSRJ6kc/SyhvysxvAt9s288CVw0+kiSpH74TU5KKssAlqSgLXJKKssAlqSgLXJKKssAlqSgLXJKKssAlqSgLXJKKssAlqSgLXJKKssAlqSgLXJKKssAlqSgLXJKKssAlqSgLXJKKssAlqSgLXJKKssAlqSgLXJKKssAlqSgLXJKKssAlqSgLXJKKssAlqSgLXJKKssAlqSgLXJKKssAlqSgLXJKKssAlqSgLXJKKmrfAI+LciHg4Ih6LiCci4tNtfG1E7IuIwxGxMyLOHn5cSdK0fs7AXwOuzcz3AuuAGyLiauB24I7MvBR4Cdg8tJSSpFPMW+DZ8WrbPat9JXAtcF8b3wFsHEZASVJvy/s5KCKWAQeAS4HPA88AL2fmyXbI88DFs/zsFmALwJo1axab94wyufWBgd7fkW3rB3p/kkarrxcxM/P1zFwHrAauAt7T7wNk5vbMnMrMqYmJiYWllCSd4rSuQsnMl4G9wPuA8yNi+gx+NXB0sNEkSXPp5yqUiYg4v22/E7geOESnyG9qh20Cdg0poySph37WwFcBO9o6+DuAezPz/oh4ErgnIv4ceAS4c4g5JUkzzFvgmfk4cEWP8WfprIdLkkbAd2JKUlEWuCQVZYFLUlEWuCQVZYFLUlEWuCQVZYFLUlEWuCQVZYFLUlEWuCQVZYFLUlEWuCQVZYFLUlEWuCQVZYFLUlEWuCQVZYFLUlEWuCQVZYFLUlEWuCQVZYFLUlEWuCQVZYFLUlEWuCQVtXzUAbR0Jrc+MND7O7Jt/UDvT9Lp8QxckoqywCWpKAtckopyDVwLNug1dXBdXTodnoFLUlEWuCQVZYFLUlGugUunwXV/jZN5z8Aj4pKI2BsRT0bEExFxaxtfEREPRsTT7fsFw48rSZrWzxLKSeCPMvNy4GrgDyPicmArsCczLwP2tH1J0hKZt8Az81hmfqdtvwIcAi4GNgA72mE7gI1DyihJ6uG01sAjYhK4AtgHrMzMY+2mF4CVs/zMFmALwJo1axYcVFJ//MybM0ffV6FExHnA14CPZeZPum/LzASy189l5vbMnMrMqYmJiUWFlSS9pa8Cj4iz6JT3lzPz6234xYhY1W5fBRwfTkRJUi/9XIUSwJ3Aocz8TNdNu4FNbXsTsGvw8SRJs+lnDfwa4HeB70bEo23sT4BtwL0RsRl4Drh5KAklST3NW+CZ+e9AzHLzdYONI0nql2+ll6SiLHBJKsoCl6SiztgPsxrGhxJJ0lLyDFySirLAJakoC1ySijpj18B1ZvC1Dr2deQYuSUVZ4JJUlAUuSUW5Bi6N2Liv0/uHnMeXZ+CSVJQFLklFWeCSVJRr4Bor474eLI0Tz8AlqSgLXJKKssAlqSgLXJKKssAlqSgLXJKKssAlqSgLXJKKssAlqSgLXJKKssAlqSgLXJKKssAlqSgLXJKKssAlqSgLXJKKKvMHHfygf0n6/+Y9A4+IuyLieEQc7BpbEREPRsTT7fsFw40pSZqpnyWUu4EbZoxtBfZk5mXAnrYvSVpC8xZ4Zn4L+NGM4Q3Ajra9A9g42FiSpPks9EXMlZl5rG2/AKwcUB5JUp8WfRVKZiaQs90eEVsiYn9E7D9x4sRiH06S1Cy0wF+MiFUA7fvx2Q7MzO2ZOZWZUxMTEwt8OEnSTAst8N3Apra9Cdg1mDiSpH71cxnhV4H/AH4hIp6PiM3ANuD6iHga+I22L0laQvO+kSczPzzLTdcNOIsk6TT4VnpJKsoCl6SiLHBJKqrMh1lJevsY9w+nO7Jt/agj9MUzcEkqygKXpKIscEkqygKXpKIscEkqygKXpKIscEkqyuvAJWmGQV+nPqzryj0Dl6SiLHBJKsoCl6SiLHBJKsoCl6SiLHBJKsoCl6SiLHBJKsoCl6SiLHBJKsoCl6SiLHBJKsoCl6SiLHBJKsoCl6SiLHBJKsoCl6SiLHBJKsoCl6SiLHBJKsoCl6SiLHBJKmpRBR4RN0TEUxFxOCK2DiqUJGl+Cy7wiFgGfB74AHA58OGIuHxQwSRJc1vMGfhVwOHMfDYz/we4B9gwmFiSpPksX8TPXgz8d9f+88CvzDwoIrYAW9ruqxHx1CIecz4XAT8Y4v0PijkHr0pWcw5WiZxxO7C4rD/Xa3AxBd6XzNwObB/24wBExP7MnFqKx1oMcw5elazmHKwqOWE4WRezhHIUuKRrf3UbkyQtgcU
U+LeByyJibUScDdwC7B5MLEnSfBa8hJKZJyPio8C/AMuAuzLziYElW5glWaoZAHMOXpWs5hysKjlhCFkjMwd9n5KkJeA7MSWpKAtckooqU+ARcVdEHI+Ig11jKyLiwYh4un2/oI1HRPx1e4v/4xFx5YhzfioijkbEo+3rxq7bbms5n4qI31qqnO2xL4mIvRHxZEQ8ERG3tvGxmtc5co7VvEbEuRHxcEQ81nJ+uo2vjYh9Lc/O9qI/EXFO2z/cbp8ccc67I+L7XfO5ro2P7PepK/OyiHgkIu5v+2M1p3PkHO6cZmaJL+BXgSuBg11jfwlsbdtbgdvb9o3APwEBXA3sG3HOTwF/3OPYy4HHgHOAtcAzwLIlzLoKuLJtvwv4r5ZprOZ1jpxjNa9tXs5r22cB+9o83Qvc0sa/APx+2/4D4Att+xZg5xLN52w57wZu6nH8yH6fujJ8HPgKcH/bH6s5nSPnUOe0zBl4Zn4L+NGM4Q3Ajra9A9jYNf732fGfwPkRsWqEOWezAbgnM1/LzO8Dh+l8RMGSyMxjmfmdtv0KcIjOO2zHal7nyDmbkcxrm5dX2+5Z7SuBa4H72vjM+Zye5/uA6yIiRphzNiP7fQKIiNXAeuCLbT8YszntlXMeA5nTMgU+i5WZeaxtvwCsbNu93uY/1y/8Uvhoe6p01/SSBGOUsz3VvILO2djYzuuMnDBm89qeQj8KHAcepHP2/3JmnuyR5c2c7fYfAxeOImdmTs/nX7T5vCMizpmZs1nqf/fPAp8A3mj7FzKGc9oj57ShzWn1An9Tdp6XjOs1kX8D/DywDjgG/NVI08wQEecBXwM+lpk/6b5tnOa1R86xm9fMfD0z19F5Z/JVwHtGm6i3mTkj4heB2+jk/WVgBfDJ0SXsiIgPAscz88Cos8xljpxDndPqBf7i9NOO9v14Gx+rt/ln5ovtF+YN4G956+n8yHNGxFl0SvHLmfn1Njx289or5zjPa2a+DOwF3kfn6fH0m+a6s7yZs93+M8APR5TzhrZUlZn5GvB3jMd8XgN8KCKO0PnE02uBzzF+c3pKzoj40rDntHqB7wY2te1NwK6u8d9rr/ReDfy4a0lgyc1Y2/ptYPoKld3ALe2V87XAZcDDS5grgDuBQ5n5ma6bxmpeZ8s5bvMaERMRcX7bfidwPZ31+r3ATe2wmfM5Pc83AQ+1ZzyjyPm9rv+0g86acvd8juT3KTNvy8zVmTlJ50XJhzLzI4zZnM6S83eGPqcLeeVzFF/AV+k8Tf5fOutFm+msbe0Bngb+DVjRjg06f2ziGeC7wNSIc/5Dy/F4+4db1XX8n7acTwEfWOI5fT+d5ZHHgUfb143jNq9z5ByreQV+CXik5TkI/Fkbfzed/0AOA/8InNPGz237h9vt7x5xzofafB4EvsRbV6qM7PdpRu5f462rO8ZqTufIOdQ59a30klRU9SUUSTpjWeCSVJQFLklFWeCSVJQFLklFWeCSVJQFLklF/R/+Gon5MG7GbwAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "FVC_pred = sc.inverse_transform(predictions)\n",
    "plt.hist(np.std(FVC_pred, axis = 1), bins=15)\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 46,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAD5CAYAAAAp8/5SAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAPgElEQVR4nO3db6xkdX3H8fengLZRLIvcbDbIumioDU+64A2l8U+sKAK2gq0x8gC3Lc3aRBKJNs1WksqDPsC2aNK0wa6BuDYotSqBKFZXQktMKnYXV1hYcYGuKZtlF4sWTBvs4rcP5tw6rPfunTsz597hx/uVTObM75yZ+d7fmfncM+ec+U2qCklSu35hrQuQJPXLoJekxhn0ktQ4g16SGmfQS1LjDHpJatyJyy2Q5BeBu4EXd8t/vqo+kuRM4Bbg5cBu4Iqq+snxHuu0006rTZs2TVy0JL2Q7N69+wdVNTfu/ZcNeuAZ4M1V9eMkJwHfSPIV4IPAx6vqliSfAK4EbjjeA23atIldu3aNW6skvSAl+f4k9192100N/Li7eVJ3KeDNwOe79h3AZZMUIknqx0j76JOckGQPcATYCTwC/KiqjnaLPAac3kuFkqSJjBT0VfVsVW0GXgGcB/zqqE+QZGuSXUl2PfHEE+NVKUka24rOuqmqHwF3Ab8BnJJkYR//K4CDS9xne1XNV9X83NzYxxIkSWNaNuiTzCU5pZv+JeCtwD4Ggf+ubrEtwG091ShJmsAoZ91sAHYkOYHBP4bPVdWXkjwI3JLkz4FvAzf2WKckaUzLBn1V3Qecs0j7owz210uSZpjfjJWkxhn0ktS4UfbRS6tm07Yvj7Tcgeve3nMlUjvcopekxhn0ktQ4g16SGmfQS1LjDHpJapxBL0mNM+glqXEGvSQ1zqCXpMYZ9JLUOINekhpn0EtS4wx6SWqco1fqeclRLqXRuUUvSY0z6CWpcQa9JDXOoJekxhn0ktQ4g16SGmfQS1LjDHpJapxBL0mNM+glqXHLBn2SM5LcleTBJA8k+UDXfm2Sg0n2dJdL+i9XkrRSo4x1cxT4UFXdm+RkYHeSnd28j1fVX/VXniRpUssGfVUdAg51008n2Qec3ndhkqTpWNHolUk2AecA9wCvA65K8l5gF4Ot/h8ucp+twFaAjRs3TlqvZkwro0i28ndIixn5YGySlwJfAK6uqqeAG4BXA5sZbPFfv9j9qmp7Vc1X1fzc3NzkFUuSVmSkoE9yEoOQv7mqvghQVYer6tmq+inwSeC8/sqUJI1rlLNuAtwI7Kuqjw21bxha7J3A3umXJ0ma1Cj76F8HXAHcn2RP1/Zh4PIkm4ECDgDv66E+SdKERjnr5htAFpl1x/TLkSRNm9+MlaTG+ePgLyCeQii9MLlFL0mNM+glqXEGvSQ1zqCXpMYZ9JLUOINekhpn0EtS4zyPXqti1HP4W3leaZa4RS9JjTPoJalxBr0kNc6gl6TGGfSS1DiDXpIaZ9BLUuMMeklqnEEvSY0z6CWpcQa9JDXOoJekxhn0ktQ4g16SGmfQS1LjDHpJapxBL0mNWzbok5yR5K4kDyZ5IMkHuvZTk+xMsr+7Xtd/uZKklRpli/4o8KGqOhs4H3h/krOBbcCdVXUWcGd3W5I0Y5YN+qo6VFX3dtNPA/uA04FLgR3dYjuAy3qqUZI0gRX9OHiSTcA5wD3A+qo61M16HFi/xH22AlsBNm7cOHahWj3+oLbUlpEPxiZ5KfAF4Oqqemp4XlUVUIvdr6q2V9V8Vc3Pzc1NVKwkaeVGCvokJzEI+Zur6otd8+EkG7r5G4Aj/ZQoSZrEKGfdBLgR2FdVHxuadTuwpZveAtw2/fIkSZMaZR/964ArgPuT7OnaPgxcB3wuyZXA94F391KhJGkiywZ9VX0DyBKzL5huOZKkafObsZLUOINekhpn0EtS4wx6SWqcQS9JjTPoJalxBr0kNc6gl6TGGfSS1DiDXpIaZ9BLUuMMeklqnEEvSY0z6CWpcQa9JDXOoJekxhn0ktQ4g16SGmfQS1LjDHpJapxBL0mNM+glqXEGvSQ1zqCXpMYZ9JLUOINekhq3bNAnuSnJkSR7h9quTXIwyZ7uckm/ZUqSxjXKFv2ngIsWaf94VW3uLndMtyxJ0rQsG/RVdTfw5CrUIknqwST76K9Kcl+3a2fd1CqSJE3VuEF/A/BqYDNwCLh+qQWTbE2yK8muJ554YsynkySNa6ygr6rDVfVsVf0U+CRw3nGW3V5V81U1Pzc3N26dkqQxjRX0STYM3XwnsHepZSVJa+vE5RZI8lngTcBpSR4DPgK8KclmoIADwPv6K1GSNIllg76qLl+k+cYeapEk9cBvxkpS4wx6SWqcQS9JjTPoJalxBr0kNc6gl6TGGfSS1DiDXpIaZ9BLUuMMeklqnEEvSY0z6CWpcQa9JDXOoJekxhn0ktQ4g16SGrfsD49o9m3a9uW1LkHSDHOLXpIaZ9BLUuMMeklqnEEvSY0z6CWpcQa9JDXO0yulFVjJqawHrnt7j5VIo3OLXpIaZ9BLUuMMeklq3LJBn+SmJEeS7B1qOzXJziT7u+t1/ZYpSRrXKFv0nwIuOqZtG3BnVZ0F3NndliTNoGWDvqruBp48pvlSYEc3vQO4bLplSZKmZdzTK9dX1aFu+nFg/VILJtkKbAXYuHHjmE/3wuSolJKmYeKDsVVVQB1n/vaqmq+q+bm5uUmfTpK0QuMG/eEkGwC66yPTK0mSNE3jBv3twJZuegtw23TKkSRN2yinV34W+FfgNUkeS3IlcB3w1iT7gbd0tyVJM2jZg7FVdfkSsy6Yci2SpB74zVhJapxBL0mNc5hiqSejfg/C4YzVN7foJalxBr0kNc6gl6TGGfSS1DiDXpIaZ9BLUuM8vXINOPywxuHpmhqXW/SS1DiDXpIaZ9BLUuMMeklqnEEvSY0z6CWpcZ5eOUWeNilpFrlFL0mNM+glqXEGvSQ1zqCXpMYZ9JLUOINekhpn0EtS4wx6SWqcQS9JjTPoJalxEw2BkOQA8DTwLHC0quanUZQkaXqmMdbNb1bVD6bwOJKkHrjrRpIaN+kWfQFfS1LA31XV9mMXSLIV2AqwcePGCZ9ubTgqpaTns0m36F9fVecCFwPvT/LGYxeoqu1VNV9V83NzcxM+nSRppSYK+qo62F0fAW4FzptGUZKk6Rk76JO8JMnJC9PAhcDeaRUmSZqOSfbRrwduTbLwOJ+pqn+aSlWSpKkZO+ir6lHg16ZYiySpB55eKUmNM+glqXEGvSQ1zqCXpMYZ9JLUOINekhpn0EtS4wx6SWrcNMajf15yRErNCl+L6ptb9JLUOINekhpn0EtS4wx6SWqcQS9JjTPoJalxBr0kNc6gl6TGGfSS1DiDXpIaZ9BLUuMMeklqnEEvSY0z6CWpcc+bYYodylWarlHfUweue/uaPeZaPV4fVtKP0+YWvSQ1zqCXpMZNFPRJLkryUJKHk2ybVlGSpOkZO+iTnAD8LXAxcDZweZKzp1WYJGk6JtmiPw94uKoeraqfALcAl06
nLEnStEwS9KcD/zF0+7GuTZI0Q1JV490xeRdwUVX9YXf7CuDXq+qqY5bbCmztbr4GeGiZhz4N+MFYRa2eWa/R+iYz6/XB7NdofZM5tr5XVtXcuA82yXn0B4Ezhm6/omt7jqraDmwf9UGT7Kqq+Qnq6t2s12h9k5n1+mD2a7S+yUy7vkl23fwbcFaSM5O8CHgPcPt0ypIkTcvYW/RVdTTJVcBXgROAm6rqgalVJkmaiomGQKiqO4A7plTLgpF386yhWa/R+iYz6/XB7NdofZOZan1jH4yVJD0/OASCJDVuTYM+yT8k2dNdDiTZ07VvSvI/Q/M+MXSf1ya5vxt24a+TpMf6rk1ycKiOS4bm/WlXw0NJ3jbUvmrDQiT5yyTfTXJfkluTnNK1z0T/LVHzmg+bkeSMJHcleTDJA0k+0LWveH33WOOBbj3tSbKrazs1yc4k+7vrdV17unX5cPdaOLfn2l4z1Ed7kjyV5Oq17r8kNyU5kmTvUNuK+yzJlm75/Um29Fzf6ryHq2omLsD1wJ9105uAvUss9y3gfCDAV4CLe6zpWuCPF2k/G/gO8GLgTOARBgekT+imXwW8qFvm7B7ruxA4sZv+KPDRWeq/RZ57VfvnOHVsAM7tpk8Gvtet0xWt755rPACcdkzbXwDbuultQ+v7km5dplu396zyOn0ceOVa9x/wRuDc4df+SvsMOBV4tLte102v67G+VXkPz8Sum+4/0ruBzy6z3AbgZVX1zRr8xZ8GLuu/wp9zKXBLVT1TVf8OPMxgSIhVHRaiqr5WVUe7m99k8F2GJc1A/83EsBlVdaiq7u2mnwb2cfxvdS+1vlfbpcCObnoHP1t3lwKfroFvAqd063o1XAA8UlXfP84yq9J/VXU38OQiz72SPnsbsLOqnqyqHwI7gYv6qm+13sMzEfTAG4DDVbV/qO3MJN9O8i9J3tC1nc5gqIUFqzHswlXdx6qbFj72sfTwD2s5LMQfMPjvvmBW+m/YzA2bkWQTcA5wT9e0kvXdpwK+lmR3Bt8uB1hfVYe66ceB9WtY34L38NwNtFnpvwUr7bMm38O9B32SryfZu8hleEvucp77YjkEbKyqc4APAp9J8rI1qO8G4NXA5q6m6/uoYYL6Fpa5BjgK3Nw1rVr/PZ8leSnwBeDqqnqKGVjfQ15fVecyGB32/UneODyz25pb01PmMvii5DuAf+yaZqn/fs4s9NlS+n4P9/5TglX1luPNT3Ii8DvAa4fu8wzwTDe9O8kjwK8wGGJh+KPNosMuTLO+oTo/CXypu3m84R+WHRZimvUl+T3gt4ALuhfyqvbfCo00bMZqSHISg5C/uaq+CFBVh4fmj7q+e1FVB7vrI0luZbCr43CSDVV1qPsIf2St6utcDNy70G+z1H9DVtpnB4E3HdP+z30WuBrv4VnYdfMW4LtV9f8fR5LMZTDePUleBZwFPNp9BHsqyfndfv33Arf1Vdgx+znfCSwcLb8deE+SFyc5s6vvW6zysBBJLgL+BHhHVf33UPtM9N8iZmLYjO5vvxHYV1UfG2pf6fruq76XJDl5YZrBAbu9XR0LZ4Fs4Wfr7nbgvRk4H/ivod0VfXrOJ/FZ6b9jrLTPvgpcmGRdt+vpwq6tF6v2Hp7G0eRJLsCngD86pu13gQeAPcC9wG8PzZtn8AJ6BPgbui999VTb3wP3A/cxeGFsGJp3TVfDQwwd9WZwNP973bxreu67hxnsT9zTXT4xS/23RM2r1j/HqeH1DD7C3zfUd5eMs757qu9VDM5S+U63Hq/p2l8O3AnsB74OnNq1h8GPAD3S1T+/Cn34EuA/gV8ealvT/mPwT+cQ8L8M9l1fOU6fMdhX/nB3+f2e61uV97DfjJWkxs3CrhtJUo8MeklqnEEvSY0z6CWpcQa9JDXOoJekxhn0ktQ4g16SGvd/X2ASarqGM/4AAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "diff_fvc = (np.mean(FVC_pred,axis=1) - sc.inverse_transform(testY))\n",
    "diff = (np.mean(predictions,axis=1) - testY)\n",
    "plt.hist(diff_fvc, bins=30)\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 47,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "MSE :  78315.42583786986\n",
      "RMSE :  279.8489339587876\n"
     ]
    }
   ],
   "source": [
    "MSE = np.mean(diff_fvc*diff_fvc)\n",
    "RMSE = np.sqrt(MSE)\n",
    "print(\"MSE : \",MSE)\n",
    "print(\"RMSE : \", RMSE)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 48,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "MSE :  0.11259078245046743\n",
      "RMSE :  0.33554549982151066\n"
     ]
    }
   ],
   "source": [
    "MSE = np.mean(diff*diff)\n",
    "RMSE = np.sqrt(MSE)\n",
    "print(\"MSE : \",MSE)\n",
    "print(\"RMSE : \", RMSE)"
   ]
  },
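  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Sanity check (assuming `sc` is a standard scaler whose scale is close to the std of 832.77 printed earlier): multiplying the normalized RMSE by that scale, 0.3355 * 832.77 = 279.4, roughly recovers the 279.85 RMSE in FVC units reported just above; the small gap reflects the difference between the scaler's fitted scale and the test-set std."
   ]
  },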
  {
   "cell_type": "code",
   "execution_count": 49,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "-12.675885503523693"
      ]
     },
     "execution_count": 49,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "compute_score(testY,np.mean(FVC_pred,axis=1),np.std(FVC_pred,axis=1))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Dropout 0.4"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[<tf.Tensor 'dense_6_input_2:0' shape=(None, 6) dtype=float32>, <tf.Tensor 'input_2_2:0' shape=(None, 240, 240, 1) dtype=float32>]\n"
     ]
    }
   ],
   "source": [
    "tf.compat.v1.disable_eager_execution()\n",
    "\n",
    "dropout = 0.4\n",
    "num_iter = 20\n",
    " \n",
    "input_data=[testAttrX, test_dataset]\n",
    "input_data[1] = np.asarray(input_data[1]).reshape(test_dataset.shape[0],240,240,1)\n",
    "num_samples = input_data[0].shape[0]\n",
    "\n",
    "predict_with_dropout = create_dropout_predict_function(model, dropout)\n",
    "\n",
    "predictions = np.zeros((num_samples, num_iter))\n",
    "for i in range(num_iter):\n",
    "    predictions[:,i] = predict_with_dropout([input_data,1])[0].reshape(-1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXAAAAD4CAYAAAD1jb0+AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAM8UlEQVR4nO3dXahl9XnH8e+vMxrTpM34cpDBkR6DEvGivjBYxVBarcVmQuKFlEho52JgbiwYGkgnLRQCvRhvYlIooRJt5iIkpiatolBrR0MolLFn4rtTcbQToqhzbDVpepHW5OnFXmNOj2c8e87Z5+XZfj+w2Wv919prPw+z53fWWXutdVJVSJL6+aWNLkCStDIGuCQ1ZYBLUlMGuCQ1ZYBLUlNb1/PNzjnnnJqdnV3Pt5Sk9g4fPvx6Vc0sHl/XAJ+dnWVubm4931KS2kvyg6XGPYQiSU0Z4JLUlAEuSU0Z4JLUlAEuSU0Z4JLUlAEuSU0Z4JLUlAEuSU2t65WY02523wMT3d6x/bsmuj1J08U9cElqygCXpKYMcElqygCXpKYMcElqygCXpKYMcElqygCXpKYMcElqygCXpKbGDvAkW5I8luT+Yf6CJIeSHE1yd5LT165MSdJip7IHfitwZMH8bcDtVXUh8AawZ5KFSZLe3VgBnmQHsAv46jAf4FrgnmGVA8CNa1CfJOkkxt0D/xLwOeDnw/zZwJtV9dYw/xJw3lIvTLI3yVySufn5+dXUKklaYNkAT/Jx4HhVHV7JG1TVHVW1s6p2zszMrGQTkqQljHM/8GuATyT5GHAG8KvAl4FtSbYOe+E7gJfXrkxJ0mLL7oFX1eerakdVzQKfAh6uqk8DjwA3DavtBu5dsyolSe+wmvPA/wT44yRHGR0Tv3MyJUmSxnFKf1Ktqr4LfHeYfhG4cvIlSZLG4ZWYktSUAS5JTRngktSUAS5JTRngktSUAS5JTRngktSUAS5JTRngktTUKV2JqfU1u++BiW7v2P5dE92epI3lHrgkNWWAS1JTBrgkNWWAS1JTBrgkNWWAS1JTBrgkNWWAS1JTBrgkNWWAS1JTBrgkNWWAS1JTBrgkNfWevRvhpO/0J0nrzT1wSWrKAJekpgxwSWrKAJekpgxwSWrKAJekpgxwSWrKAJekpgxwSWrKAJekpgxwSWrKAJekpgxwSWrKAJekpt6zt5PV6q3FLXmP7d818W1K02rZPfAkZyR5NMkTSZ5J8oVh/IIkh5IcTXJ3ktPXvlxJ0gnjHEL5KXBtVV0KXAbckOQq4Dbg9qq6EHgD2LNmVUqS3mHZAK+Rnwyzpw2PAq4F7hnGDwA3rkWBkqSljfUlZpItSR4HjgMPAS8Ab1bVW8MqLwHnneS1e5PMJZmbn5+fQMmSJBgzwKvqZ1V1GbADuBK4eNw3qKo7qmpnVe2cmZlZWZWSpHc4pdMIq+pN4BHgamBbkhNnsewAXp5saZKkdzPOWSgzSbYN0+8HrgeOMArym4bVdgP3rlGNkqQljHMe+HbgQJItjAL/W1V1f5JngW8m+QvgMeDONaxTkrTIsgFeVU8Cly8x/iKj4+GSpA3gpfSS1JQBLklNGeCS1JQBLklNGeCS1JQBLklNGeCS1JQBLklNGeCS1JQBLklNGeCS1JQBLklNGeCS1JQBLklNGeCS1JQBLklNGeCS1JQBLklNGeCS1JQBLklNGeCS1JQBLklNGeCS1NTWjS5A62d23wMbXcKyJl3jsf27Jro9aTNxD1ySmjLAJakpA1ySmjLAJakpA1ySmjLAJakpA1ySmjLAJakpA1ySmjLAJakpA1ySmjLAJakpA1ySmjLAJakpA1ySmlo2wJOcn+SRJM8meSbJrcP4WUkeSvL88Hzm2pcrSTphnD3wt4DPVtUlwFXALUkuAfYBB6vqIuDgMC9JWifLBnhVvVJV3x+m/ws4ApwHfBI4MKx2ALhxjWqUJC3hlI6BJ5kFLgcOAedW1SvDoleBc0/ymr1J5pLMzc/Pr6ZWSdICYwd4kg8C3wY+U1U/XrisqgqopV5XVXdU1c6q2jkzM7OqYiVJvzBWgCc5jVF4f72qvjMMv5Zk+7B8O3B8bUqUJC1lnLNQAtwJHKmqLy5YdB+we5jeDdw7+fIkSSezdYx1rgH+AHgqyePD2J8C+4FvJdkD/AD4/TWpUJK0pGUDvKr+GchJFl832XIkSePySkxJasoAl6SmDHBJasoAl6SmxjkLRdJgdt8DG13Cso7t37XRJWiduAcuSU0Z4JLUlAEuSU0Z4JLUlF9iaqp1+NJx0ibds1+Kbl7ugUtSUwa4JDVlgEtSUwa4JDVlgEtSUwa4JDVlgEtSUwa4JDVlgEtSUwa4JDVlgEtSUwa4JDVlgEtSUwa4JDVlgEtSUwa4JDVlgEtSUwa4JDVlgEtSUwa4JDVlgEtSUwa4JDVlgEtSUwa4JDVlgEtSUwa4JDVlgEtSUwa4JDVlgEtSU8sGeJK7khxP8vSCsbOSPJTk+eH5zLUtU5K02Dh74F8Dblg0tg84WFUXAQeHeUnSOlo2wKvqe8B/Lhr+JHBgmD4A3DjZsiRJy1npMfBzq+qVYfpV4NwJ1SNJGtOqv8SsqgLqZMuT7E0yl2Rufn5+tW8nSRqsNMBfS7IdYHg+frIVq+qOqtpZVTtnZmZW+HaSpMVWGuD3AbuH6d3AvZMpR5I0rnFOI/wG8C/AR5K8lGQPsB+4PsnzwO8M85KkdbR1uRWq6uaTLLpuwrVIkk6BV2JKUlMGuCQ1ZYBLUlMGuCQ1teyXmJLe22b3PTDxbR7bv2vi23wvcg9ckpoywCWpKQNckpoywCWpKQNckpoywCWpKQNckpoywCWpKQNckpoywCWpKQNckpoywCWpKQNckpoywCWpqTa3k12LW1pKUmfugUtSUwa4JDVlgEtSUwa4JDVlgEtSUwa4JDVlgEtSUwa4JDXV5kIeSVovk75w8Nj+XRPd3gnugUtSUwa4JDVlgEtSUwa4JDVlgEtSUwa4JDVlgEtSUwa4JDVlgEtSU16JKWnddbnScbNzD1ySmlpVgCe5IclzSY4m2TepoiRJy1txgCfZAvwV8HvAJcDNSS6ZVGGSpHe3mj3wK4GjVfViVf0P8E3gk5MpS5K0nNV8iXke8MMF8y8Bv7F4pSR7gb3D7E+SPLeC9zoHeH0Fr+vEHqfDtPe4KfvLbRPd3MR7nEB9v7bU4JqfhVJVdwB3rGYbSeaqaueEStqU7HE6THuP094f9OpxNYdQXgbOXzC/YxiTJK2D1QT4vwIXJbkgyenAp4D7JlOWJGk5Kz6EUlVvJfkj4EFgC3BXVT0zscr+v1UdgmnCHqfDtPc47f1Box5TVRtdgyRpBbwSU5KaMsAlqalNEeBJ7kpyPMnTC8bOSvJQkueH5zOH8ST5y+Hy/SeTXLFxlY8nyflJHknybJJnktw6jE9Tj2ckeTTJE0OPXxjGL0hyaOjl7uELb5K8b5g/Oiyf3dAGTkGSLUkeS3L/MD9VPSY5luSpJI8nmRvGpumzui3JPUn+LcmRJFd37W9
TBDjwNeCGRWP7gINVdRFwcJiH0aX7Fw2PvcBX1qnG1XgL+GxVXQJcBdwy3HZgmnr8KXBtVV0KXAbckOQq4Dbg9qq6EHgD2DOsvwd4Yxi/fVivi1uBIwvmp7HH366qyxacDz1Nn9UvA/9QVRcDlzL6t+zZX1VtigcwCzy9YP45YPswvR14bpj+a+Dmpdbr8gDuBa6f1h6BXwa+z+jK3NeBrcP41cCDw/SDwNXD9NZhvWx07WP0toPRf/BrgfuBTGGPx4BzFo1NxWcV+BDw74v/Hbr2t1n2wJdyblW9Mky/Cpw7TC91Cf9561nYagy/Rl8OHGLKehwOLTwOHAceAl4A3qyqt4ZVFvbxdo/D8h8BZ69rwSvzJeBzwM+H+bOZvh4L+Mckh4dbYcD0fFYvAOaBvxkOg301yQdo2t9mDvC31ehHX/vzHZN8EPg28Jmq+vHCZdPQY1X9rKouY7SXeiVw8cZWNFlJPg4cr6rDG13LGvtoVV3B6PDBLUl+c+HC5p/VrcAVwFeq6nLgv/nF4RKgV3+bOcBfS7IdYHg+Poy3vIQ/yWmMwvvrVfWdYXiqejyhqt4EHmF0OGFbkhMXjC3s4+0eh+UfAv5jfSs9ZdcAn0hyjNHdN69ldDx1mnqkql4eno8Df8foh/G0fFZfAl6qqkPD/D2MAr1lf5s5wO8Ddg/TuxkdNz4x/ofDt8NXAT9a8KvPppQkwJ3Akar64oJF09TjTJJtw/T7GR3jP8IoyG8aVlvc44nebwIeHvZ8Nq2q+nxV7aiqWUa3jni4qj7NFPWY5ANJfuXENPC7wNNMyWe1ql4FfpjkI8PQdcCzdO1vow/CD5/nbwCvAP/L6CfkHkbHCg8CzwP/BJw1rBtGf0jiBeApYOdG1z9Gfx9l9CvZk8Djw+NjU9bjrwOPDT0+Dfz5MP5h4FHgKPC3wPuG8TOG+aPD8g9vdA+n2O9vAfdPW49DL08Mj2eAPxvGp+mzehkwN3xW/x44s2t/XkovSU1t5kMokqR3YYBLUlMGuCQ1ZYBLUlMGuCQ1ZYBLUlMGuCQ19X8XeSshrMd81QAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "FVC_pred = sc.inverse_transform(predictions)\n",
    "plt.hist(np.std(FVC_pred, axis = 1), bins=15)\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXwAAAD4CAYAAADvsV2wAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAPsElEQVR4nO3df6xkZX3H8fengLZRLFBuyAahi5ba8I8L3lAaf8SKIj9awbYx8oduW5K1iSSS2jRbSVqa9A9oiyZNG80aiGuDoq0SiNoqElpjUrELrrCAukDXlM3CrqIF04Z28ds/5mwdtvfunXvnnLlzed6vZHLPPOfMPd99zp3PnjnzzDOpKiRJL3w/td4FSJJmw8CXpEYY+JLUCANfkhph4EtSI46f5c5OPfXU2rx58yx3KUkb3r333vu9qlqY9vfMNPA3b97Mrl27ZrlLSdrwkny3j9/jJR1JaoSBL0mNMPAlqREGviQ1wsCXpEYY+JLUCANfkhph4EtSIwx8SWrETD9pK/Vl8/bPT7TdvusvG7gSaePwDF+SGmHgS1IjDHxJaoSBL0mNMPAlqREGviQ1wsCXpEYY+JLUiBUDP8lPJ/l6km8meTDJn3btZyW5J8kjST6V5EXDlytJWqtJzvCfBd5UVa8GtgAXJ7kAuAH4UFX9AvAD4KrBqpQkTW3FwK+RH3V3T+huBbwJ+PuufSdwxRAFSpL6MdE1/CTHJdkNHATuBB4FflhVh7tNHgdOH6RCSVIvJgr8qnquqrYALwfOB35p0h0k2ZZkV5Jdhw4dWluVkqSprWqUTlX9ELgb+BXgpCRHZtt8ObB/mcfsqKrFqlpcWFiYplZJ0hQmGaWzkOSkbvlngLcADzMK/t/qNtsK3D5QjZKkHkwyH/4mYGeS4xj9B/HpqvpckoeAW5P8GfAN4KYB65QkTWnFwK+q+4Fzl2h/jNH1fEnSBuAnbSWpEX7FoWbCrySU1p9n+JLUCANfkhph4EtSIwx8SWqEgS9JjTDwJakRDsvU/zPpEEpwGKW0kXiGL0mNMPAlqREGviQ1wsCXpEYY+JLUCANfkhph4EtSIwx8SWqEgS9JjTDwJakRBr4kNcLAl6RGGPiS1AgDX5IaYeBLUiNWDPwkZyS5O8lDSR5M8r6u/bok+5Ps7m6XDl+uJGmtJvkClMPA+6vqviQnAvcmubNb96Gq+svhypMk9WXFwK+qA8CBbvmZJA8Dpw9dmCSpX6u6hp9kM3AucE/XdHWS+5PcnOTkZR6zLcmuJLsOHTo0XbWSpDWbOPCTvBT4DHBNVT0NfBh4JbCF0SuAG5d6XFXtqKrFqlpcWFiYvmJJ0ppMFPhJTmAU9rdU1WcBqurJqnquqn4MfBQ4f7gyJUnTmmSUToCbgIer6oNj7ZvGNns7sKf/8iRJfZlklM5rgXcBDyTZ3bV9ALgyyRaggH3AewaoT5LUk0lG6XwVyBKrvtB/OZKkofhJW0lqhIEvSY0w8CWpEQa+JDXCwJekRhj4ktQIA1+SGmHgS1IjDHxJaoSBL0mNMPAlqREGviQ1wsCXpEZMMj2ytKzN2z+/3iVImpBn+JLUCANfkhph4EtSIwx8SWqEgS9JjTDwJakRDsvUXHGYpzQcz/AlqREGviQ1YsXAT3JGkruTPJTkwSTv69pPSXJnkr3dz5OHL1eStFaTnOEfBt5fVecAFwDvTXIOsB24q6rOBu7q7kuS5tSKgV9VB6rqvm75GeBh4HTgcmBnt9lO4IqBapQk9WBV1/CTbAbOBe4BTquqA92qJ4DTlnnMtiS7kuw6dOjQNLVKkqYwceAneSnwGeCaqnp6fF1VFVBLPa6qdlTVYlUtLiwsTFWsJGntJgr8JCcwCvtbquqzXfOTSTZ16zcBB4cpUZLUh0lG6QS4CXi4qj44tuoOYGu3vBW4vf/yJEl9meSTtq8F3gU8kGR31/YB4Hrg00muAr4LvGOQCiVJvVgx8Kvqq0CWWX1hv+VIkobiJ20lqRFOnqYXtEknY9t3/WUDVyKtP8/wJakRBr4kNcLAl6RGGPiS1AgDX5IaYeBLUiMMfElqhIEvSY0w8CWpEQa+JDXCwJekRhj4ktQIA1+SGmHgS1IjDHxJaoSBL0mNMPAlqREGviQ1wsCXpEYY+JLUCANfkhph4EtSI1YM/CQ3JzmYZM9Y23VJ9ifZ3d0uHbZMSdK0JjnD/xhw8RLtH6qqLd3tC/2WJUnq24qBX1VfAZ6aQS2SpAEdP8Vjr07ybmAX8P6q+sFSGyXZBmwDOPPMM6fYnTSczds/P9F2+66/bOBKpOGs9U3bDwOvBLYAB4Abl9uwqnZU1WJVLS4sLKxxd5Kkaa0p8Kvqyap6rqp+DHwUOL/fsiRJfVtT4CfZNHb37cCe5baVJM2HFa/hJ/kk8Ebg1CSPA38CvDHJFqCAfcB7hitRktSHFQO/qq5covmmAWqRJA1omlE62mAmHYki6YXJqRUkqREGviQ1wsCXpEYY+JLUCANfkhph4EtSIwx8SWqEgS9JjTDwJakRBr4kNcLAl6RGGPiS1AgDX5IaYeBLUiMMfElqhIEvSY0w8CWpEQa+JDXCwJekRvidtnPM76CdP6s5Jvuuv2zASqTV8wxfkhph4EtSI1YM/CQ3JzmYZM9Y2ylJ7kyyt/t58rBlSpKmNckZ/seAi49q2w7cVVVnA3d19yVJc2zFwK+qrwBPHdV8ObCzW94JXNFvWZKkvq11lM5pVXWgW34COG25DZNsA7YBnHnmmWvc3fqadGSGozIkzbOp37StqgLqGOt3VNViVS0uLCxMuztJ0hqtNfCfTLIJoPt5sL+SJElDWGvg3wFs7Za3Arf3U44kaSiTDMv8JPAvwKuSPJ7kKuB64C1J9gJv7u5LkubYim/aVtWVy6y6sOdaJEkD8pO2ktQIJ09bB06KJmk9eIYvSY0w8CWpEQa+JDXCwJekRhj4ktQIR+lIA3HSPc0bz/AlqREGviQ1wsCXpEYY+JLUCANfkhph4EtSI5odlukEZpJa4xm+JDXCwJekRhj4ktQIA1+SGmHgS1Ijmh2lMwRH/mgt+v67cTI2LcczfElqhIEvSY2Y6pJOkn3AM8BzwOGqWuyjKElS//q4hv+rVfW9Hn6PJGlAXtKRpEZMG/gFfCnJvUm2LbVBkm1JdiXZdejQoSl3J0laq2kD/3VVdR5wCfDeJG84eoOq2lFVi1W1uLCwMOXuJElrNVXgV9X+7udB4Dbg/D6KkiT1b82Bn+QlSU48sgxcBOzpqzBJUr+mGaVzGnBbkiO/5xNV9Y+9VCVJ6t2aA7+qHgNe3WMtkqQBOSxTkhrh5GnSC8ykk7E5yVp7PMOXpEYY+JLUCANfkhph4EtSIwx8SWrEC26Ujl8zKGlWVpM38zAqyjN8SWqEgS9JjTDwJakRBr4kNcLAl6RGGPiS1IgX3LBMSf3aaEMPtTzP8CWpEQa+JDXCwJekRhj4ktQIA1+SGrFhRuk4KZrUr/V8Tvk1jOvDM3xJaoSBL0mNMPAlqRFTBX6Si5N8O8kjSbb3VZQkqX9
rDvwkxwF/A1wCnANcmeScvgqTJPVrmjP884FHquqxqvpv4Fbg8n7KkiT1bZphmacD/z52/3Hgl4/eKMk2YFt390dJvg98b4r9zsqpzH+dG6FG2Bh1boQaYc7rzA1AjzV2v28oM+3LNf5bjtT4833UMPg4/KraAew4cj/JrqpaHHq/09oIdW6EGmFj1LkRaoSNUedGqBE2Rp191zjNJZ39wBlj91/etUmS5tA0gf+vwNlJzkryIuCdwB39lCVJ6tuaL+lU1eEkVwNfBI4Dbq6qByd46I6VN5kLG6HOjVAjbIw6N0KNsDHq3Ag1wsaos9caU1V9/j5J0pzyk7aS1AgDX5IaMWjgJ/lUkt3dbV+S3V375iT/NbbuI2OPeU2SB7rpGv4qSQau8bok+8dquXRs3R91dXw7yVvH2mc+pUSSv0jyrST3J7ktyUld+9z05RI1z83UG0nOSHJ3koeSPJjkfV37qo//wHXu647Z7iS7urZTktyZZG/38+SuPd1xfaT7uzhvRjW+aqy/did5Osk1692XSW5OcjDJnrG2Vfddkq3d9nuTbJ1RnbN5flfVTG7AjcAfd8ubgT3LbPd14AIgwD8Alwxc13XAHyzRfg7wTeDFwFnAo4zenD6uW34F8KJum3Nm0H8XAcd3yzcAN8xbXx6173Xpp2PUswk4r1s+EfhOd4xXdfxnUOc+4NSj2v4c2N4tbx879pd2xzXdcb5nHfr1OOAJRh8MWte+BN4AnDf+fFht3wGnAI91P0/ulk+eQZ0zeX7P5JJO9z/PO4BPrrDdJuBlVfW1Gv2LPg5cMXyFS7ocuLWqnq2qfwMeYTSdxLpMKVFVX6qqw93drzH63MOy5qAv52rqjao6UFX3dcvPAA8z+rT4cpY7/uvhcmBnt7yTnxzHy4GP18jXgJO64z5LFwKPVtV3j7HNTPqyqr4CPLXEvlfTd28F7qyqp6rqB8CdwMVD1zmr5/esruG/HniyqvaOtZ2V5BtJ/jnJ67u20xlN0XDE4xz7SdmXq7uXUjcfecnH0lNHnH6M9ln6XUb/ox8xT315xDz005KSbAbOBe7pmlZz/IdWwJeS3JvRtCQAp1XVgW75CeC0da5x3Dt5/oncPPUlrL7v5qFPB3t+Tx34Sb6cZM8St/GzuSt5/h/FAeDMqjoX+H3gE0leNm0ta6zxw8ArgS1dXTcOVceUdR7Z5lrgMHBL1zTTvtzokrwU+AxwTVU9zRwd/87rquo8RrPQvjfJG8ZXdmdzczGWOqMPXL4N+Luuad768nnmqe+WM/Tze+q5dKrqzcdan+R44DeA14w95lng2W753iSPAr/IaGqG8ZcyvUzXsFKNY7V+FPhcd/dYU0cMMqXEBH3528CvARd2f7wz78tVmLupN5KcwCjsb6mqzwJU1ZNj6yc9/oOpqv3dz4NJbmN06ePJJJuq6kD3Uv7getY45hLgviN9OG992Vlt3+0H3nhU+z/NoM6ZPL9ncUnnzcC3qur/Xn4kWchoPn2SvAI4G3ise+n1dJILuuv+7wZuH7K4o655vh048s75HcA7k7w4yVldjV9nnaaUSHIx8IfA26rqP8fa56YvjzJXU290fXAT8HBVfXCsfbXHf8gaX5LkxCPLjN7I29PVcmS0yFZ+chzvAN6dkQuA/xi7fDELz3vlPk99OWa1ffdF4KIkJ3eXpC7q2gY1s+d3n+8+L/NO8seA3zuq7TeBB4HdwH3Ar4+tW2T0h/Io8Nd0nwYesL6/BR4A7mf0R7BpbN21XR3fZuwdcEbv8H+nW3ft0H3Y7fMRRtcWd3e3j8xbXy5R88z76Ri1vI7Ry/n7x/rw0rUc/wFrfAWj0Szf7I7ptV37zwF3AXuBLwOndO1h9CVEj3b/hsUZ9udLgO8DPzvWtq59yeg/nwPA/zC6pn3VWvqO0TX0R7rb78yozpk8v51aQZIa4SdtJakRBr4kNcLAl6RGGPiS1AgDX5IaYeBLUiMMfElqxP8CyAcLdiB8qqgAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "diff_fvc = (np.mean(FVC_pred,axis=1) - sc.inverse_transform(testY))\n",
    "diff = (np.mean(predictions,axis=1) - testY)\n",
    "plt.hist(diff_fvc, bins=30)\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "MSE :  89917.68195683174\n",
      "RMSE :  299.86277187545596\n"
     ]
    }
   ],
   "source": [
    "MSE = np.mean(diff_fvc*diff_fvc)\n",
    "RMSE = np.sqrt(MSE)\n",
    "print(\"MSE : \",MSE)\n",
    "print(\"RMSE : \", RMSE)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "MSE :  0.1292708564033179\n",
      "RMSE :  0.35954256549582264\n"
     ]
    }
   ],
   "source": [
    "MSE = np.mean(diff*diff)\n",
    "RMSE = np.sqrt(MSE)\n",
    "print(\"MSE : \",MSE)\n",
    "print(\"RMSE : \", RMSE)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "-11.532341306997628"
      ]
     },
     "execution_count": 32,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "compute_score(testY,np.mean(FVC_pred,axis=1),np.std(FVC_pred,axis=1))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Dropout 0.5"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "WARNING:Error in loading the saved optimizer state. As a result, your model is starting with a freshly initialized optimizer.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[<tf.Tensor 'dense_6_input_1:0' shape=(None, 6) dtype=float32>, <tf.Tensor 'input_2_1:0' shape=(None, 240, 240, 1) dtype=float32>]\n"
     ]
    }
   ],
   "source": [
    "from keras.models import load_model\n",
    "import tensorflow as tf\n",
    "tf.compat.v1.disable_eager_execution()\n",
    "\n",
    "dropout = 0.5\n",
    "num_iter = 20\n",
    " \n",
    "input_data=[testAttrX, test_dataset]\n",
    "input_data[1] = np.asarray(input_data[1]).reshape(test_dataset.shape[0],240,240,1)\n",
    "num_samples = input_data[0].shape[0]\n",
    "\n",
    "\n",
    "model = load_model('clean_notebooks/cnn_inj_collage_1C.h5')\n",
    "\n",
    "predict_with_dropout = create_dropout_predict_function(model, dropout)\n",
    "\n",
    "predictions = np.zeros((num_samples, num_iter))\n",
    "for i in range(num_iter):\n",
    "    predictions[:,i] = predict_with_dropout([input_data,1])[0].reshape(-1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXUAAAD4CAYAAAATpHZ6AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAANyUlEQVR4nO3dX4xc5XnH8e8Tm3+BFGO8slwbuo5AibhoAa2oEVEUmaYlOAIuUGQUta7kylKbStBUSp1WaoXUC1NV+VMpamoFWqtKCZSQGhlVlBpHVXthuuavjUMxxCRGNl7aODS9aOPk6cW8trdb2zu7O7Mz8/D9SKM95z1nZ367OvvbM+/smY3MRJJUw/sGHUCS1DuWuiQVYqlLUiGWuiQVYqlLUiFLF/PBVqxYkePj44v5kJI08vbt2/dOZo51s++ilvr4+DiTk5OL+ZCSNPIi4s1u93X6RZIKsdQlqRBLXZIKsdQlqRBLXZIKsdQlqRBLXZIKsdQlqRBLXZIKWdQrSqsb3/pkT+/v8LYNPb0/SfV5pi5JhVjqklSIpS5JhVjqklSIpS5JhVjqklSIpS5JhVjqklSIpS5JhVjqklSIpS5JhVjqklSIpS5JhVjqklSIpS5JhVjqklSIpS5JhVjqklSIpS5JhVjqklSIpS5JhVjqklSIpS5JhXRd6hGxJCKej4hdbX1tROyNiEMR8UhEXNi/mJKkbszlTP1e4OC09QeAL2bmNcAPgM29DCZJmruuSj0i1gAbgK+19QDWA4+1XXYAd/UhnyRpDro9U/8S8Dngp239SuBEZp5s60eA1Wf7xIjYEhGTETE5NTW1kKySpFnMWuoR8UngeGbum88DZOb2zJzIzImxsbH53IUkqUtLu9jnFuCOiLgduBj4GeDLwLKIWNrO1tcAb/UvpiSpG7OeqWfm5zNzTWaOAxuBZzLz08Ae4O622yZgZ99SSpK6spC/U/894LMRcYjOHPuDvYkkSZqvbqZfTsvMbwPfbstvADf1PpIkab68olSSCrHUJakQS12SCrHUJakQS12SCrHUJakQS12SCrHUJakQS12SCrHUJakQS12SCrHUJakQS12SCrHUJakQS12SCrHUJakQS12SCrHUJakQS12SCrHUJakQS12SCrHUJakQS12SCrHUJakQS12SCrHUJakQS12SCrHUJakQS12SCrHUJamQpYMOoHMb3/pkT+/v8LYNPb2/XueD3meU3ms8U5ekQix1SSrEUpekQix1SSrEUpekQmYt9Yi4OCKejYgXI+JARNzfxtdGxN6IOBQRj0TEhf2PK0k6n27O1P8bWJ+ZvwBcD9wWEeuAB4AvZuY1wA+AzX1LKUnqyqylnh0/aqsXtFsC64HH2vgO4K5+BJQkda+ri48iYgmwD7gG+ArwOnAiM0+2XY4Aq8/xuVuALQBXX331QvP2TD8unJGkQevqhdLM/ElmXg+sAW4CPtztA2Tm9sycyMyJsbGx+aWUJHVlTn/9kpkngD3AzcCyiDh1pr8GeKu30SRJc9XNX7+MRcSytnwJ8HHgIJ1yv7vttgnY2aeMkqQudTOnvgrY0ebV3wc8mpm7IuIV4BsR8cfA88CDfcwpSerCrKWemS8BN5xl/A068+uSpCHhFaWSVIilLkmFWOqSVIj/+eg9xAuupPo8U5ekQix1SSrEUpekQix1SSrEUpekQix1SSrEUpekQix1SSrEUpekQix1SSrEUpekQix1SSrEUpekQix1SSrEUpekQix1SSrEUpekQvzPRxoqw/7fmQ5v2zDoCNJ5eaYuSYVY6pJUiKUuSYVY6pJUiKUuSYVY6pJUiKUuSYVY6pJUiBcfSXPQj4ujvKBJveSZuiQVYqlLUiGWuiQVYqlLUiGWuiQVMmupR8RVEbEnIl6JiAMRcW8bXx4RT0fEa+3jFf2PK0k6n27O1E8Cv5uZ1wHrgM9ExHXAVmB3Zl4L7G7rkqQBmrXUM/NoZj7Xlv8TOAisBu4EdrTddgB39SmjJKlLc5pTj4hx4AZgL7AyM4+2TceAlef4nC0RMRkRk1NTUwvJKkmaRdelHhGXAd8E7svMd6dvy8wE8myfl5nbM3MiMyfGxsYWFFaSdH5dlXpEXECn0L+emY+34bcjYlXbvgo43p+IkqRudfPXLwE8CBzMzC9M2/QEsKktbwJ29j6eJGkuunlDr1uAXwVejogX2tjvA9uARyNiM/Am8Km+JJQkdW3WUs/MfwbiHJtv7W0cSdJCeEWpJBViqUtSIZa6JBViqUtSIZa6JBViqUtSIZa6JBViqUtSIZa6JBViqUtSIZa6JBViqUtSIZa6JBViqUtSIZa6JBViqUtSIZa6JBViqUtSIZa6JBViqUtSIZa6JBWydNABJPXW+NYne3p/h7dt6On9qb88U5ekQix1SSrEUpekQix1SSrEUpekQix1SSrEUpekQix1SSrEUpekQix1SSrEUpekQix1SSrEUpekQix1SSpk1lKPiIci4nhE7J82tjwino6I19rHK/obU5LUjW7O1P8KuG3G2FZgd2ZeC+xu65KkAZu11DPzn4D/mDF8J7CjLe8A7uptLEnSfMx3Tn1lZh5ty8eAlefaMSK2RMRkRExOTU3N8+EkSd1Y8AulmZlAnmf79sycyMyJsbGxhT6cJOk85lvqb0fEKoD28XjvIkmS5mu+pf4EsKktbwJ29iaOJGkhls62Q0Q8DHwMWBERR4A/ArYBj0bEZuBN4FP9DClVNr71yUFHUCGzlnpm3nOOTbf2OIskaYG8olSSCrHUJakQS12SCrHUJakQS12SCrHUJakQS12SCrHUJakQS12SCrHUJakQS12SCrHUJamQWd/QS9J7Wz/eRfLwtg09v091eKYuSYVY6pJUiKUuSYU4py5p0fV6nt45+jM8U5ekQix1SSrEUpekQix1SSpkZF4o7ccFEJJUjWfqklSIpS5JhVjqklSIpS5JhVjqklSIpS5JhVjqklSIpS5JhYzMxUeSdC7DfnHiYr6LpGfqklSIpS5JhVjqklSIpS5JhVjqklTIgko9Im6LiFcj4lBEbO1VKEnS/My71CNiCfAV4BPAdcA9EXFdr4JJkuZuIWfqNwGHMvONzPwf4BvAnb2JJUmaj4VcfLQa+P609SPAL87cKSK2AFva6o8i4tUZu6wA3llAjkEZ1dwwutnNvfhGNftQ5Y4H5rT72bL/XLef3PcrSjNzO7D9XNsjYjIzJ/qdo9dGNTeMbnZzL75RzT6quWHh2Rcy/fIWcNW09TVtTJI0IAsp9X8Fro2ItRFxIbAReKI3sSRJ8zHv6ZfMPBkRvw08BSwBHsrMA/O4q3NOzQy5Uc0No5vd3ItvVLOPam5YYPbIzF4FkSQNmFeUSlIhlrokFdL3Uo+IhyLieETsnza2PCKejojX2scr2nhExJ+1tx14KSJu7He+8+S+KiL2RMQrEXEgIu4dhewRcXFEPBsRL7bc97fxtRGxt+V7pL24TURc1NYPte3jg8g9Lf+SiHg+InaNWO7DEfFyRLwQEZNtbKiPlZZ
lWUQ8FhHfiYiDEXHziOT+UPten7q9GxH3jUj232k/m/sj4uH2M9u74zwz+3oDPgrcCOyfNvYnwNa2vBV4oC3fDvw9EMA6YG+/850n9yrgxrb8AeDf6LwdwlBnb49/WVu+ANjb8jwKbGzjXwV+sy3/FvDVtrwReGRQ3/OW4bPA3wC72vqo5D4MrJgxNtTHSsuyA/iNtnwhsGwUcs/4GpYAx+hcoDPU2elctPld4JK2/ijw6708zhfrCxnn/5b6q8CqtrwKeLUt/wVwz9n2G/QN2Al8fJSyA+8HnqNzpe87wNI2fjPwVFt+Cri5LS9t+8WA8q4BdgPrgV3tB3Doc7cMh/n/pT7UxwpweSuYmDE+1LnP8nX8MvAvo5CdM1fiL2/H7S7gV3p5nA9qTn1lZh5ty8eAlW35bG89sHoxg51Ne8pzA52z3qHP3qYwXgCOA08DrwMnMvPkWbKdzt22/xC4clEDn/El4HPAT9v6lYxGboAE/iEi9kXnrTFg+I+VtcAU8JdtyutrEXEpw597po3Aw215qLNn5lvAnwLfA47SOW730cPjfOAvlGbnV9DQ/l1lRFwGfBO4LzPfnb5tWLNn5k8y83o6Z743AR8ebKLZRcQngeOZuW/QWebpI5l5I513Lf1MRHx0+sYhPVaW0pka/fPMvAH4LzpTFqcNae7T2tzzHcDfztw2jNnbHP+ddH6h/ixwKXBbLx9jUKX+dkSsAmgfj7fxoXrrgYi4gE6hfz0zH2/DI5EdIDNPAHvoPJ1bFhGnLjabnu107rb9cuDfFzcpALcAd0TEYTrv+Lke+DLDnxs4fQZGZh4HvkXnl+mwHytHgCOZubetP0an5Ic993SfAJ7LzLfb+rBn/yXgu5k5lZk/Bh6nc+z37DgfVKk/AWxqy5vozFefGv+19kr1OuCH055KLaqICOBB4GBmfmHapqHOHhFjEbGsLV9C53WAg3TK/e6228zcp76eu4Fn2hnOosrMz2fmmswcp/N0+pnM/DRDnhsgIi6NiA+cWqYzx7ufIT9WMvMY8P2I+FAbuhV4hSHPPcM9nJl6geHP/j1gXUS8v3XMqe95747zRXhh4GE6c0c/pnNmsJnOnNBu4DXgH4Hlbd+g8483XgdeBiYW+4WMabk/Quep20vAC+12+7BnB34eeL7l3g/8YRv/IPAscIjOU9WL2vjFbf1Q2/7BQX3Pp30NH+PMX78Mfe6W8cV2OwD8QRsf6mOlZbkemGzHy98BV4xC7pbnUjpnrZdPGxv67MD9wHfaz+dfAxf18jj3bQIkqZCBv1AqSeodS12SCrHUJakQS12SCrHUJakQS12SCrHUJamQ/wWdlzj5mjeDDQAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "FVC_pred = sc.inverse_transform(predictions)\n",
    "plt.hist(np.std(FVC_pred, axis = 1), bins=15)\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXAAAAD4CAYAAAD1jb0+AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAQAklEQVR4nO3df6zddX3H8efLgj+iOMq4aRqgFpVpyBILu+tYdMaBYsHNYmYW+UO7jaVukUQy96NKsmGyJbhNTZYZXQ2MuqDOqQTij2llbMZk4lospaUiBWtGU2gdOjBb2Irv/XG+V88u9/ace+45995P+3wkJ+d7Pt/v6ffN53vPi+/9fD/ne1NVSJLa86zlLkCSNBoDXJIaZYBLUqMMcElqlAEuSY06bSl3dvbZZ9f69euXcpeS1Lzdu3d/r6qmZrcvaYCvX7+eXbt2LeUuJal5Sb47V7tDKJLUKANckhplgEtSowxwSWqUAS5JjTLAJalRAwM8yXOTfCPJvUn2J3lv135Lku8k2dM9Nky8WknSjw0zD/wp4NKq+mGS04GvJflit+4PqurTkytPkjSfgQFevRuG/7B7eXr38CbikrTMhvomZpJVwG7gpcCHquruJL8L/FmSPwbuBLZV1VNzvHcrsBVg3bp1YytcC7d+2+eH2u7QjW+YcCWSxmGoi5hV9XRVbQDOBTYm+Vng3cDLgZ8HzgL+aJ73bq+q6aqanpp6xlf5JUkjWtAslKr6AXAXsKmqjlTPU8DfAhsnUJ8kaR7DzEKZSnJmt/w84HXAt5Ks7doCXAXsm1yZkqTZhhkDXwvs6MbBnwV8qqo+l+SfkkwBAfYAvzO5MiVJsw0zC2UvcNEc7ZdOpCJJ0lD8JqYkNcoAl6RGGeCS1CgDXJIaZYBLUqMMcElqlAEuSY0ywCWpUQa4JDXKAJekRhngktQoA1ySGmWAS1KjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEYZ4JLUKANckho1MMCTPDfJN5Lcm2R/kvd27ecnuTvJwSR/n+TZky9XkjRjmDPwp4BLq+oVwAZgU5JLgPcBH6yqlwLfB66ZWJWSpGcYGODV88Pu5endo4BLgU937TuAqyZRoCRpbkONgSdZlWQPcBTYCTwE/KCqjnebPAKcM897tybZlWTXsWPHxlCyJAmGDPCqerqqNgDnAhuBlw+7g6raXlXTVTU9NTU1WpWSpGdY0CyUqvoBcBfwi8CZSU7rVp0LHB5vaZKkExlmFspUkjO75ecBrwMO0AvyN3ebbQFun1CNkqQ5nDZ4E9YCO5Ksohf4n6qqzyW5H/hkkj8FvgncNME6JUmzDAzwqtoLXDRH+8P0xsMlScvAb2JKUqMMcElqlAEuSY0ywCWpUQa4JDXKAJekRhngktQoA1ySGmWAS1KjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEYZ4JLUKANckhplgEtSowxwSWqUAS5JjTLAJalRBrgkNWpggCc5L8ldSe5Psj/JO7v2G5IcTrKne1w5+XIlSTNOG2Kb48C7quqeJGcAu5Ps7NZ9sKr+cnLlSZLmMzDAq+oIcKRbfjLJAeCcSRcmSTqxBY2BJ1kPXATc3TVdm2RvkpuTrJ7nPVuT7Eqy69ixY4urVpL0Y0MHeJIXAJ8BrquqJ4APAy8BNtA7Q3//XO+rqu1VNV1V01NTU4uvWJIEDBngSU6nF963VtVnAarqsap6uqp+BHwU2Di5MiVJsw0zCyXATcCBqvpAX/vavs3eBOwbf3mSpPkMMwvllcBbgfuS7Ona3gNcnWQDUMAh4O0TqE+SNI9hZqF8Dcgcq74w/nIkScMa5gxcWrT12z4/1HaHbnzDhCuRTh5+lV6SGmWAS1KjDHBJapQBLkmNMsAlqVHOQjkJDDvDowXOVpGG5xm4JDXKAJekRhngktQoA1ySGmWAS1KjnIWiZ1jIrBZng0jLxzNwSWqUAS5JjTLAJalRBrgkNcoAl6RGOQtFJzXvraKTmWfgktSogQGe5LwkdyW5P8n+JO/s2s9KsjPJg93z6smXK0maMcwZ+HHgXVV1IXAJ8I4kFwLbgDur6gLgzu61JGmJDAzwqjpSVfd0y08CB4BzgM3Ajm6zHcBVE6pRkjSHBV3ETLIeuAi4G1hTVUe6VY8Ca+Z5z1ZgK8C6detGLlTqdzL9EQtpVENfxEzyAuAzwHVV9UT/uqoqoOZ6X1Vtr6rpqpqemppaVLGSpJ8YKsCTnE4vvG+tqs92zY8lWdutXwscnUyJkqS5DDMLJcBNwIGq+kDfqjuALd3yFuD28ZcnSZrPMGPgrwTeCtyXZE/X9h7gRuBTSa4Bvgv8+kQqlCTNaWCAV9XXgMyz+rLxliNJGpZfpdeiOBtEWj5+lV6SGmWAS1KjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEYZ4JLUKANckhplgEtSowxwSWqUAS5JjTLAJalRBrgkNcoAl6RGGeCS1CgDXJIaZYBLUqMMcElqlAEuSY0aGOBJbk5yNMm+vrYbkhxOsqd7XDnZMiVJsw1zBn4LsGmO9g9W1Ybu8YXxliVJGmRggFfVV4HHl6AWSdICLGYM/Noke7shltXzbZRka5JdSXYdO3ZsEbuTJPUbNcA/DLwE2AAcAd4/34ZVtb2qpqtqempqasTdSZJmGynAq+qxqnq6qn4EfBTYON6yJEmDjBTgSdb2vXwTsG++bSVJk3HaoA2SfAJ4DXB2kkeAPwFek2QDUMAh4O2TK1GSNJeBAV5VV8/RfNMEapEkLcDAAJdOBeu3fX6o7Q7d+IYJVyINz6/SS1KjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEYZ4JLUKANckhplgEtSowxwSWqUAS5JjTLAJalRBrgkNcoAl6RGGeCS1CgDXJIaZYBLUqP8izxj5F91kbSUPAOXpEYNDPAkNyc5mmRfX9tZSXYmebB7Xj3ZMiVJsw1zBn4LsGlW2zbgzqq6ALizey1JWkIDA7yqvgo8Pqt5M7CjW94BXDXesiRJg4w6Br6mqo50y48Ca8ZUjyRpSIuehVJVlaTmW59kK7AVYN26dYvd3UnB2SqSxmHUM/DHkqwF6J6PzrdhVW2vqumqmp6amhpxd5Kk2UYN8DuALd3yFuD28ZQjSRrWMNMIPwH8K/CyJI8kuQa4EXhdkgeB13avJUlLaOAYeFVdPc+qy8ZciyRpAfwmpiQ1ynuhSMvMWUkalWfgktQoA1ySGmWAS1KjDHBJapQXMaUFGPaCI3jRUZPnGbgkNcoAl6RGGeCS1CgDXJIaZYBLUqOchbKCLWTGg6RTj2fgktQoA1ySGmWAS1KjDHBJapQBLkmNOmVnoXhPC0mt8wxckhplgEtSoxY1hJLkEPAk8DRwvKqmx1GUJGmwcYyB/3JVfW8M/44kaQEcQpGkRi32DLyALycp4G+qavvsDZJsBbYCrFu3buQdeV8QaTjDflacXdW+xZ6Bv6qqLgauAN6R5NWzN6iq7VU1XVXTU1NTi9ydJGnGogK8qg53z0eB24CN4yhKkjTYyAG
e5PlJzphZBi4H9o2rMEnSiS1mDHwNcFuSmX/n41X1j2OpSpI00MgBXlUPA68YYy2SpAU4Ze+FshDOgJG0EjkPXJIaZYBLUqMMcElqlAEuSY0ywCWpUc5CkRrhbCjN5hm4JDXKAJekRhngktQoA1ySGmWAS1KjnIUiSSNayMygSfwFJM/AJalRBrgkNcoAl6RGGeCS1CgDXJIa5SwUaUJW+r1Lhq1vIbMnJvFvjtNKr2+hPAOXpEYZ4JLUqEUFeJJNSR5IcjDJtnEVJUkabOQAT7IK+BBwBXAhcHWSC8dVmCTpxBZzBr4ROFhVD1fV/wCfBDaPpyxJ0iCpqtHemLwZ2FRVv929fivwC1V17azttgJbu5cvAx4YvdyJORv43nIXMYeVWhdY26isbTSnem0vqqqp2Y0Tn0ZYVduB7ZPez2Ik2VVV08tdx2wrtS6wtlFZ22isbW6LGUI5DJzX9/rcrk2StAQWE+D/BlyQ5PwkzwbeAtwxnrIkSYOMPIRSVceTXAt8CVgF3FxV+8dW2dJaqUM8K7UusLZRWdtorG0OI1/ElCQtL7+JKUmNMsAlqVGnVIAnuSHJ4SR7useVfeve3d0S4IEkr+9rX5LbBST5iyTfSrI3yW1Jzuza1yf5776aP9L3np9Lcl9X218lyaTqm1Xrst1CIcl5Se5Kcn+S/Une2bUv+NhOqL5D3THZk2RX13ZWkp1JHuyeV3ft6Y7bwe64XzzBul7W1zd7kjyR5Lrl6rckNyc5mmRfX9uC+ynJlm77B5NsmWBtK/PzWVWnzAO4Afj9OdovBO4FngOcDzxE78Lsqm75xcCzu20unFBtlwOndcvvA97XLa8H9s3znm8AlwABvghcsQR9uGR9Ms/+1wIXd8tnAN/ujt+Cju0E6zsEnD2r7c+Bbd3ytr5je2V33NIdx7uXqA9XAY8CL1qufgNeDVzc/7O90H4CzgIe7p5Xd8urJ1Tbivx8nlJn4CewGfhkVT1VVd8BDtK7VcCS3S6gqr5cVce7l1+nN69+XknWAi+sqq9X76flY8BVk6htlmW9hUJVHamqe7rlJ4EDwDkneMt8x3YpbQZ2dMs7+Mlx2gx8rHq+DpzZHddJuwx4qKq+e4JtJtpvVfVV4PE59rmQfno9sLOqHq+q7wM7gU2TqG2lfj5PxQC/tvs16OaZX9HoBcC/923zSNc2X/uk/Ra9/2PPOD/JN5P8S5Jf6trO6epZ6tqWq0+eIcl64CLg7q5pIcd2Ugr4cpLd6d1GAmBNVR3plh8F1ixTbTPeAnyi7/VK6DdYeD+d8p/Pky7Ak3wlyb45HpuBDwMvATYAR4D3r6DaZra5HjgO3No1HQHWVdVFwO8BH0/ywqWseyVK8gLgM8B1VfUEy3xs+7yqqi6md5fOdyR5df/K7mxs2ebupveluzcC/9A1rZR++3+Wu5/ms9I+nyfdn1SrqtcOs12SjwKf616e6LYAY7tdwKDakvwG8CvAZd0PMFX1FPBUt7w7yUPAz3R19P8at1S3Mlj2WygkOZ1eeN9aVZ8FqKrH+tYPe2zHrqoOd89Hk9xGb9jhsSRrq+pI96v10eWorXMFcM9Mf62UfusstJ8OA6+Z1f7PkypuJX4+T7oz8BOZNb74JmDmKvMdwFuSPCfJ+cAF9C5ALNntApJsAv4QeGNV/Vdf+1R6914nyYu72h7uftV8Iskl3dXttwG3T6K2WZb1Fgrdf+tNwIGq+kBf+0KP7SRqe36SM2aW6V342tfVMDNDYgs/OU53AG/rZllcAvxn3xDCpFxN3/DJSui3Pgvtpy8BlydZ3Q39XN61jd2K/XyO+6roSn4AfwfcB+yl90Oxtm/d9fSutD9A39VielfAv92tu36CtR2kN563p3t8pGv/NWB/13YP8Kt975mm94F7CPhrum/WLkE/LkmfzLPvV9H71XpvX19dOcqxnUBtL6Y3c+Pe7phd37X/NHAn8CDwFeCsrj30/ijKQ13t0xPuu+cD/wH8VF/bsvQbvf+JHAH+l9748DWj9BO98eiD3eM3J1jbivx8+lV6SWrUKTWEIkknEwNckhplgEtSowxwSWqUAS5JjTLAJalRBrgkNer/AB+rFHlxMCdIAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "diff_fvc = (np.mean(FVC_pred,axis=1) - sc.inverse_transform(testY))\n",
    "diff = (np.mean(predictions,axis=1) - testY)\n",
    "plt.hist(diff_fvc, bins=30)\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "MSE :  103853.16952017092\n",
      "RMSE :  322.2625785290171\n"
     ]
    }
   ],
   "source": [
    "MSE = np.mean(diff_fvc*diff_fvc)\n",
    "RMSE = np.sqrt(MSE)\n",
    "print(\"MSE : \",MSE)\n",
    "print(\"RMSE : \", RMSE)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "MSE :  0.14930531873048822\n",
      "RMSE :  0.3864004641954875\n"
     ]
    }
   ],
   "source": [
    "MSE = np.mean(diff*diff)\n",
    "RMSE = np.sqrt(MSE)\n",
    "print(\"MSE : \",MSE)\n",
    "print(\"RMSE : \", RMSE)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "-10.699960980031888"
      ]
     },
     "execution_count": 26,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "compute_score(testY,np.mean(FVC_pred,axis=1),np.std(FVC_pred,axis=1))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Dropout 0.6"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[<tf.Tensor 'dense_6_input_3:0' shape=(None, 6) dtype=float32>, <tf.Tensor 'input_2_3:0' shape=(None, 240, 240, 1) dtype=float32>]\n"
     ]
    }
   ],
   "source": [
    "tf.compat.v1.disable_eager_execution()\n",
    "\n",
    "dropout = 0.6\n",
    "num_iter = 20\n",
    " \n",
    "input_data=[testAttrX, test_dataset]\n",
    "input_data[1] = np.asarray(input_data[1]).reshape(test_dataset.shape[0],240,240,1)\n",
    "num_samples = input_data[0].shape[0]\n",
    "\n",
    "predict_with_dropout = create_dropout_predict_function(model, dropout)\n",
    "\n",
    "predictions = np.zeros((num_samples, num_iter))\n",
    "for i in range(num_iter):\n",
    "    predictions[:,i] = predict_with_dropout([input_data,1])[0].reshape(-1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXAAAAD4CAYAAAD1jb0+AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAQ2ElEQVR4nO3dfYxldX3H8feny/NDWR6mZMuyXXyIhpi60OkWgjUWxCIYwYY2EKPbhGZtqwlUU7vatJWkTZZGRZs06uqim0YRilgIPlLAWJtm6SwssMtKWXFVyMIuFUT6hy347R/3DEyGmZ27M/fO3t/6fiU3c87v3Lnnkzt3PnPm3HPuSVUhSWrPLx3oAJKk+bHAJalRFrgkNcoCl6RGWeCS1KhDFnNlJ510Uq1cuXIxVylJzduyZcuTVTU2fXxRC3zlypVMTEws5iolqXlJfjDTuLtQJKlRFrgkNcoCl6RGWeCS1CgLXJIaZYFLUqP6LvAkS5Lcm+S2bv60JJuT7ExyQ5LDhhdTkjTd/myBXwnsmDJ/DXBtVb0CeAq4YpDBJEn71leBJ1kOXAR8ppsPcC5wU3eXTcAlQ8gnSZpFv2difgx4P3BsN38i8HRVPdfNPwqcMtM3JlkLrAVYsWLFvINq4Vau+8rAH3PX+osG/piS+jPnFniStwB7qmrLfFZQVRuqaryqxsfGXnIqvyRpnvrZAj8HeGuSC4EjgF8GPg4sTXJItxW+HHhseDElSdPNuQVeVR+oquVVtRK4DLizqt4O3AVc2t1tDXDL0FJKkl5iIceB/wXw3iQ76e0T3ziYSJKkfuzXx8lW1beAb3XTjwCrBx9JktQPz8SUpEZZ4JLUKAtckhplgUtSoyxwSWqUBS5JjbLAJalRFrgkNcoCl6RGWeCS1CgLXJIaZYFLUqMscElqlAUuSY2ywCWpURa4JDWqn4saH5Hk7iT3Jdme5Opu/HNJvp9ka3dbNfS0kqQX9HNFnp8B51bVs0kOBb6T5Gvdsj+vqpuGF0+SNJs5C7yqCni2mz20u9UwQ0mS5tbXPvAkS5JsBfYAt1fV5m7R3yW5P8m1SQ4fVkhJ0kv1VeBV9XxVrQKWA6uTvAb4APBq4DeBE+hdpf4lkqxNMpFkYu/evYNJLUnav6NQqupp4C7ggqraXT0/Az7LLFeor6oNVTVeVeNjY2MLDixJ6unnKJSxJEu76SOB84HvJlnWjQW4BNg2vJiSpOn6OQplGbApyRJ6hX9jVd2W5M4kY0CArcAfDy+mJGm6fo5CuR84Y4bxc4eSSJLUF8/ElKRGWeCS1CgLXJIaZYFLUqMscElqVD+HEapPK9d9ZaCPt2v9RQN9PEkHF7fAJalRFrgkNcoCl6RGWeCS1CgLXJIaZYFLUqMscElqlAUuSY2ywCWpUZ6JOcIGfWanpIOLW+CS1Kh+rol5RJK7k9yXZHuSq7vx05JsTrIzyQ1JDht+XEnSpH62wH8GnFtVrwVWARckOQu4Bri2ql4BPAVcMbSUkqSXmLPAq+fZbvbQ7lbAucBN3fgmelemlyQtkr72gSdZkmQrsAe4Hfge8HRVPdfd5VHglFm+d22SiSQTe/fuHUBkSRL0WeBV9XxVrQKWA6uBV/e7gqraUFXjVTU+NjY2v5SSpJfYr6NQqupp4C7gbGBpksnDEJcDjw02miRpX/o5CmUsydJu+kjgfGAHvSK/tLvbGuCWIWWUJM2gnxN5lgGbkiyhV/g3VtVtSR4Evpjkb4F7gY1DzClJmmbOAq+q+4EzZhh/hN7+cEnSAeCp9BopXhha6p+n0ktSoyxwSWqUBS5JjbLAJalRFrgkNcoCl6RGWeCS1CgLXJIaZYFLUqM8E1MHNc/s1MHMLXBJapQFLkmNssAlqVEWuCQ1ygKXpEZZ4JLUqH6uiXlqkruSPJhke5Iru/EPJXksydbuduHw40qSJvVzHPhzwPuq6p4kxwJbktzeLbu2qj48vHiSpNn0c03M3cDubvqnSXYApww7mCRp3/ZrH3iSlfQucLy5G3pPkvuTXJfk+Fm+Z22SiSQTe/fuXVhaSdIL+i7wJMcAXwKuqqpngE8ALwdW0dtC/8hM31dVG6pqvKrGx8bGFp5YkgT0WeBJDqVX3p+vqpsBquqJqnq+qn4OfBpYPbyYkqTp+jkKJcBGYEdVfXTK+LIpd3sbsG3w8SRJs+nnKJRzgHcADyTZ2o19ELg8ySqggF3Au4aQT5I0i36OQvkOkBkWfXXwcSRJ/fJMTElqlAUuSY2ywCWpURa4JDXKApekRv3CXtR40Be7laTF5ha4JDXKApekRlngktQoC1ySGvUL+yamBsM3g6UDxy1wSWqUBS5JjbLAJalRFrgkNcoCl6RGWeCS1Kh+rol5apK7kjyYZHuSK7vxE5LcnuTh7uvxw48rSZrUzxb4c8D7qup04Czg3UlOB9YBd1TVK4E7unlJ0iKZs8CrandV3dNN/xTYAZwCXAxs6u62CbhkSBklSTPYrzMxk6wEzgA2AydX1e5u0ePAybN8z1pgLcCKFSvmHVQ6WA36bNZd6y8a6ONpdPX9JmaSY4AvAVdV1TNTl1VVATXT91XVhqoar6rxsbGxBYWVJL2orwJPcii98v58Vd3cDT+RZFm3fBmwZzgRJUkz6ecolAAbgR1V9dEpi24F1nTTa4BbBh9PkjSbfvaBnwO8A3ggydZu7IPAeuDGJFcAPwD+YCgJJUkzmrPAq+o7QGZZfN5g40iS+uWZmJLUKAtckhplgUtSoyxwSWqUBS5JjfKixtJ+8CLOGiVugUtSoyxwSWqUBS5JjbLAJalRFrgkNcoCl6RGWeCS1CgLXJIaZYFLUqMscElqlAUuSY3q55qY1yXZk2TblLEPJXksydbuduFwY0qSputnC/xzwAUzjF9bVau621cHG0uSNJc5C7yqvg38eBGySJL2w0I+TvY9Sd4JTADvq6qnZrpTkrXAWoAVK1YsYHWS+jHoj7zdtf6igT6eBme+b2J+Ang5sArYDXxktjtW1YaqGq+q8bGxsXmuTpI03bwKvKqeqKrnq+rnwKeB1YONJUmay7wKPMmyKbNvA7bNdl9J0nDMuQ88yfXAG4CTkjwK/A3whiSrgAJ2Ae8aXkRJ0kzmLPCqunyG4Y1DyCJJ2g+eiSlJjbLAJalRFrgkNcoCl6RGWeCS1CgLXJIaZYFLUqMscElqlAUuSY2ywCWpURa4JDXKApekRlngktQoC1ySGmWBS1KjLHBJapQFLkmNmrPAk1yXZE+SbVPGTkhye5KHu6/HDzemJGm6frbAPwdcMG1sHXBHVb0SuKOblyQtojkLvKq+Dfx42vDFwKZuehNwyWBjSZLmMt994CdX1e5u+nHg5NnumGRtkokkE3v37p3n6iRJ0y34TcyqKqD2sXxDVY1X1fjY2NhCVydJ6sy3wJ9Isgyg+7pncJEkSf2Yb4HfCqzpptcAtwwmjiSpX/0cRng98B/Aq5I8muQKYD1wfpKHgTd285KkRXTIXHeoqstnWXTegLPs08p1X1nM1UnSyPNMTEl
qlAUuSY2ywCWpURa4JDXKApekRlngktQoC1ySGmWBS1KjLHBJapQFLkmNssAlqVEWuCQ1ygKXpEZZ4JLUKAtckho15+eBS9KgDfrz/Xetv2igj9cKt8AlqVEL2gJPsgv4KfA88FxVjQ8ilCRpboPYhfI7VfXkAB5HkrQf3IUiSY1a6BZ4Ad9MUsCnqmrD9DskWQusBVixYsUCVydpsXlB8dG10C3w11XVmcCbgXcnef30O1TVhqoar6rxsbGxBa5OkjRpQQVeVY91X/cAXwZWDyKUJGlu8y7wJEcnOXZyGngTsG1QwSRJ+7aQfeAnA19OMvk4X6iqrw8klSRpTvMu8Kp6BHjtALNIkvaDhxFKUqMscElqlAUuSY2ywCWpURa4JDXKApekRlngktQoC1ySGmWBS1KjLHBJapQFLkmNssAlqVEWuCQ1ygKXpEZZ4JLUqIVe1FiSDrgWLry8a/1FA39Mt8AlqVELKvAkFyR5KMnOJOsGFUqSNLeFXNR4CfCPwJuB04HLk5w+qGCSpH1byBb4amBnVT1SVf8LfBG4eDCxJElzWcibmKcAP5oy/yjwW9PvlGQtsLabfTbJQwtY50KdBDx5ANc/l1HPB2YclFHPOOr5oLGMuWZBj/NrMw0O/SiUqtoAbBj2evqRZKKqxg90jtmMej4w46CMesZRzwdmhIXtQnkMOHXK/PJuTJK0CBZS4P8JvDLJaUkOAy4Dbh1MLEnSXOa9C6WqnkvyHuAbwBLguqraPrBkwzESu3L2YdTzgRkHZdQzjno+MCOpqmE+viRpSDwTU5IaZYFLUqMOmgJPcmqSu5I8mGR7kiu78ROS3J7k4e7r8d14kvxD9zEA9yc5cxEyHpHk7iT3dRmv7sZPS7K5y3JD96YwSQ7v5nd2y1cOO2O33iVJ7k1y24jm25XkgSRbk0x0YyPzc+7WuzTJTUm+m2RHkrNHKWOSV3XP3+TtmSRXjVjGP+t+T7Ylub77/Rm11+KVXb7tSa7qxhbvOayqg+IGLAPO7KaPBf6L3in+fw+s68bXAdd00xcCXwMCnAVsXoSMAY7ppg8FNnfrvhG4rBv/JPAn3fSfAp/spi8Dblik5/K9wBeA27r5Ucu3Czhp2tjI/Jy79W4C/qibPgxYOmoZp2RdAjxO72SRkchI70TB7wNHTnkN/uEovRaB1wDbgKPoHRDyr8ArFvM5XLQXyWLfgFuA84GHgGXd2DLgoW76U8DlU+7/wv0WKd9RwD30zl59EjikGz8b+EY3/Q3g7G76kO5+GXKu5cAdwLnAbd2LbWTydevaxUsLfGR+zsBxXflkVDNOy/Um4N9HKSMvnul9Qvfaug343VF6LQK/D2ycMv9XwPsX8zk8aHahTNX9+3QGvS3ck6tqd7foceDkbnqmjwI4ZRGyLUmyFdgD3A58D3i6qp6bIccLGbvlPwFOHHLEj9F7Ef68mz9xxPIBFPDNJFvS+6gGGK2f82nAXuCz3a6ozyQ5esQyTnUZcH03PRIZq+ox4MPAD4Hd9F5bWxit1+I24LeTnJjkKHpb2KeyiM/hQVfgSY4BvgRcVVXPTF1WvT97B/S4yap6vqpW0dvSXQ28+kDmmSrJW4A9VbXlQGeZw+uq6kx6n4T57iSvn7pwBH7OhwBnAp+oqjOA/6H3r/QLRiAjAN0+5LcC/zx92YHM2O03vpjeH8NfBY4GLjgQWWZTVTuAa4BvAl8HtgLPT7vPUJ/Dg6rAkxxKr7w/X1U3d8NPJFnWLV9Gb8sXDvBHAVTV08Bd9P4NXJpk8qSqqTleyNgtPw747yHGOgd4a5Jd9D5d8lzg4yOUD3hh64yq2gN8md4fwlH6OT8KPFpVm7v5m+gV+ihlnPRm4J6qeqKbH5WMbwS+X1V7q+r/gJvpvT5H7bW4sap+o6peDzxF7723RXsOD5oCTxJgI7Cjqj46ZdGtwJpueg29feOT4+/s3hk+C/jJlH97hpVxLMnSbvpIevvod9Ar8ktnyTiZ/VLgzu4v+lBU1QeqanlVraT3b/WdVfX2UckHkOToJMdOTtPbf7uNEfo5V9XjwI+SvKobOg94cJQyTnE5L+4+mcwyChl/CJyV5Kjud3vyORyZ1yJAkl/pvq4Afo/em/+L9xwOcyf/Yt6A19H7V+V+ev/KbKW3T+pEem/KPUzvXeITuvuH3gUpvgc8AIwvQsZfB+7tMm4D/robfxlwN7CT3r+yh3fjR3TzO7vlL1vE5/MNvHgUysjk67Lc1922A3/ZjY/Mz7lb7ypgovtZ/wtw/AhmPJreVupxU8ZGJiNwNfDd7nfln4DDR+m12K333+j9YbkPOG+xn0NPpZekRh00u1Ak6ReNBS5JjbLAJalRFrgkNcoCl6RGWeCS1CgLXJIa9f++STmeLEGQSgAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "FVC_pred = sc.inverse_transform(predictions)\n",
    "plt.hist(np.std(FVC_pred, axis = 1), bins=15)\n",
    "plt.show()"
   ]
  },
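  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The per-sample spread plotted above doubles as the uncertainty estimate passed to `compute_score` further down. A quick check of its range:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Spread of the repeated predictions per test sample; reused as sigma below\n",
    "sigma = np.std(FVC_pred, axis=1)\n",
    "print(\"sigma - mean:\", sigma.mean(), \"min:\", sigma.min(), \"max:\", sigma.max())"
   ]
  },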
  {
   "cell_type": "code",
   "execution_count": 40,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXkAAAD4CAYAAAAJmJb0AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAOrUlEQVR4nO3dYYxc1XnG8ecpJvkApLHrlbVyoQuIRvKXGmdFkUIQFZQYu42hlSL8IXFbpE0lkEBNVW2D1PLRtIVIVSsiIyycipC2AoQV0gbHokGRAuna3Zg1DrFNFxVrsZdS1VStaE3efpizyXi1652Ze+/M7Ov/TxrNnXPv7H05d+bhzp0zx44IAQBy+rlBFwAAaA4hDwCJEfIAkBghDwCJEfIAkNiafu5s/fr1MTY21s9dAsCqd+jQoXcjYqSX5/Y15MfGxjQ1NdXPXQLAqmf7rV6fy+UaAEiMkAeAxAh5AEiMkAeAxAh5AEiMkAeAxAh5AEiMkAeAxAh5AEisr794BfptbPKFjrab3b294UqAweBMHgASI+QBIDFCHgASI+QBIDFCHgASI+QBIDFCHgASI+QBIDFCHgASI+QBIDFCHgASI+QBIDFCHgASI+QBIDGmGgbElMTIizN5AEiMkAeAxAh5AEhsxZC3faXtl2y/bvuo7ftL+0O2T9meLrdtzZcLAOhGJ1+8npP0pYg4bPsKSYdsHyjrvhIRf9FceQCAKlYM+YiYkzRXlt+3fUzSxqYLAwBU19U1edtjkq6X9Gppus/2Edt7ba9d5jkTtqdsT83Pz1erFgDQlY5D3vblkp6R9EBEnJX0mKRrJW1W60z/kaWeFxF7ImI8IsZHRkaqVwwA6FhHIW/7UrUC/qmIeFaSIuJ0RHwYET+R9LikG5orEwDQi05G11jSE5KORcSjbe2jbZvdJWmm/vIAAFV0MrrmU5I+L+k129Ol7cuSdtreLCkkzUr6YgP1AQAq6GR0zfckeYlV36q/HABAnfjFKwAkxiyUWJU6nTUSuNhxJg8AiRHyAJAYIQ8AiRHyAJAYIQ8AiRHyAJAYIQ8AiTFOHn3BuHZgMDiTB4DECHkASIyQB4DECHkASIyQB4DECHkASIyQB4DECHkASIyQB4DECHkASIyQB4DECHkASIyQB4DECHkASIyQB4DECHkASIyQB4DECHkASGzFkLd9pe2XbL9u+6jt+0v7OtsHbB8v92ubLxcA0I1OzuTPSfpSRGySdKOke21vkjQp6WBEXCfpYHkMABgiK4Z8RMxFxOGy/L6kY5I2StohaV/ZbJ+kOxuqEQDQo66uydsek3S9pFclbYiIubLqHUkblnnOhO0p21Pz8/NVagUAdKnjkLd9uaRnJD0QEWfb10VESIqlnhcReyJiPCLGR0ZGKhULAOhORyFv+1K1Av6piHi2NJ+2PVrWj0o600yJAIBedTK6xpKekHQsIh5tW7Vf0q6yvEvS8/WXBwCoYk0H23xK0uclvWZ7urR9WdJuSX9n+x5Jb0n6XCMVAgB6tmLIR8T3JHmZ1bfWWw4AoE784hUAEuvkcg2ABo1NvtDRdrO7tzdcCTLiTB4AEiPkASAxQh4AEiPkASAxQh4AEiPkASAxQh4AEmOcPNCFTse0S4xrx3DgTB4AEiPkASAxQh4AEiPkASAxQh4AEiPkASAxQh4AEiPkASAxQh4AEiPkASAxQh4AEiPkASAxQh4AEiPkASAxQh4AEiPkASAxQh4AEiPkASCxFUPe9l7bZ2zPtLU9ZPuU7ely29ZsmQCAXnRyJv+kpK1LtH8lIjaX27fqLQsAUIcVQz4iXpb0Xh9qAQDUrMo1+ftsHymXc9Yut5HtCdtTtqfm5+cr7A4A0K1eQ/4xSddK2ixpTtIjy20YEXsiYjwixkdGRnrcHQCgFz2FfEScjogPI+Inkh6XdEO9ZQEA6tBTyNsebXt4l6SZ5bYFAAzOmpU2sP20pFskrbf9tqQ/lXSL7c2SQtKspC82VyIAoFcrhnxE7Fyi+YkGagEA1IxfvAJAYiueyePiMzb5Qsfbzu7e3mAlq1s3/Qg0hTN5AEiMkAeAxAh5AEiMkAeAxAh5AEiMkAeAxAh5AEiMcfKohLHgwHDjTB4AEiPkASAxQh4AEiPkASAxQh4AEiPkASAxQh4AEiPkASAxQh4AEiPkASAxQh4AEiPkASAxQh4AEmMWygHodObG2d3bG64EFzNehxcHzuQBIDFCHgASI+QBIDFCHgASWzHkbe+1fcb2TFvbOtsHbB8v92ubLRMA0ItOzuSflLR1UdukpIMRcZ2kg+UxAGDIrBjyEfGypPcWNe+QtK8s75N0Z71lAQDq0Os4+Q0RMVeW35G0YbkNbU9ImpCkq666qsfdXZwYxwygqspfvEZESIoLrN8TEeMRMT4yMlJ1dwCALvQa8qdtj0pSuT9TX0kAgLr0GvL7Je0qy7skPV9POQCAOnUyhPJpSd+X9Anbb9u+R9JuSb9u+7ik28pjAMCQWfGL14jYucyqW2uuBQBQM37xCgCJMdVwAp0OtcTqxpBa9IIzeQBIjJAHgMQIeQBIjJAHgMQIeQBIjJAHgMQIeQBIjJAHgMQIeQBIjJAHgMQIeQBIjJAHgMQIeQBIjJAHgMQIeQBIjJAHgMQIeQBIjJAHgMQIeQBIjJAHgMQIeQBIjJAHgMQIeQBIjJAHgMQIeQBIjJAHgMTWVHmy7VlJ70v6UNK5iBivoygAQD0qhXzxaxHxbg1/BwBQMy7XAEBiVUM+JL1o+5DtiaU2sD1he8r21Pz8fMXdAQC6UTXkb4qILZLukHSv7ZsXbxAReyJiPCLGR0ZGKu4OANCNSiEfEafK/RlJz0m6oY6iAAD16DnkbV9m+4qFZUm3S5qpqzAAQHVVRtdskPSc7YW/8/WI+MdaqgIA1KLnkI+INyX9So21AABqxhBKAEiMkAeAxAh5AEiMkAeAxAh5AEiMkAeAxAh5AEiMkAeAxAh5AEisjn80ZKiMTb7Q0Xazu7cPbN/AatLN67qJ9xWq4UweABIj5AEgMUIeABIj5AEgMUIeABIj5AEgsXRDKAEMv0EOda7Tahheypk8ACRGyANAYoQ8ACRGyANAYoQ8ACRGyANAYqtmCGXdMzyuhqFPQC8GORvqsM/EOuz1NYEzeQBIjJAHgMQIeQBIjJAHgMQqhbztrbbfsH3C9mRdRQEA6tFzyNu+RNJfS7pD0iZJO21vqqswAEB1Vc7kb5B0IiLejIj/lfQNSTvqKQsAUIcq4+Q3Svq3tsdvS/rVxRvZnpA0UR7+l+03KuyzF+slvVvlD/jhmio5X+W6GkJdnRvGmqREdTX03mvXt77q8r9lcV2/1Ot+G/8xVETskbSn6f0sx/ZURIwPav/Loa7uDGNdw1iTRF3dGMaapHrrqnK55pSkK9se/2JpAwAMiSoh/8+SrrN9te2PSLpb0v56ygIA1KHnyzURcc72fZK+LekSSXsj4mhtldVnYJeKVkBd3RnGuoaxJom6ujGMNUk11uWIqOtvAQCGDL94BYDECHkASCx
VyNv+W9vT5TZre7q0j9n+n7Z1X217zidtv1amZvhL226grodsn2rb/7a2dX9c9v2G7c+0tTc6ZYTtP7f9I9tHbD9n++OlfaB9tUSdA5k6w/aVtl+y/brto7bvL+1dH8sGapstx2Ha9lRpW2f7gO3j5X5taXc5VifKsd7SUE2faOuTadtnbT8wiP6yvdf2GdszbW1d94/tXWX747Z3NVBTf96DEZHyJukRSX9SlsckzSyz3Q8k3SjJkv5B0h0N1PKQpD9con2TpB9K+qikqyWdVOtL7EvK8jWSPlK22VRzTbdLWlOWH5b08DD01aL9Nd4PF9j3qKQtZfkKST8ux6urY9lQbbOS1i9q+zNJk2V5su14bivHyuXYvdqHvrtE0jtq/YCn7/0l6WZJW9pfx932j6R1kt4s92vL8tqaa+rLezDVmfyC8n+3z0l6eoXtRiV9LCJeiVYPfk3Snc1X+FM7JH0jIj6IiH+VdEKt6SIanzIiIl6MiHPl4Stq/c5hWQPqq4FNnRERcxFxuCy/L+mYWr/yXs5yx7JfdkjaV5b36WfHZoekr0XLK5I+Xo5lk26VdDIi3rrANo31V0S8LOm9JfbXTf98RtKBiHgvIv5D0gFJW+usqV/vwZQhL+nTkk5HxPG2tqtt/4vt79r+dGnbqNZ0DAve1oXfyFXcVz6W7V34qKilp4bYeIH2pvyeWmcFCwbdVwv63Q9Lsj0m6XpJr5ambo5lE0LSi7YPuTVtiCRtiIi5svyOpA0DqGvB3Tr/BGvQ/SV13z9p3oOrLuRtf8f2zBK39jO8nTr/RTYn6aqIuF7SH0j6uu2P9bGuxyRdK2lzqeWROvfdY00L2zwo6Zykp0pT4321mti+XNIzkh6IiLMa0LFc5KaI2KLWDLD32r65fWU5yxvI2Gi3fhj5WUl/X5qGob/OM8j+WUrT78FV8w95L4iI2y603vYaSb8l6ZNtz/lA0gdl+ZDtk5J+Wa1pGNo/IvU8NcNKdbXV97ikb5aHF5oaovKUER301e9I+g1Jt5YXfl/6qgsDnTrD9qVqBfxTEfGsJEXE6bb1nR7LWkXEqXJ/xvZzal3mOG17NCLmysf6M/2uq7hD0uGFfhqG/iq67Z9Tkm5Z1P5PdRfVj/fgqjuT78Btkn4UET/9WGN7xK3572X7GknXSXqzfHw7a/vGch3/C5Ker7ugRddA75K08A37fkl32/6o7atLXT9QH6aMsL1V0h9J+mxE/Hdb+0D7apGBTZ1R/hufkHQsIh5ta+/2WNZd12W2r1hYVuvLu5my/4URILv0s2OzX9IX3HKjpP9su2zRhPM+RQ+6v9p02z/flnS77bXlEtPtpa02fXsP9vpt8bDeJD0p6fcXtf22pKOSpiUdlvSbbevG1XrhnZT0Vyq/Aq65pr+R9JqkI2q9qEbb1j1Y9v2G2r4pV+tb/x+XdQ82UNMJta45TpfbV4ehr5aos9F+uMB+b1LrI/2Rtj7a1suxrLmua9QalfLDcpweLO2/IOmgpOOSviNpXWm3Wv+4z8lS93iDfXaZpH+X9PNVXvs11PG0Wpc8/k+t69b39NI/al0nP1Fuv9tATX15DzKtAQAklvFyDQCgIOQBIDFCHgASI+QBIDFCHgASI+QBIDFCHgAS+3+qjacLjrlioAAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "diff_fvc = (np.mean(FVC_pred,axis=1) - sc.inverse_transform(testY))\n",
    "diff = (np.mean(predictions,axis=1) - testY)\n",
    "plt.hist(diff_fvc, bins=30)\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 41,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "MSE :  116024.5441816608\n",
      "RMSE :  340.6237575120984\n"
     ]
    }
   ],
   "source": [
    "MSE = np.mean(diff_fvc*diff_fvc)\n",
    "RMSE = np.sqrt(MSE)\n",
    "print(\"MSE : \",MSE)\n",
    "print(\"RMSE : \", RMSE)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 42,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "MSE :  0.16680359039247164\n",
      "RMSE :  0.40841595266648395\n"
     ]
    }
   ],
   "source": [
    "MSE = np.mean(diff*diff)\n",
    "RMSE = np.sqrt(MSE)\n",
    "print(\"MSE : \",MSE)\n",
    "print(\"RMSE : \", RMSE)"
   ]
  },
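  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Sanity check: assuming `sc` is an affine scaler such as a `StandardScaler`, differences in raw FVC units are the scaled differences times `sc.scale_`, so the two RMSEs above should differ by exactly that factor (here 340.62 / 0.4084 ≈ 834):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# For an affine scaler, errors scale linearly, and hence so does the RMSE\n",
    "rmse_raw = np.sqrt(np.mean(diff_fvc ** 2))\n",
    "rmse_scaled = np.sqrt(np.mean(diff ** 2))\n",
    "print(rmse_raw / rmse_scaled)  # should match sc.scale_[0] if sc is a StandardScaler"
   ]
  },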
  {
   "cell_type": "code",
   "execution_count": 43,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "-10.032865158264682"
      ]
     },
     "execution_count": 43,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "compute_score(testY,np.mean(FVC_pred,axis=1),np.std(FVC_pred,axis=1))"
   ]
  },
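  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`compute_score` is defined earlier in the notebook; a minimal sketch of the OSIC modified Laplace log likelihood it is assumed to implement (sigma floored at 70, absolute error capped at 1000, larger i.e. less negative is better):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sketch of the metric compute_score is assumed to implement\n",
    "def laplace_log_likelihood(fvc_true, fvc_pred, sigma):\n",
    "    sigma_clipped = np.maximum(sigma, 70)\n",
    "    delta = np.minimum(np.abs(fvc_true - fvc_pred), 1000)\n",
    "    metric = -np.sqrt(2) * delta / sigma_clipped - np.log(np.sqrt(2) * sigma_clipped)\n",
    "    return np.mean(metric)"
   ]
  },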
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Dropout 0.7"
   ]
  },
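  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "This run keeps a dropout rate of 0.7 active at prediction time (Monte-Carlo dropout): every test sample is pushed through the network several times and the spread of the outputs serves as its uncertainty. A minimal sketch of such a prediction loop in graph mode, with `model`, `testX_mlp` and `testX_img` as placeholder names for the two-input model and test arrays:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import tensorflow.keras.backend as K\n",
    "\n",
    "# Hypothetical sketch: with eager execution disabled, a backend function\n",
    "# called with learning_phase=1 keeps the dropout layers active\n",
    "mc_predict = K.function(model.inputs + [K.learning_phase()], model.outputs)\n",
    "preds = np.stack([mc_predict([testX_mlp, testX_img, 1])[0].ravel()\n",
    "                  for _ in range(20)], axis=1)  # one column per stochastic pass"
   ]
  },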
  {
   "cell_type": "code",
   "execution_count": 50,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[<tf.Tensor 'dense_6_input_5:0' shape=(None, 6) dtype=float32>, <tf.Tensor 'input_2_5:0' shape=(None, 240, 240, 1) dtype=float32>]\n"
     ]
    }
   ],
   "source": [
    "tf.compat.v1.disable_eager_execution()\n",
    "\n",
    "dropout = 0.7\n",
    "num_iter = 20\n",
    " \n",