CNN_injection_superposition_4Chann.ipynb
{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# CNN Superposition de 4 images + MLP"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "https://www.pyimagesearch.com/2019/02/04/keras-multiple-inputs-and-mixed-data/\n",
    "\n",
    "https://www.kaggle.com/franklemuchahary/basic-cnn-keras-with-cross-validation"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import pandas as pd\n",
    "import os\n",
    "import logging\n",
    "logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## A - Preprocessing : Reading Data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "os.chdir('../')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>Patient</th>\n",
       "      <th>Weeks</th>\n",
       "      <th>FVC</th>\n",
       "      <th>Percent</th>\n",
       "      <th>Age</th>\n",
       "      <th>Sex</th>\n",
       "      <th>SmokingStatus</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>-4</td>\n",
       "      <td>2315</td>\n",
       "      <td>58.253649</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>5</td>\n",
       "      <td>2214</td>\n",
       "      <td>55.712129</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>7</td>\n",
       "      <td>2061</td>\n",
       "      <td>51.862104</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>9</td>\n",
       "      <td>2144</td>\n",
       "      <td>53.950679</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>ID00007637202177411956430</td>\n",
       "      <td>11</td>\n",
       "      <td>2069</td>\n",
       "      <td>52.063412</td>\n",
       "      <td>79</td>\n",
       "      <td>Male</td>\n",
       "      <td>Ex-smoker</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                     Patient  Weeks   FVC    Percent  Age   Sex SmokingStatus\n",
       "0  ID00007637202177411956430     -4  2315  58.253649   79  Male     Ex-smoker\n",
       "1  ID00007637202177411956430      5  2214  55.712129   79  Male     Ex-smoker\n",
       "2  ID00007637202177411956430      7  2061  51.862104   79  Male     Ex-smoker\n",
       "3  ID00007637202177411956430      9  2144  53.950679   79  Male     Ex-smoker\n",
       "4  ID00007637202177411956430     11  2069  52.063412   79  Male     Ex-smoker"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from preprocessing.read_load_data import read_data\n",
    "\n",
    "input_directory='../osic-pulmonary-fibrosis-progression'\n",
    "train_df, test_df, sample_df = read_data(input_directory)\n",
    "train_df.head()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## B - Preprocessing : Loading Data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "patients_train_ids= train_df.Patient.unique()\n",
    "patient_test_list= test_df.Patient.unique()\n",
    "patients_train_ids = [pat for pat in patients_train_ids]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:loading  attributes...\n",
      "INFO:loading images...\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Array shape:  (176, 240, 240, 4)\n",
      "min value:  -0.1251496147096971\n",
      "max value:  0.16921848376184256\n"
     ]
    }
   ],
   "source": [
    "from preprocessing.read_load_data import load_images\n",
    "\n",
    "logging.info(\"loading  attributes...\")\n",
    "df = pd.read_csv(f'{input_directory}/train.csv')\n",
    "patients_train_ids= df.Patient.unique().tolist()\n",
    "\n",
    "logging.info(\"loading images...\")\n",
    "images = load_images(input_directory,\n",
    "                    'train',\n",
    "                     patients_train_ids,\n",
    "                     option='superposition',\n",
    "                     outputH = 240,\n",
    "                     outputW = 240)\n",
    "\n",
    "print(\"Array shape: \", images.shape)\n",
    "#check value between -1,1\n",
    "print('min value: ', np.amin(images))\n",
    "print('max value: ', np.amax(images))"
   ]
  },
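  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A minimal sketch of what `option='superposition'` presumably does: four equally spaced CT slices per patient are stacked along the channel axis, giving one `(240, 240, 4)` array per patient. The helper below is hypothetical; the real logic lives in `preprocessing.read_load_data.load_images`."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def superpose_slices(slices, n_channels=4):\n",
    "    # Hypothetical sketch, not the project code: pick n_channels equally\n",
    "    # spaced slices (assumed already resized to 240x240) and stack them\n",
    "    # as channels -> shape (240, 240, n_channels).\n",
    "    idx = np.linspace(0, len(slices) - 1, n_channels).astype(int)\n",
    "    return np.stack([slices[i] for i in idx], axis=-1)"
   ]
  },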
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## C - Preprocessing : shuffle"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.model_selection import train_test_split\n",
    "\n",
    "split = train_test_split(patients_train_ids, images, test_size=0.2, random_state=42)\n",
    "(trainPatient, testPatient, trainImagesX, testImagesX) = split"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "#split the dataframe like the images\n",
    "df_train = df[df.Patient.isin(trainPatient)].copy()\n",
    "df_test = df[df.Patient.isin(testPatient)].copy()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:NumExpr defaulting to 8 threads.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(1093, 8) (280, 8)\n"
     ]
    }
   ],
   "source": [
    "from preprocessing.read_load_data import create_dataframe\n",
    "\n",
    "trainAttrX = create_dataframe(df_train)\n",
    "testAttrX = create_dataframe(df_test)\n",
    "print(trainAttrX.shape, testAttrX.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1093 280\n"
     ]
    }
   ],
   "source": [
    "#set one image per training row\n",
    "\n",
    "indice = 0\n",
    "train_dataset = np.ndarray((len(trainAttrX),240,240,4))\n",
    "for i,patient in enumerate(trainPatient):\n",
    "    nb_data = len(trainAttrX[trainAttrX.PatientID ==patient])\n",
    "    for ii in range(nb_data):\n",
    "        train_dataset[indice]=(trainImagesX[i])\n",
    "        indice+=1\n",
    "        \n",
    "        \n",
    "indicet = 0        \n",
    "test_dataset = np.ndarray((len(testAttrX),240,240,4))\n",
    "for i,patient in enumerate(testPatient):\n",
    "    nb_data = len(testAttrX[testAttrX.PatientID ==patient])\n",
    "    for ii in range(nb_data):\n",
    "        test_dataset[indicet] = testImagesX[i]\n",
    "        indicet+=1\n",
    "        \n",
    "        \n",
    "print(len(train_dataset),len(test_dataset))"
   ]
  },
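  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The duplication loop above can also be written with `np.repeat`: count the rows per patient once, then repeat each patient's image that many times. A sketch, assuming (as above) that `trainPatient` and `trainImagesX` are index-aligned."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Vectorized equivalent of the loop above (sketch).\n",
    "counts = [len(trainAttrX[trainAttrX.PatientID == p]) for p in trainPatient]\n",
    "train_dataset_alt = np.repeat(trainImagesX, counts, axis=0)\n",
    "assert train_dataset_alt.shape == train_dataset.shape"
   ]
  },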
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## D - Preprocessing : Scaling + Encoding"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "from preprocessing.scale_data import scale_variable\n",
    "\n",
    "sc, trainAttrX, testAttrX = scale_variable(trainAttrX, testAttrX,'Target_FVC')\n",
    "sc1, trainAttrX, testAttrX = scale_variable(trainAttrX, testAttrX,'First_FVC')\n",
    "sc2, trainAttrX, testAttrX = scale_variable(trainAttrX, testAttrX,'Age')\n",
    "\n",
    "trainY = trainAttrX.loc[:,'Target_FVC_scaled']\n",
    "testY = testAttrX.loc[:,'Target_FVC_scaled']"
   ]
  },
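  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`scale_variable` is project code; judging from the `*_scaled` columns it creates and the scaler objects it returns, a plausible reading is a `StandardScaler` fit on the train split only and applied to both splits, roughly as below."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.preprocessing import StandardScaler\n",
    "\n",
    "def scale_variable_sketch(train_df, test_df, col):\n",
    "    # Hypothetical re-implementation: fit on train only to avoid leakage.\n",
    "    sc = StandardScaler()\n",
    "    train_df[col + '_scaled'] = sc.fit_transform(train_df[[col]]).ravel()\n",
    "    test_df[col + '_scaled'] = sc.transform(test_df[[col]]).ravel()\n",
    "    return sc, train_df, test_df"
   ]
  },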
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "from preprocessing.scale_data import encode_variable\n",
    "\n",
    "trainAttrX, testAttrX = encode_variable(trainAttrX, testAttrX,'Sex')\n",
    "trainAttrX, testAttrX = encode_variable(trainAttrX, testAttrX,'SmokingStatus')\n",
    "\n",
    "for dft in [trainAttrX,testAttrX]:\n",
    "    dft.drop(columns = ['Sex','SmokingStatus','Target_FVC','Target_FVC_scaled',\n",
    "                          'PatientID','First_FVC','Age'], inplace = True)\n",
    "    dft.loc[:,'First_Percent'] = dft.loc[:,'First_Percent']/100\n",
    "    dft.loc[:,'Delta_week'] = dft.loc[:,'Delta_week']/133"
   ]
  },
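  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Likewise for `encode_variable`: the `_le` suffix on the resulting columns suggests a `LabelEncoder` fit on the train split, roughly:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.preprocessing import LabelEncoder\n",
    "\n",
    "def encode_variable_sketch(train_df, test_df, col):\n",
    "    # Hypothetical re-implementation of encode_variable.\n",
    "    le = LabelEncoder().fit(train_df[col])\n",
    "    train_df[col + '_le'] = le.transform(train_df[col])\n",
    "    test_df[col + '_le'] = le.transform(test_df[col])\n",
    "    return train_df, test_df"
   ]
  },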
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>First_Percent</th>\n",
       "      <th>Delta_week</th>\n",
       "      <th>First_FVC_scaled</th>\n",
       "      <th>Age_scaled</th>\n",
       "      <th>Sex_le</th>\n",
       "      <th>SmokingStatus_le</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>0.582536</td>\n",
       "      <td>0.067669</td>\n",
       "      <td>-0.631784</td>\n",
       "      <td>1.684379</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.582536</td>\n",
       "      <td>0.082707</td>\n",
       "      <td>-0.631784</td>\n",
       "      <td>1.684379</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.582536</td>\n",
       "      <td>0.097744</td>\n",
       "      <td>-0.631784</td>\n",
       "      <td>1.684379</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>0.582536</td>\n",
       "      <td>0.112782</td>\n",
       "      <td>-0.631784</td>\n",
       "      <td>1.684379</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>0.582536</td>\n",
       "      <td>0.157895</td>\n",
       "      <td>-0.631784</td>\n",
       "      <td>1.684379</td>\n",
       "      <td>1</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "   First_Percent  Delta_week  First_FVC_scaled  Age_scaled  Sex_le  \\\n",
       "0       0.582536    0.067669         -0.631784    1.684379       1   \n",
       "1       0.582536    0.082707         -0.631784    1.684379       1   \n",
       "2       0.582536    0.097744         -0.631784    1.684379       1   \n",
       "3       0.582536    0.112782         -0.631784    1.684379       1   \n",
       "4       0.582536    0.157895         -0.631784    1.684379       1   \n",
       "\n",
       "   SmokingStatus_le  \n",
       "0                 1  \n",
       "1                 1  \n",
       "2                 1  \n",
       "3                 1  \n",
       "4                 1  "
      ]
     },
     "execution_count": 12,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "trainAttrX.head(5)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## E - Processing : Create models"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "from processing.models import create_hybrid\n",
    "from keras.optimizers import Adam\n",
    "\n",
    "opt = Adam(lr=1e-3, decay=1e-3 / 200)"
   ]
  },
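  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`create_hybrid` lives in `processing.models`; following the pyimagesearch multi-input pattern linked at the top, it presumably concatenates a small MLP over the tabular features with a CNN over the `(240, 240, 4)` volume. A hedged sketch, not the actual architecture:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from tensorflow.keras import layers, models\n",
    "\n",
    "def create_hybrid_sketch(n_features, shape=(240, 240, 4)):\n",
    "    # Hypothetical architecture; the real one is in processing/models.py.\n",
    "    mlp_in = layers.Input(shape=(n_features,))\n",
    "    x = layers.Dense(8, activation='relu')(mlp_in)\n",
    "\n",
    "    cnn_in = layers.Input(shape=shape)\n",
    "    y = layers.Conv2D(16, 3, activation='relu')(cnn_in)\n",
    "    y = layers.MaxPooling2D()(y)\n",
    "    y = layers.Flatten()(y)\n",
    "    y = layers.Dense(16, activation='relu')(y)\n",
    "\n",
    "    z = layers.concatenate([x, y])\n",
    "    out = layers.Dense(1, activation='linear')(z)\n",
    "    return models.Model(inputs=[mlp_in, cnn_in], outputs=out)"
   ]
  },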
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [],
   "source": [
    "from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping\n",
    "\n",
    "#set early stopping criteria\n",
    "pat = 5 #this is the number of epochs with no improvement after which the training will stop\n",
    "es = EarlyStopping(monitor='val_loss', patience=pat, verbose=1)\n",
    "\n",
    "#define the model checkpoint callback -> this will keep on saving the model as a physical file\n",
    "cp = ModelCheckpoint('clean_notebooks/cnn_injection_superposition.h5', verbose=1, save_best_only=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "def custom_shuffle_split(trainAttrX,train_dataset,trainY,test_size = 0.1 ):\n",
    "    cut = int(len(trainY)*test_size)\n",
    "    arr = list(np.arange(len(trainY)))\n",
    "    np.random.shuffle(arr)\n",
    "    trainidx = arr[cut:]\n",
    "    testidx = arr[:cut]\n",
    "    train_x, train_y = [trainAttrX.iloc[trainidx], train_dataset[trainidx]] , trainY[trainidx]\n",
    "    val_x, val_y = [trainAttrX.iloc[testidx], train_dataset[testidx]] , trainY[testidx]\n",
    "    return train_x, val_x, train_y, val_y"
   ]
  },
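  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The training cell below prints `Training on Fold:  k` for each fold; it presumably re-splits with `custom_shuffle_split` and fits the hybrid model with the callbacks defined above, along these lines (a sketch: the loss, fold count and `create_hybrid` signature are assumptions):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Hypothetical fold loop matching the output of the next cell.\n",
    "n_folds = 5  # assumed\n",
    "for k in range(n_folds):\n",
    "    print('Training on Fold: ', k + 1)\n",
    "    model = create_hybrid(trainAttrX.shape[1], (240, 240, 4))  # signature assumed\n",
    "    model.compile(loss='mse', optimizer=opt)  # loss assumed\n",
    "    train_x, val_x, train_y, val_y = custom_shuffle_split(trainAttrX, train_dataset, trainY)\n",
    "    model.fit(train_x, train_y, validation_data=(val_x, val_y),\n",
    "              epochs=30, callbacks=[es, cp], verbose=1)"
   ]
  },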
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Training on Fold:  1\n",
      "Epoch 1/30\n",
      "111/111 [==============================] - ETA: 6:20 - loss: 1.450 - ETA: 2:06 - loss: 1.812 - ETA: 2:01 - loss: 2.012 - ETA: 1:55 - loss: 2.118 - ETA: 1:52 - loss: 2.210 - ETA: 1:49 - loss: 2.247 - ETA: 1:46 - loss: 2.256 - ETA: 1:44 - loss: 2.297 - ETA: 1:43 - loss: 2.314 - ETA: 1:42 - loss: 2.310 - ETA: 1:41 - loss: 2.306 - ETA: 1:40 - loss: 2.311 - ETA: 1:38 - loss: 2.311 - ETA: 1:37 - loss: 2.319 - ETA: 1:36 - loss: 2.318 - ETA: 1:35 - loss: 2.316 - ETA: 1:34 - loss: 2.319 - ETA: 1:32 - loss: 2.320 - ETA: 1:31 - loss: 2.319 - ETA: 1:31 - loss: 2.317 - ETA: 1:30 - loss: 2.315 - ETA: 1:28 - loss: 2.316 - ETA: 1:27 - loss: 2.317 - ETA: 1:26 - loss: 2.318 - ETA: 1:25 - loss: 2.316 - ETA: 1:24 - loss: 2.315 - ETA: 1:23 - loss: 2.313 - ETA: 1:22 - loss: 2.309 - ETA: 1:21 - loss: 2.305 - ETA: 1:20 - loss: 2.302 - ETA: 1:19 - loss: 2.300 - ETA: 1:18 - loss: 2.297 - ETA: 1:17 - loss: 2.294 - ETA: 1:16 - loss: 2.290 - ETA: 1:15 - loss: 2.287 - ETA: 1:14 - loss: 2.285 - ETA: 1:13 - loss: 2.282 - ETA: 1:12 - loss: 2.279 - ETA: 1:11 - loss: 2.276 - ETA: 1:10 - loss: 2.273 - ETA: 1:09 - loss: 2.270 - ETA: 1:08 - loss: 2.267 - ETA: 1:07 - loss: 2.263 - ETA: 1:06 - loss: 2.261 - ETA: 1:05 - loss: 2.258 - ETA: 1:04 - loss: 2.256 - ETA: 1:03 - loss: 2.253 - ETA: 1:02 - loss: 2.251 - ETA: 1:01 - loss: 2.248 - ETA: 1:00 - loss: 2.245 - ETA: 59s - loss: 2.242 - ETA: 58s - loss: 2.23 - ETA: 57s - loss: 2.23 - ETA: 56s - loss: 2.23 - ETA: 55s - loss: 2.22 - ETA: 54s - loss: 2.22 - ETA: 53s - loss: 2.22 - ETA: 52s - loss: 2.21 - ETA: 51s - loss: 2.21 - ETA: 50s - loss: 2.21 - ETA: 49s - loss: 2.20 - ETA: 48s - loss: 2.20 - ETA: 47s - loss: 2.20 - ETA: 46s - loss: 2.19 - ETA: 45s - loss: 2.19 - ETA: 44s - loss: 2.19 - ETA: 43s - loss: 2.18 - ETA: 42s - loss: 2.18 - ETA: 41s - loss: 2.18 - ETA: 40s - loss: 2.18 - ETA: 39s - loss: 2.17 - ETA: 38s - loss: 2.17 - ETA: 37s - loss: 2.17 - ETA: 36s - loss: 2.17 - ETA: 35s - loss: 2.16 - ETA: 34s - loss: 2.16 - ETA: 33s - loss: 2.16 - ETA: 32s - loss: 2.16 - ETA: 31s - loss: 2.16 - ETA: 30s - loss: 2.16 - ETA: 29s - loss: 2.15 - ETA: 28s - loss: 2.15 - ETA: 27s - loss: 2.15 - ETA: 26s - loss: 2.15 - ETA: 25s - loss: 2.15 - ETA: 24s - loss: 2.14 - ETA: 23s - loss: 2.14 - ETA: 22s - loss: 2.14 - ETA: 21s - loss: 2.14 - ETA: 20s - loss: 2.14 - ETA: 19s - loss: 2.13 - ETA: 18s - loss: 2.13 - ETA: 17s - loss: 2.13 - ETA: 16s - loss: 2.13 - ETA: 15s - loss: 2.13 - ETA: 14s - loss: 2.12 - ETA: 13s - loss: 2.12 - ETA: 12s - loss: 2.12 - ETA: 11s - loss: 2.12 - ETA: 10s - loss: 2.11 - ETA: 9s - loss: 2.1174 - ETA: 8s - loss: 2.115 - ETA: 7s - loss: 2.112 - ETA: 6s - loss: 2.110 - ETA: 5s - loss: 2.107 - ETA: 4s - loss: 2.105 - ETA: 3s - loss: 2.103 - ETA: 2s - loss: 2.100 - ETA: 1s - loss: 2.098 - ETA: 0s - loss: 2.095 - ETA: 0s - loss: 2.093 - 115s 1s/step - loss: 2.0909 - val_loss: 1.3986\n",
      "\n",
      "Epoch 00001: val_loss improved from inf to 1.39859, saving model to clean_notebooks\\injection_superposition.h5\n",
      "Epoch 2/30\n",
      "111/111 [==============================] - ETA: 1:52 - loss: 3.406 - ETA: 1:47 - loss: 3.284 - ETA: 1:46 - loss: 3.151 - ETA: 1:45 - loss: 2.992 - ETA: 1:44 - loss: 2.843 - ETA: 1:43 - loss: 2.719 - ETA: 1:42 - loss: 2.611 - ETA: 1:41 - loss: 2.517 - ETA: 1:40 - loss: 2.458 - ETA: 1:39 - loss: 2.407 - ETA: 1:38 - loss: 2.352 - ETA: 1:37 - loss: 2.298 - ETA: 1:37 - loss: 2.244 - ETA: 1:36 - loss: 2.196 - ETA: 1:35 - loss: 2.157 - ETA: 1:34 - loss: 2.122 - ETA: 1:33 - loss: 2.094 - ETA: 1:32 - loss: 2.071 - ETA: 1:31 - loss: 2.049 - ETA: 1:30 - loss: 2.030 - ETA: 1:29 - loss: 2.010 - ETA: 1:28 - loss: 1.991 - ETA: 1:27 - loss: 1.974 - ETA: 1:26 - loss: 1.960 - ETA: 1:25 - loss: 1.947 - ETA: 1:24 - loss: 1.934 - ETA: 1:23 - loss: 1.923 - ETA: 1:22 - loss: 1.913 - ETA: 1:21 - loss: 1.903 - ETA: 1:20 - loss: 1.893 - ETA: 1:19 - loss: 1.884 - ETA: 1:18 - loss: 1.875 - ETA: 1:17 - loss: 1.866 - ETA: 1:16 - loss: 1.857 - ETA: 1:15 - loss: 1.848 - ETA: 1:14 - loss: 1.842 - ETA: 1:13 - loss: 1.835 - ETA: 1:12 - loss: 1.830 - ETA: 1:11 - loss: 1.825 - ETA: 1:10 - loss: 1.819 - ETA: 1:09 - loss: 1.813 - ETA: 1:08 - loss: 1.807 - ETA: 1:07 - loss: 1.802 - ETA: 1:06 - loss: 1.796 - ETA: 1:05 - loss: 1.791 - ETA: 1:04 - loss: 1.786 - ETA: 1:03 - loss: 1.782 - ETA: 1:02 - loss: 1.777 - ETA: 1:01 - loss: 1.773 - ETA: 1:00 - loss: 1.769 - ETA: 59s - loss: 1.764 - ETA: 58s - loss: 1.75 - ETA: 57s - loss: 1.75 - ETA: 56s - loss: 1.75 - ETA: 55s - loss: 1.74 - ETA: 54s - loss: 1.74 - ETA: 53s - loss: 1.73 - ETA: 52s - loss: 1.73 - ETA: 51s - loss: 1.73 - ETA: 50s - loss: 1.72 - ETA: 49s - loss: 1.72 - ETA: 48s - loss: 1.71 - ETA: 47s - loss: 1.71 - ETA: 46s - loss: 1.71 - ETA: 45s - loss: 1.70 - ETA: 44s - loss: 1.70 - ETA: 43s - loss: 1.69 - ETA: 42s - loss: 1.69 - ETA: 41s - loss: 1.69 - ETA: 40s - loss: 1.68 - ETA: 39s - loss: 1.68 - ETA: 38s - loss: 1.68 - ETA: 37s - loss: 1.68 - ETA: 36s - loss: 1.67 - ETA: 35s - loss: 1.67 - ETA: 34s - loss: 1.67 - ETA: 33s - loss: 1.66 - ETA: 32s - loss: 1.66 - ETA: 31s - loss: 1.66 - ETA: 30s - loss: 1.66 - ETA: 29s - loss: 1.65 - ETA: 28s - loss: 1.65 - ETA: 27s - loss: 1.65 - ETA: 26s - loss: 1.64 - ETA: 25s - loss: 1.64 - ETA: 24s - loss: 1.64 - ETA: 23s - loss: 1.64 - ETA: 22s - loss: 1.63 - ETA: 21s - loss: 1.63 - ETA: 20s - loss: 1.63 - ETA: 19s - loss: 1.62 - ETA: 18s - loss: 1.62 - ETA: 17s - loss: 1.62 - ETA: 16s - loss: 1.62 - ETA: 15s - loss: 1.61 - ETA: 14s - loss: 1.61 - ETA: 13s - loss: 1.61 - ETA: 12s - loss: 1.61 - ETA: 11s - loss: 1.61 - ETA: 10s - loss: 1.60 - ETA: 9s - loss: 1.6057 - ETA: 8s - loss: 1.603 - ETA: 7s - loss: 1.601 - ETA: 6s - loss: 1.598 - ETA: 5s - loss: 1.596 - ETA: 4s - loss: 1.594 - ETA: 3s - loss: 1.592 - ETA: 2s - loss: 1.590 - ETA: 1s - loss: 1.587 - ETA: 0s - loss: 1.585 - ETA: 0s - loss: 1.583 - 111s 1s/step - loss: 1.5814 - val_loss: 1.0767\n",
      "\n",
      "Epoch 00002: val_loss improved from 1.39859 to 1.07667, saving model to clean_notebooks\\injection_superposition.h5\n",
      "Epoch 3/30\n",
      "111/111 [==============================] - ETA: 1:53 - loss: 1.038 - ETA: 1:53 - loss: 1.142 - ETA: 1:49 - loss: 1.214 - ETA: 1:47 - loss: 1.189 - ETA: 1:46 - loss: 1.149 - ETA: 1:46 - loss: 1.118 - ETA: 1:45 - loss: 1.094 - ETA: 1:44 - loss: 1.079 - ETA: 1:42 - loss: 1.062 - ETA: 1:40 - loss: 1.049 - ETA: 1:39 - loss: 1.036 - ETA: 1:38 - loss: 1.035 - ETA: 1:37 - loss: 1.034 - ETA: 1:36 - loss: 1.035 - ETA: 1:35 - loss: 1.038 - ETA: 1:34 - loss: 1.040 - ETA: 1:33 - loss: 1.040 - ETA: 1:32 - loss: 1.042 - ETA: 1:31 - loss: 1.043 - ETA: 1:31 - loss: 1.045 - ETA: 1:30 - loss: 1.046 - ETA: 1:29 - loss: 1.049 - ETA: 1:29 - loss: 1.051 - ETA: 1:28 - loss: 1.051 - ETA: 1:27 - loss: 1.051 - ETA: 1:26 - loss: 1.051 - ETA: 1:26 - loss: 1.051 - ETA: 1:25 - loss: 1.052 - ETA: 1:24 - loss: 1.053 - ETA: 1:24 - loss: 1.053 - ETA: 1:23 - loss: 1.055 - ETA: 1:22 - loss: 1.056 - ETA: 1:21 - loss: 1.058 - ETA: 1:20 - loss: 1.061 - ETA: 1:19 - loss: 1.065 - ETA: 1:18 - loss: 1.068 - ETA: 1:17 - loss: 1.071 - ETA: 1:15 - loss: 1.074 - ETA: 1:14 - loss: 1.076 - ETA: 1:13 - loss: 1.079 - ETA: 1:12 - loss: 1.081 - ETA: 1:11 - loss: 1.084 - ETA: 1:10 - loss: 1.086 - ETA: 1:09 - loss: 1.089 - ETA: 1:07 - loss: 1.091 - ETA: 1:06 - loss: 1.094 - ETA: 1:05 - loss: 1.095 - ETA: 1:04 - loss: 1.097 - ETA: 1:03 - loss: 1.099 - ETA: 1:02 - loss: 1.100 - ETA: 1:01 - loss: 1.102 - ETA: 1:00 - loss: 1.103 - ETA: 59s - loss: 1.105 - ETA: 58s - loss: 1.10 - ETA: 57s - loss: 1.10 - ETA: 56s - loss: 1.10 - ETA: 55s - loss: 1.11 - ETA: 54s - loss: 1.11 - ETA: 52s - loss: 1.11 - ETA: 51s - loss: 1.11 - ETA: 50s - loss: 1.11 - ETA: 49s - loss: 1.11 - ETA: 48s - loss: 1.11 - ETA: 47s - loss: 1.12 - ETA: 46s - loss: 1.12 - ETA: 45s - loss: 1.12 - ETA: 44s - loss: 1.12 - ETA: 43s - loss: 1.12 - ETA: 42s - loss: 1.12 - ETA: 41s - loss: 1.12 - ETA: 40s - loss: 1.12 - ETA: 39s - loss: 1.12 - ETA: 38s - loss: 1.12 - ETA: 37s - loss: 1.12 - ETA: 36s - loss: 1.12 - ETA: 35s - loss: 1.12 - ETA: 34s - loss: 1.12 - ETA: 33s - loss: 1.12 - ETA: 32s - loss: 1.12 - ETA: 31s - loss: 1.13 - ETA: 30s - loss: 1.13 - ETA: 29s - loss: 1.13 - ETA: 28s - loss: 1.13 - ETA: 27s - loss: 1.13 - ETA: 26s - loss: 1.13 - ETA: 25s - loss: 1.13 - ETA: 24s - loss: 1.12 - ETA: 23s - loss: 1.12 - ETA: 22s - loss: 1.12 - ETA: 21s - loss: 1.12 - ETA: 20s - loss: 1.12 - ETA: 19s - loss: 1.12 - ETA: 18s - loss: 1.12 - ETA: 17s - loss: 1.12 - ETA: 16s - loss: 1.12 - ETA: 15s - loss: 1.12 - ETA: 14s - loss: 1.12 - ETA: 13s - loss: 1.12 - ETA: 12s - loss: 1.12 - ETA: 11s - loss: 1.12 - ETA: 10s - loss: 1.12 - ETA: 9s - loss: 1.1231 - ETA: 8s - loss: 1.122 - ETA: 7s - loss: 1.121 - ETA: 6s - loss: 1.121 - ETA: 5s - loss: 1.120 - ETA: 4s - loss: 1.119 - ETA: 3s - loss: 1.119 - ETA: 2s - loss: 1.118 - ETA: 1s - loss: 1.117 - ETA: 0s - loss: 1.117 - 113s 1s/step - loss: 1.1165 - val_loss: 0.6785\n",
      "\n",
      "Epoch 00003: val_loss improved from 1.07667 to 0.67850, saving model to clean_notebooks\\injection_superposition.h5\n",
      "Epoch 4/30\n",
      "111/111 [==============================] - ETA: 1:55 - loss: 1.124 - ETA: 1:49 - loss: 1.206 - ETA: 1:46 - loss: 1.251 - ETA: 1:45 - loss: 1.235 - ETA: 1:44 - loss: 1.209 - ETA: 1:43 - loss: 1.171 - ETA: 1:42 - loss: 1.150 - ETA: 1:41 - loss: 1.132 - ETA: 1:40 - loss: 1.112 - ETA: 1:39 - loss: 1.091 - ETA: 1:38 - loss: 1.071 - ETA: 1:38 - loss: 1.052 - ETA: 1:37 - loss: 1.032 - ETA: 1:36 - loss: 1.013 - ETA: 1:35 - loss: 0.998 - ETA: 1:34 - loss: 0.983 - ETA: 1:33 - loss: 0.970 - ETA: 1:32 - loss: 0.957 - ETA: 1:31 - loss: 0.947 - ETA: 1:30 - loss: 0.939 - ETA: 1:29 - loss: 0.931 - ETA: 1:28 - loss: 0.923 - ETA: 1:27 - loss: 0.915 - ETA: 1:26 - loss: 0.908 - ETA: 1:25 - loss: 0.901 - ETA: 1:24 - loss: 0.894 - ETA: 1:23 - loss: 0.889 - ETA: 1:22 - loss: 0.883 - ETA: 1:21 - loss: 0.878 - ETA: 1:20 - loss: 0.874 - ETA: 1:18 - loss: 0.869 - ETA: 1:18 - loss: 0.866 - ETA: 1:16 - loss: 0.863 - ETA: 1:15 - loss: 0.860 - ETA: 1:14 - loss: 0.858 - ETA: 1:13 - loss: 0.855 - ETA: 1:12 - loss: 0.853 - ETA: 1:11 - loss: 0.850 - ETA: 1:10 - loss: 0.847 - ETA: 1:09 - loss: 0.844 - ETA: 1:08 - loss: 0.841 - ETA: 1:07 - loss: 0.839 - ETA: 1:06 - loss: 0.836 - ETA: 1:05 - loss: 0.834 - ETA: 1:04 - loss: 0.832 - ETA: 1:03 - loss: 0.830 - ETA: 1:03 - loss: 0.828 - ETA: 1:02 - loss: 0.827 - ETA: 1:01 - loss: 0.826 - ETA: 1:00 - loss: 0.825 - ETA: 59s - loss: 0.824 - ETA: 58s - loss: 0.82 - ETA: 57s - loss: 0.82 - ETA: 56s - loss: 0.82 - ETA: 55s - loss: 0.82 - ETA: 54s - loss: 0.82 - ETA: 53s - loss: 0.82 - ETA: 52s - loss: 0.82 - ETA: 51s - loss: 0.82 - ETA: 50s - loss: 0.82 - ETA: 49s - loss: 0.82 - ETA: 48s - loss: 0.81 - ETA: 47s - loss: 0.81 - ETA: 46s - loss: 0.81 - ETA: 45s - loss: 0.81 - ETA: 44s - loss: 0.81 - ETA: 43s - loss: 0.81 - ETA: 42s - loss: 0.81 - ETA: 41s - loss: 0.81 - ETA: 40s - loss: 0.81 - ETA: 39s - loss: 0.81 - ETA: 38s - loss: 0.81 - ETA: 37s - loss: 0.81 - ETA: 36s - loss: 0.81 - ETA: 35s - loss: 0.81 - ETA: 34s - loss: 0.81 - ETA: 33s - loss: 0.81 - ETA: 32s - loss: 0.81 - ETA: 31s - loss: 0.81 - ETA: 30s - loss: 0.81 - ETA: 29s - loss: 0.81 - ETA: 28s - loss: 0.81 - ETA: 27s - loss: 0.81 - ETA: 26s - loss: 0.81 - ETA: 25s - loss: 0.81 - ETA: 24s - loss: 0.81 - ETA: 23s - loss: 0.81 - ETA: 22s - loss: 0.81 - ETA: 21s - loss: 0.81 - ETA: 20s - loss: 0.81 - ETA: 19s - loss: 0.80 - ETA: 18s - loss: 0.80 - ETA: 17s - loss: 0.80 - ETA: 16s - loss: 0.80 - ETA: 15s - loss: 0.80 - ETA: 14s - loss: 0.80 - ETA: 13s - loss: 0.80 - ETA: 12s - loss: 0.80 - ETA: 11s - loss: 0.80 - ETA: 10s - loss: 0.80 - ETA: 9s - loss: 0.8052 - ETA: 8s - loss: 0.804 - ETA: 7s - loss: 0.804 - ETA: 6s - loss: 0.804 - ETA: 5s - loss: 0.803 - ETA: 4s - loss: 0.803 - ETA: 3s - loss: 0.802 - ETA: 2s - loss: 0.802 - ETA: 1s - loss: 0.801 - ETA: 0s - loss: 0.801 - ETA: 0s - loss: 0.800 - 111s 998ms/step - loss: 0.8003 - val_loss: 0.4950\n",
      "\n",
      "Epoch 00004: val_loss improved from 0.67850 to 0.49504, saving model to clean_notebooks\\injection_superposition.h5\n",
      "Epoch 5/30\n",
      "111/111 [==============================] - ETA: 1:52 - loss: 0.474 - ETA: 1:46 - loss: 0.471 - ETA: 1:47 - loss: 0.454 - ETA: 1:46 - loss: 0.467 - ETA: 1:45 - loss: 0.495 - ETA: 1:43 - loss: 0.536 - ETA: 1:42 - loss: 0.560 - ETA: 1:41 - loss: 0.578 - ETA: 1:40 - loss: 0.592 - ETA: 1:39 - loss: 0.600 - ETA: 1:38 - loss: 0.603 - ETA: 1:37 - loss: 0.602 - ETA: 1:36 - loss: 0.604 - ETA: 1:36 - loss: 0.605 - ETA: 1:35 - loss: 0.605 - ETA: 1:34 - loss: 0.604 - ETA: 1:33 - loss: 0.603 - ETA: 1:31 - loss: 0.602 - ETA: 1:30 - loss: 0.601 - ETA: 1:30 - loss: 0.602 - ETA: 1:28 - loss: 0.601 - ETA: 1:28 - loss: 0.600 - ETA: 1:27 - loss: 0.598 - ETA: 1:26 - loss: 0.597 - ETA: 1:25 - loss: 0.595 - ETA: 1:24 - loss: 0.595 - ETA: 1:23 - loss: 0.594 - ETA: 1:22 - loss: 0.593 - ETA: 1:21 - loss: 0.591 - ETA: 1:20 - loss: 0.591 - ETA: 1:19 - loss: 0.591 - ETA: 1:18 - loss: 0.592 - ETA: 1:17 - loss: 0.592 - ETA: 1:15 - loss: 0.592 - ETA: 1:14 - loss: 0.592 - ETA: 1:13 - loss: 0.592 - ETA: 1:13 - loss: 0.591 - ETA: 1:12 - loss: 0.591 - ETA: 1:11 - loss: 0.591 - ETA: 1:10 - loss: 0.590 - ETA: 1:09 - loss: 0.590 - ETA: 1:07 - loss: 0.591 - ETA: 1:06 - loss: 0.591 - ETA: 1:05 - loss: 0.591 - ETA: 1:04 - loss: 0.592 - ETA: 1:03 - loss: 0.592 - ETA: 1:02 - loss: 0.593 - ETA: 1:01 - loss: 0.593 - ETA: 1:00 - loss: 0.594 - ETA: 59s - loss: 0.594 - ETA: 58s - loss: 0.59 - ETA: 57s - loss: 0.59 - ETA: 56s - loss: 0.59 - ETA: 56s - loss: 0.59 - ETA: 55s - loss: 0.59 - ETA: 54s - loss: 0.59 - ETA: 53s - loss: 0.59 - ETA: 52s - loss: 0.59 - ETA: 51s - loss: 0.59 - ETA: 50s - loss: 0.59 - ETA: 49s - loss: 0.59 - ETA: 48s - loss: 0.59 - ETA: 47s - loss: 0.59 - ETA: 46s - loss: 0.59 - ETA: 45s - loss: 0.59 - ETA: 44s - loss: 0.59 - ETA: 43s - loss: 0.59 - ETA: 42s - loss: 0.60 - ETA: 41s - loss: 0.60 - ETA: 40s - loss: 0.60 - ETA: 39s - loss: 0.60 - ETA: 38s - loss: 0.60 - ETA: 37s - loss: 0.60 - ETA: 36s - loss: 0.60 - ETA: 35s - loss: 0.60 - ETA: 34s - loss: 0.60 - ETA: 33s - loss: 0.60 - ETA: 32s - loss: 0.60 - ETA: 31s - loss: 0.60 - ETA: 30s - loss: 0.60 - ETA: 29s - loss: 0.60 - ETA: 28s - loss: 0.60 - ETA: 27s - loss: 0.60 - ETA: 26s - loss: 0.60 - ETA: 25s - loss: 0.60 - ETA: 24s - loss: 0.60 - ETA: 23s - loss: 0.60 - ETA: 22s - loss: 0.60 - ETA: 21s - loss: 0.60 - ETA: 20s - loss: 0.60 - ETA: 19s - loss: 0.60 - ETA: 18s - loss: 0.60 - ETA: 17s - loss: 0.60 - ETA: 16s - loss: 0.60 - ETA: 15s - loss: 0.60 - ETA: 14s - loss: 0.60 - ETA: 13s - loss: 0.60 - ETA: 12s - loss: 0.60 - ETA: 11s - loss: 0.60 - ETA: 10s - loss: 0.60 - ETA: 9s - loss: 0.6092 - ETA: 8s - loss: 0.609 - ETA: 7s - loss: 0.609 - ETA: 6s - loss: 0.609 - ETA: 5s - loss: 0.609 - ETA: 4s - loss: 0.609 - ETA: 3s - loss: 0.609 - ETA: 2s - loss: 0.609 - ETA: 1s - loss: 0.609 - ETA: 0s - loss: 0.609 - ETA: 0s - loss: 0.609 - 110s 994ms/step - loss: 0.6092 - val_loss: 0.3938\n",
      "\n",
      "Epoch 00005: val_loss improved from 0.49504 to 0.39385, saving model to clean_notebooks\\injection_superposition.h5\n",
      "Epoch 6/30\n",
      "111/111 [==============================] - ETA: 1:56 - loss: 2.029 - ETA: 1:49 - loss: 1.754 - ETA: 1:47 - loss: 1.561 - ETA: 1:46 - loss: 1.425 - ETA: 1:45 - loss: 1.315 - ETA: 1:43 - loss: 1.229 - ETA: 1:42 - loss: 1.159 - ETA: 1:41 - loss: 1.101 - ETA: 1:40 - loss: 1.055 - ETA: 1:39 - loss: 1.015 - ETA: 1:38 - loss: 0.978 - ETA: 1:37 - loss: 0.946 - ETA: 1:36 - loss: 0.918 - ETA: 1:35 - loss: 0.896 - ETA: 1:34 - loss: 0.878 - ETA: 1:33 - loss: 0.861 - ETA: 1:32 - loss: 0.844 - ETA: 1:31 - loss: 0.829 - ETA: 1:30 - loss: 0.814 - ETA: 1:29 - loss: 0.801 - ETA: 1:28 - loss: 0.788 - ETA: 1:27 - loss: 0.777 - ETA: 1:26 - loss: 0.767 - ETA: 1:25 - loss: 0.757 - ETA: 1:24 - loss: 0.749 - ETA: 1:23 - loss: 0.741 - ETA: 1:22 - loss: 0.734 - ETA: 1:22 - loss: 0.727 - ETA: 1:21 - loss: 0.720 - ETA: 1:20 - loss: 0.714 - ETA: 1:19 - loss: 0.710 - ETA: 1:18 - loss: 0.706 - ETA: 1:17 - loss: 0.703 - ETA: 1:16 - loss: 0.700 - ETA: 1:15 - loss: 0.697 - ETA: 1:14 - loss: 0.694 - ETA: 1:13 - loss: 0.692 - ETA: 1:12 - loss: 0.690 - ETA: 1:11 - loss: 0.688 - ETA: 1:10 - loss: 0.686 - ETA: 1:09 - loss: 0.684 - ETA: 1:08 - loss: 0.682 - ETA: 1:07 - loss: 0.681 - ETA: 1:06 - loss: 0.679 - ETA: 1:05 - loss: 0.677 - ETA: 1:04 - loss: 0.676 - ETA: 1:03 - loss: 0.674 - ETA: 1:02 - loss: 0.673 - ETA: 1:01 - loss: 0.672 - ETA: 1:00 - loss: 0.670 - ETA: 59s - loss: 0.669 - ETA: 58s - loss: 0.66 - ETA: 57s - loss: 0.66 - ETA: 56s - loss: 0.66 - ETA: 55s - loss: 0.66 - ETA: 54s - loss: 0.66 - ETA: 53s - loss: 0.66 - ETA: 52s - loss: 0.65 - ETA: 51s - loss: 0.65 - ETA: 50s - loss: 0.65 - ETA: 49s - loss: 0.65 - ETA: 48s - loss: 0.65 - ETA: 47s - loss: 0.65 - ETA: 46s - loss: 0.65 - ETA: 45s - loss: 0.64 - ETA: 44s - loss: 0.64 - ETA: 43s - loss: 0.64 - ETA: 42s - loss: 0.64 - ETA: 41s - loss: 0.64 - ETA: 40s - loss: 0.64 - ETA: 39s - loss: 0.64 - ETA: 38s - loss: 0.64 - ETA: 37s - loss: 0.64 - ETA: 36s - loss: 0.64 - ETA: 35s - loss: 0.64 - ETA: 34s - loss: 0.63 - ETA: 33s - loss: 0.63 - ETA: 32s - loss: 0.63 - ETA: 31s - loss: 0.63 - ETA: 30s - loss: 0.63 - ETA: 29s - loss: 0.63 - ETA: 28s - loss: 0.63 - ETA: 27s - loss: 0.63 - ETA: 26s - loss: 0.63 - ETA: 25s - loss: 0.63 - ETA: 24s - loss: 0.63 - ETA: 23s - loss: 0.63 - ETA: 22s - loss: 0.63 - ETA: 21s - loss: 0.63 - ETA: 20s - loss: 0.62 - ETA: 19s - loss: 0.62 - ETA: 18s - loss: 0.62 - ETA: 17s - loss: 0.62 - ETA: 16s - loss: 0.62 - ETA: 15s - loss: 0.62 - ETA: 14s - loss: 0.62 - ETA: 13s - loss: 0.62 - ETA: 12s - loss: 0.62 - ETA: 11s - loss: 0.62 - ETA: 10s - loss: 0.62 - ETA: 9s - loss: 0.6246 - ETA: 8s - loss: 0.624 - ETA: 7s - loss: 0.623 - ETA: 6s - loss: 0.622 - ETA: 5s - loss: 0.622 - ETA: 4s - loss: 0.621 - ETA: 3s - loss: 0.621 - ETA: 2s - loss: 0.620 - ETA: 1s - loss: 0.620 - ETA: 0s - loss: 0.619 - ETA: 0s - loss: 0.619 - 111s 997ms/step - loss: 0.6190 - val_loss: 0.3925\n",
      "\n",
      "Epoch 00006: val_loss improved from 0.39385 to 0.39255, saving model to clean_notebooks\\injection_superposition.h5\n",
      "Epoch 7/30\n",
      "111/111 [==============================] - ETA: 1:48 - loss: 0.368 - ETA: 1:49 - loss: 0.356 - ETA: 1:46 - loss: 0.337 - ETA: 1:45 - loss: 0.354 - ETA: 1:43 - loss: 0.353 - ETA: 1:42 - loss: 0.345 - ETA: 1:41 - loss: 0.338 - ETA: 1:40 - loss: 0.338 - ETA: 1:39 - loss: 0.342 - ETA: 1:38 - loss: 0.344 - ETA: 1:37 - loss: 0.346 - ETA: 1:36 - loss: 0.349 - ETA: 1:35 - loss: 0.354 - ETA: 1:34 - loss: 0.357 - ETA: 1:33 - loss: 0.362 - ETA: 1:32 - loss: 0.368 - ETA: 1:31 - loss: 0.373 - ETA: 1:30 - loss: 0.378 - ETA: 1:29 - loss: 0.383 - ETA: 1:28 - loss: 0.389 - ETA: 1:27 - loss: 0.395 - ETA: 1:26 - loss: 0.402 - ETA: 1:25 - loss: 0.408 - ETA: 1:24 - loss: 0.414 - ETA: 1:23 - loss: 0.418 - ETA: 1:22 - loss: 0.423 - ETA: 1:21 - loss: 0.427 - ETA: 1:20 - loss: 0.431 - ETA: 1:19 - loss: 0.435 - ETA: 1:18 - loss: 0.439 - ETA: 1:18 - loss: 0.444 - ETA: 1:17 - loss: 0.449 - ETA: 1:16 - loss: 0.453 - ETA: 1:15 - loss: 0.457 - ETA: 1:14 - loss: 0.461 - ETA: 1:13 - loss: 0.465 - ETA: 1:12 - loss: 0.469 - ETA: 1:11 - loss: 0.472 - ETA: 1:10 - loss: 0.475 - ETA: 1:09 - loss: 0.477 - ETA: 1:08 - loss: 0.480 - ETA: 1:07 - loss: 0.482 - ETA: 1:06 - loss: 0.484 - ETA: 1:05 - loss: 0.486 - ETA: 1:04 - loss: 0.487 - ETA: 1:03 - loss: 0.488 - ETA: 1:02 - loss: 0.490 - ETA: 1:01 - loss: 0.491 - ETA: 1:00 - loss: 0.492 - ETA: 59s - loss: 0.493 - ETA: 58s - loss: 0.49 - ETA: 57s - loss: 0.49 - ETA: 56s - loss: 0.49 - ETA: 55s - loss: 0.49 - ETA: 54s - loss: 0.49 - ETA: 53s - loss: 0.49 - ETA: 52s - loss: 0.49 - ETA: 51s - loss: 0.49 - ETA: 50s - loss: 0.50 - ETA: 49s - loss: 0.50 - ETA: 48s - loss: 0.50 - ETA: 47s - loss: 0.50 - ETA: 47s - loss: 0.50 - ETA: 46s - loss: 0.50 - ETA: 45s - loss: 0.50 - ETA: 44s - loss: 0.50 - ETA: 43s - loss: 0.50 - ETA: 42s - loss: 0.50 - ETA: 41s - loss: 0.50 - ETA: 40s - loss: 0.50 - ETA: 39s - loss: 0.50 - ETA: 38s - loss: 0.50 - ETA: 37s - loss: 0.50 - ETA: 36s - loss: 0.50 - ETA: 35s - loss: 0.50 - ETA: 34s - loss: 0.50 - ETA: 33s - loss: 0.50 - ETA: 32s - loss: 0.50 - ETA: 31s - loss: 0.50 - ETA: 30s - loss: 0.50 - ETA: 29s - loss: 0.51 - ETA: 28s - loss: 0.51 - ETA: 27s - loss: 0.51 - ETA: 26s - loss: 0.51 - ETA: 25s - loss: 0.51 - ETA: 24s - loss: 0.51 - ETA: 23s - loss: 0.51 - ETA: 22s - loss: 0.51 - ETA: 21s - loss: 0.51 - ETA: 20s - loss: 0.51 - ETA: 19s - loss: 0.51 - ETA: 18s - loss: 0.51 - ETA: 17s - loss: 0.51 - ETA: 16s - loss: 0.51 - ETA: 15s - loss: 0.51 - ETA: 14s - loss: 0.51 - ETA: 13s - loss: 0.51 - ETA: 12s - loss: 0.51 - ETA: 11s - loss: 0.51 - ETA: 10s - loss: 0.51 - ETA: 9s - loss: 0.5170 - ETA: 8s - loss: 0.517 - ETA: 7s - loss: 0.517 - ETA: 6s - loss: 0.517 - ETA: 5s - loss: 0.517 - ETA: 4s - loss: 0.517 - ETA: 3s - loss: 0.517 - ETA: 2s - loss: 0.517 - ETA: 1s - loss: 0.517 - ETA: 0s - loss: 0.517 - ETA: 0s - loss: 0.517 - 112s 1s/step - loss: 0.5180 - val_loss: 0.3172\n",
      "\n",
      "Epoch 00007: val_loss improved from 0.39255 to 0.31715, saving model to clean_notebooks\\injection_superposition.h5\n",
      "Epoch 8/30\n",
      "111/111 [==============================] - ETA: 1:52 - loss: 0.892 - ETA: 1:49 - loss: 0.721 - ETA: 1:48 - loss: 0.732 - ETA: 1:45 - loss: 0.700 - ETA: 1:44 - loss: 0.659 - ETA: 1:43 - loss: 0.629 - ETA: 1:42 - loss: 0.619 - ETA: 1:40 - loss: 0.609 - ETA: 1:39 - loss: 0.612 - ETA: 1:38 - loss: 0.612 - ETA: 1:37 - loss: 0.612 - ETA: 1:36 - loss: 0.610 - ETA: 1:35 - loss: 0.606 - ETA: 1:34 - loss: 0.602 - ETA: 1:33 - loss: 0.598 - ETA: 1:32 - loss: 0.594 - ETA: 1:31 - loss: 0.590 - ETA: 1:31 - loss: 0.586 - ETA: 1:30 - loss: 0.581 - ETA: 1:29 - loss: 0.578 - ETA: 1:28 - loss: 0.575 - ETA: 1:27 - loss: 0.572 - ETA: 1:26 - loss: 0.570 - ETA: 1:25 - loss: 0.567 - ETA: 1:24 - loss: 0.564 - ETA: 1:23 - loss: 0.561 - ETA: 1:22 - loss: 0.558 - ETA: 1:21 - loss: 0.555 - ETA: 1:20 - loss: 0.553 - ETA: 1:19 - loss: 0.551 - ETA: 1:18 - loss: 0.548 - ETA: 1:17 - loss: 0.546 - ETA: 1:16 - loss: 0.544 - ETA: 1:15 - loss: 0.542 - ETA: 1:14 - loss: 0.540 - ETA: 1:13 - loss: 0.539 - ETA: 1:12 - loss: 0.537 - ETA: 1:11 - loss: 0.535 - ETA: 1:10 - loss: 0.534 - ETA: 1:09 - loss: 0.532 - ETA: 1:08 - loss: 0.530 - ETA: 1:07 - loss: 0.528 - ETA: 1:06 - loss: 0.527 - ETA: 1:05 - loss: 0.525 - ETA: 1:04 - loss: 0.523 - ETA: 1:03 - loss: 0.522 - ETA: 1:02 - loss: 0.521 - ETA: 1:01 - loss: 0.519 - ETA: 1:00 - loss: 0.518 - ETA: 59s - loss: 0.517 - ETA: 58s - loss: 0.51 - ETA: 57s - loss: 0.51 - ETA: 56s - loss: 0.51 - ETA: 55s - loss: 0.51 - ETA: 54s - loss: 0.51 - ETA: 54s - loss: 0.51 - ETA: 53s - loss: 0.50 - ETA: 52s - loss: 0.50 - ETA: 51s - loss: 0.50 - ETA: 50s - loss: 0.50 - ETA: 49s - loss: 0.50 - ETA: 48s - loss: 0.50 - ETA: 47s - loss: 0.50 - ETA: 46s - loss: 0.50 - ETA: 45s - loss: 0.50 - ETA: 44s - loss: 0.50 - ETA: 43s - loss: 0.50 - ETA: 42s - loss: 0.49 - ETA: 41s - loss: 0.49 - ETA: 40s - loss: 0.49 - ETA: 39s - loss: 0.49 - ETA: 38s - loss: 0.49 - ETA: 37s - loss: 0.49 - ETA: 36s - loss: 0.49 - ETA: 35s - loss: 0.49 - ETA: 34s - loss: 0.49 - ETA: 33s - loss: 0.49 - ETA: 32s - loss: 0.49 - ETA: 31s - loss: 0.49 - ETA: 30s - loss: 0.48 - ETA: 29s - loss: 0.48 - ETA: 28s - loss: 0.48 - ETA: 27s - loss: 0.48 - ETA: 26s - loss: 0.48 - ETA: 25s - loss: 0.48 - ETA: 24s - loss: 0.48 - ETA: 23s - loss: 0.48 - ETA: 22s - loss: 0.48 - ETA: 21s - loss: 0.48 - ETA: 20s - loss: 0.48 - ETA: 19s - loss: 0.48 - ETA: 18s - loss: 0.48 - ETA: 17s - loss: 0.48 - ETA: 16s - loss: 0.48 - ETA: 15s - loss: 0.48 - ETA: 14s - loss: 0.48 - ETA: 13s - loss: 0.48 - ETA: 12s - loss: 0.48 - ETA: 11s - loss: 0.48 - ETA: 10s - loss: 0.47 - ETA: 9s - loss: 0.4793 - ETA: 8s - loss: 0.479 - ETA: 7s - loss: 0.478 - ETA: 6s - loss: 0.478 - ETA: 5s - loss: 0.478 - ETA: 4s - loss: 0.477 - ETA: 3s - loss: 0.477 - ETA: 2s - loss: 0.477 - ETA: 1s - loss: 0.477 - ETA: 0s - loss: 0.476 - ETA: 0s - loss: 0.476 - 111s 998ms/step - loss: 0.4762 - val_loss: 0.3329\n",
      "\n",
      "Epoch 00008: val_loss did not improve from 0.31715\n",
      "Epoch 9/30\n",
      "111/111 [==============================] - ETA: 1:43 - loss: 0.700 - ETA: 1:44 - loss: 0.657 - ETA: 1:47 - loss: 0.607 - ETA: 1:45 - loss: 0.562 - ETA: 1:44 - loss: 0.532 - ETA: 1:43 - loss: 0.520 - ETA: 1:43 - loss: 0.509 - ETA: 1:41 - loss: 0.504 - ETA: 1:40 - loss: 0.497 - ETA: 1:39 - loss: 0.490 - ETA: 1:38 - loss: 0.480 - ETA: 1:37 - loss: 0.473 - ETA: 1:36 - loss: 0.468 - ETA: 1:35 - loss: 0.463 - ETA: 1:34 - loss: 0.457 - ETA: 1:33 - loss: 0.452 - ETA: 1:32 - loss: 0.447 - ETA: 1:31 - loss: 0.442 - ETA: 1:30 - loss: 0.440 - ETA: 1:29 - loss: 0.438 - ETA: 1:28 - loss: 0.436 - ETA: 1:27 - loss: 0.435 - ETA: 1:26 - loss: 0.434 - ETA: 1:25 - loss: 0.434 - ETA: 1:24 - loss: 0.434 - ETA: 1:23 - loss: 0.434 - ETA: 1:22 - loss: 0.434 - ETA: 1:21 - loss: 0.435 - ETA: 1:20 - loss: 0.436 - ETA: 1:19 - loss: 0.437 - ETA: 1:18 - loss: 0.437 - ETA: 1:17 - loss: 0.438 - ETA: 1:16 - loss: 0.438 - ETA: 1:15 - loss: 0.438 - ETA: 1:14 - loss: 0.438 - ETA: 1:13 - loss: 0.437 - ETA: 1:12 - loss: 0.437 - ETA: 1:11 - loss: 0.437 - ETA: 1:10 - loss: 0.437 - ETA: 1:09 - loss: 0.436 - ETA: 1:08 - loss: 0.436 - ETA: 1:07 - loss: 0.436 - ETA: 1:06 - loss: 0.436 - ETA: 1:05 - loss: 0.436 - ETA: 1:04 - loss: 0.435 - ETA: 1:03 - loss: 0.435 - ETA: 1:02 - loss: 0.435 - ETA: 1:01 - loss: 0.435 - ETA: 1:00 - loss: 0.435 - ETA: 59s - loss: 0.434 - ETA: 58s - loss: 0.43 - ETA: 57s - loss: 0.43 - ETA: 56s - loss: 0.43 - ETA: 55s - loss: 0.43 - ETA: 54s - loss: 0.43 - ETA: 53s - loss: 0.43 - ETA: 52s - loss: 0.43 - ETA: 51s - loss: 0.43 - ETA: 50s - loss: 0.43 - ETA: 49s - loss: 0.43 - ETA: 48s - loss: 0.43 - ETA: 48s - loss: 0.43 - ETA: 47s - loss: 0.43 - ETA: 46s - loss: 0.43 - ETA: 45s - loss: 0.43 - ETA: 44s - loss: 0.43 - ETA: 43s - loss: 0.43 - ETA: 42s - loss: 0.43 - ETA: 41s - loss: 0.43 - ETA: 40s - loss: 0.43 - ETA: 39s - loss: 0.43 - ETA: 38s - loss: 0.43 - ETA: 37s - loss: 0.43 - ETA: 36s - loss: 0.43 - ETA: 35s - loss: 0.43 - ETA: 34s - loss: 0.43 - ETA: 34s - loss: 0.43 - ETA: 33s - loss: 0.43 - ETA: 32s - loss: 0.43 - ETA: 31s - loss: 0.43 - ETA: 30s - loss: 0.43 - ETA: 29s - loss: 0.43 - ETA: 28s - loss: 0.43 - ETA: 27s - loss: 0.43 - ETA: 26s - loss: 0.43 - ETA: 25s - loss: 0.44 - ETA: 24s - loss: 0.44 - ETA: 23s - loss: 0.44 - ETA: 22s - loss: 0.44 - ETA: 21s - loss: 0.44 - ETA: 20s - loss: 0.44 - ETA: 19s - loss: 0.44 - ETA: 18s - loss: 0.44 - ETA: 17s - loss: 0.44 - ETA: 16s - loss: 0.44 - ETA: 15s - loss: 0.44 - ETA: 14s - loss: 0.44 - ETA: 13s - loss: 0.44 - ETA: 12s - loss: 0.44 - ETA: 11s - loss: 0.44 - ETA: 10s - loss: 0.44 - ETA: 9s - loss: 0.4431 - ETA: 8s - loss: 0.443 - ETA: 7s - loss: 0.443 - ETA: 6s - loss: 0.443 - ETA: 5s - loss: 0.443 - ETA: 4s - loss: 0.443 - ETA: 3s - loss: 0.443 - ETA: 2s - loss: 0.443 - ETA: 1s - loss: 0.443 - ETA: 0s - loss: 0.443 - 115s 1s/step - loss: 0.4439 - val_loss: 0.3122\n",
      "\n",
      "Epoch 00009: val_loss improved from 0.31715 to 0.31217, saving model to clean_notebooks\\injection_superposition.h5\n",
      "Epoch 10/30\n",
      "111/111 [==============================] - ETA: 1:50 - loss: 0.851 - ETA: 1:45 - loss: 0.779 - ETA: 1:44 - loss: 0.885 - ETA: 1:43 - loss: 0.898 - ETA: 1:43 - loss: 0.879 - ETA: 1:43 - loss: 0.856 - ETA: 1:42 - loss: 0.830 - ETA: 1:41 - loss: 0.808 - ETA: 1:40 - loss: 0.786 - ETA: 1:39 - loss: 0.765 - ETA: 1:38 - loss: 0.746 - ETA: 1:36 - loss: 0.732 - ETA: 1:35 - loss: 0.717 - ETA: 1:34 - loss: 0.703 - ETA: 1:33 - loss: 0.693 - ETA: 1:32 - loss: 0.684 - ETA: 1:32 - loss: 0.675 - ETA: 1:31 - loss: 0.666 - ETA: 1:30 - loss: 0.658 - ETA: 1:29 - loss: 0.649 - ETA: 1:28 - loss: 0.642 - ETA: 1:27 - loss: 0.635 - ETA: 1:26 - loss: 0.628 - ETA: 1:25 - loss: 0.622 - ETA: 1:24 - loss: 0.615 - ETA: 1:23 - loss: 0.608 - ETA: 1:22 - loss: 0.602 - ETA: 1:21 - loss: 0.597 - ETA: 1:20 - loss: 0.591 - ETA: 1:19 - loss: 0.586 - ETA: 1:18 - loss: 0.581 - ETA: 1:17 - loss: 0.578 - ETA: 1:16 - loss: 0.575 - ETA: 1:15 - loss: 0.572 - ETA: 1:14 - loss: 0.570 - ETA: 1:13 - loss: 0.567 - ETA: 1:12 - loss: 0.564 - ETA: 1:11 - loss: 0.561 - ETA: 1:10 - loss: 0.558 - ETA: 1:09 - loss: 0.555 - ETA: 1:08 - loss: 0.552 - ETA: 1:07 - loss: 0.549 - ETA: 1:06 - loss: 0.547 - ETA: 1:05 - loss: 0.544 - ETA: 1:04 - loss: 0.542 - ETA: 1:03 - loss: 0.539 - ETA: 1:02 - loss: 0.537 - ETA: 1:01 - loss: 0.535 - ETA: 1:00 - loss: 0.533 - ETA: 59s - loss: 0.531 - ETA: 58s - loss: 0.52 - ETA: 57s - loss: 0.52 - ETA: 56s - loss: 0.52 - ETA: 55s - loss: 0.52 - ETA: 54s - loss: 0.52 - ETA: 53s - loss: 0.52 - ETA: 53s - loss: 0.52 - ETA: 52s - loss: 0.51 - ETA: 51s - loss: 0.51 - ETA: 50s - loss: 0.51 - ETA: 49s - loss: 0.51 - ETA: 48s - loss: 0.51 - ETA: 47s - loss: 0.51 - ETA: 46s - loss: 0.50 - ETA: 45s - loss: 0.50 - ETA: 44s - loss: 0.50 - ETA: 43s - loss: 0.50 - ETA: 42s - loss: 0.50 - ETA: 41s - loss: 0.50 - ETA: 40s - loss: 0.50 - ETA: 39s - loss: 0.50 - ETA: 38s - loss: 0.49 - ETA: 37s - loss: 0.49 - ETA: 36s - loss: 0.49 - ETA: 35s - loss: 0.49 - ETA: 34s - loss: 0.49 - ETA: 33s - loss: 0.49 - ETA: 32s - loss: 0.49 - ETA: 31s - loss: 0.49 - ETA: 30s - loss: 0.49 - ETA: 29s - loss: 0.48 - ETA: 28s - loss: 0.48 - ETA: 27s - loss: 0.48 - ETA: 26s - loss: 0.48 - ETA: 25s - loss: 0.48 - ETA: 24s - loss: 0.48 - ETA: 23s - loss: 0.48 - ETA: 22s - loss: 0.48 - ETA: 21s - loss: 0.48 - ETA: 20s - loss: 0.48 - ETA: 19s - loss: 0.48 - ETA: 18s - loss: 0.48 - ETA: 17s - loss: 0.48 - ETA: 16s - loss: 0.48 - ETA: 15s - loss: 0.48 - ETA: 14s - loss: 0.47 - ETA: 13s - loss: 0.47 - ETA: 12s - loss: 0.47 - ETA: 11s - loss: 0.47 - ETA: 10s - loss: 0.47 - ETA: 9s - loss: 0.4778 - ETA: 8s - loss: 0.477 - ETA: 7s - loss: 0.477 - ETA: 6s - loss: 0.476 - ETA: 5s - loss: 0.476 - ETA: 4s - loss: 0.476 - ETA: 3s - loss: 0.475 - ETA: 2s - loss: 0.475 - ETA: 1s - loss: 0.475 - ETA: 0s - loss: 0.474 - ETA: 0s - loss: 0.474 - 111s 999ms/step - loss: 0.4741 - val_loss: 0.3002\n",
      "\n",
      "Epoch 00010: val_loss improved from 0.31217 to 0.30025, saving model to clean_notebooks\\injection_superposition.h5\n",
      "Epoch 11/30\n",
      "111/111 [==============================] - ETA: 1:49 - loss: 0.442 - ETA: 1:47 - loss: 0.421 - ETA: 1:46 - loss: 0.465 - ETA: 1:45 - loss: 0.465 - ETA: 1:44 - loss: 0.455 - ETA: 1:42 - loss: 0.442 - ETA: 1:42 - loss: 0.436 - ETA: 1:41 - loss: 0.431 - ETA: 1:40 - loss: 0.429 - ETA: 1:39 - loss: 0.424 - ETA: 1:38 - loss: 0.419 - ETA: 1:37 - loss: 0.415 - ETA: 1:36 - loss: 0.410 - ETA: 1:35 - loss: 0.411 - ETA: 1:34 - loss: 0.410 - ETA: 1:33 - loss: 0.409 - ETA: 1:32 - loss: 0.407 - ETA: 1:31 - loss: 0.410 - ETA: 1:30 - loss: 0.415 - ETA: 1:29 - loss: 0.420 - ETA: 1:28 - loss: 0.424 - ETA: 1:27 - loss: 0.429 - ETA: 1:26 - loss: 0.434 - ETA: 1:25 - loss: 0.438 - ETA: 1:24 - loss: 0.441 - ETA: 1:23 - loss: 0.444 - ETA: 1:22 - loss: 0.447 - ETA: 1:21 - loss: 0.449 - ETA: 1:20 - loss: 0.451 - ETA: 1:19 - loss: 0.453 - ETA: 1:18 - loss: 0.455 - ETA: 1:17 - loss: 0.456 - ETA: 1:16 - loss: 0.458 - ETA: 1:15 - loss: 0.459 - ETA: 1:14 - loss: 0.460 - ETA: 1:13 - loss: 0.461 - ETA: 1:12 - loss: 0.462 - ETA: 1:11 - loss: 0.462 - ETA: 1:10 - loss: 0.462 - ETA: 1:09 - loss: 0.464 - ETA: 1:08 - loss: 0.464 - ETA: 1:07 - loss: 0.465 - ETA: 1:07 - loss: 0.465 - ETA: 1:06 - loss: 0.466 - ETA: 1:04 - loss: 0.466 - ETA: 1:03 - loss: 0.466 - ETA: 1:02 - loss: 0.466 - ETA: 1:01 - loss: 0.466 - ETA: 1:00 - loss: 0.467 - ETA: 59s - loss: 0.467 - ETA: 58s - loss: 0.46 - ETA: 57s - loss: 0.46 - ETA: 56s - loss: 0.46 - ETA: 55s - loss: 0.46 - ETA: 55s - loss: 0.46 - ETA: 54s - loss: 0.46 - ETA: 53s - loss: 0.46 - ETA: 52s - loss: 0.46 - ETA: 51s - loss: 0.46 - ETA: 50s - loss: 0.46 - ETA: 49s - loss: 0.46 - ETA: 48s - loss: 0.46 - ETA: 47s - loss: 0.46 - ETA: 46s - loss: 0.46 - ETA: 45s - loss: 0.46 - ETA: 44s - loss: 0.46 - ETA: 43s - loss: 0.46 - ETA: 42s - loss: 0.46 - ETA: 41s - loss: 0.46 - ETA: 40s - loss: 0.46 - ETA: 39s - loss: 0.46 - ETA: 38s - loss: 0.46 - ETA: 37s - loss: 0.46 - ETA: 36s - loss: 0.46 - ETA: 35s - loss: 0.46 - ETA: 34s - loss: 0.46 - ETA: 33s - loss: 0.46 - ETA: 32s - loss: 0.45 - ETA: 31s - loss: 0.45 - ETA: 30s - loss: 0.45 - ETA: 29s - loss: 0.45 - ETA: 28s - loss: 0.45 - ETA: 27s - loss: 0.45 - ETA: 26s - loss: 0.45 - ETA: 25s - loss: 0.45 - ETA: 24s - loss: 0.45 - ETA: 23s - loss: 0.45 - ETA: 22s - loss: 0.45 - ETA: 21s - loss: 0.45 - ETA: 20s - loss: 0.45 - ETA: 19s - loss: 0.45 - ETA: 18s - loss: 0.45 - ETA: 17s - loss: 0.45 - ETA: 16s - loss: 0.45 - ETA: 15s - loss: 0.45 - ETA: 14s - loss: 0.45 - ETA: 13s - loss: 0.45 - ETA: 12s - loss: 0.45 - ETA: 11s - loss: 0.45 - ETA: 10s - loss: 0.45 - ETA: 9s - loss: 0.4509 - ETA: 8s - loss: 0.450 - ETA: 7s - loss: 0.450 - ETA: 6s - loss: 0.449 - ETA: 5s - loss: 0.449 - ETA: 4s - loss: 0.449 - ETA: 3s - loss: 0.448 - ETA: 2s - loss: 0.448 - ETA: 1s - loss: 0.448 - ETA: 0s - loss: 0.448 - ETA: 0s - loss: 0.448 - 111s 997ms/step - loss: 0.4478 - val_loss: 0.3160\n",
      "\n",
      "Epoch 00011: val_loss did not improve from 0.30025\n",
      "Epoch 12/30\n",
      "111/111 [==============================] - ETA: 1:48 - loss: 0.548 - ETA: 1:43 - loss: 0.548 - ETA: 1:44 - loss: 0.528 - ETA: 1:43 - loss: 0.505 - ETA: 1:43 - loss: 0.479 - ETA: 1:42 - loss: 0.466 - ETA: 1:41 - loss: 0.455 - ETA: 1:40 - loss: 0.454 - ETA: 1:39 - loss: 0.451 - ETA: 1:38 - loss: 0.447 - ETA: 1:37 - loss: 0.443 - ETA: 1:36 - loss: 0.442 - ETA: 1:35 - loss: 0.442 - ETA: 1:34 - loss: 0.442 - ETA: 1:33 - loss: 0.441 - ETA: 1:32 - loss: 0.445 - ETA: 1:31 - loss: 0.449 - ETA: 1:30 - loss: 0.451 - ETA: 1:29 - loss: 0.453 - ETA: 1:28 - loss: 0.454 - ETA: 1:27 - loss: 0.457 - ETA: 1:26 - loss: 0.458 - ETA: 1:25 - loss: 0.459 - ETA: 1:24 - loss: 0.460 - ETA: 1:23 - loss: 0.460 - ETA: 1:22 - loss: 0.460 - ETA: 1:21 - loss: 0.460 - ETA: 1:20 - loss: 0.459 - ETA: 1:19 - loss: 0.459 - ETA: 1:18 - loss: 0.459 - ETA: 1:17 - loss: 0.458 - ETA: 1:16 - loss: 0.458 - ETA: 1:16 - loss: 0.458 - ETA: 1:15 - loss: 0.458 - ETA: 1:14 - loss: 0.458 - ETA: 1:13 - loss: 0.459 - ETA: 1:12 - loss: 0.459 - ETA: 1:11 - loss: 0.459 - ETA: 1:10 - loss: 0.459 - ETA: 1:09 - loss: 0.458 - ETA: 1:08 - loss: 0.458 - ETA: 1:07 - loss: 0.458 - ETA: 1:06 - loss: 0.457 - ETA: 1:05 - loss: 0.457 - ETA: 1:04 - loss: 0.457 - ETA: 1:03 - loss: 0.457 - ETA: 1:02 - loss: 0.457 - ETA: 1:01 - loss: 0.457 - ETA: 1:00 - loss: 0.457 - ETA: 59s - loss: 0.457 - ETA: 58s - loss: 0.45 - ETA: 57s - loss: 0.45 - ETA: 56s - loss: 0.45 - ETA: 55s - loss: 0.45 - ETA: 54s - loss: 0.45 - ETA: 53s - loss: 0.45 - ETA: 52s - loss: 0.45 - ETA: 51s - loss: 0.45 - ETA: 50s - loss: 0.45 - ETA: 49s - loss: 0.45 - ETA: 48s - loss: 0.45 - ETA: 47s - loss: 0.45 - ETA: 46s - loss: 0.45 - ETA: 45s - loss: 0.45 - ETA: 45s - loss: 0.45 - ETA: 44s - loss: 0.45 - ETA: 43s - loss: 0.45 - ETA: 42s - loss: 0.45 - ETA: 41s - loss: 0.45 - ETA: 40s - loss: 0.45 - ETA: 39s - loss: 0.45 - ETA: 38s - loss: 0.45 - ETA: 37s - loss: 0.45 - ETA: 36s - loss: 0.44 - ETA: 35s - loss: 0.44 - ETA: 34s - loss: 0.44 - ETA: 33s - loss: 0.44 - ETA: 32s - loss: 0.44 - ETA: 31s - loss: 0.44 - ETA: 30s - loss: 0.44 - ETA: 29s - loss: 0.44 - ETA: 28s - loss: 0.44 - ETA: 27s - loss: 0.44 - ETA: 26s - loss: 0.44 - ETA: 25s - loss: 0.44 - ETA: 24s - loss: 0.44 - ETA: 23s - loss: 0.44 - ETA: 22s - loss: 0.44 - ETA: 21s - loss: 0.44 - ETA: 20s - loss: 0.44 - ETA: 19s - loss: 0.44 - ETA: 18s - loss: 0.44 - ETA: 17s - loss: 0.44 - ETA: 16s - loss: 0.44 - ETA: 15s - loss: 0.44 - ETA: 14s - loss: 0.44 - ETA: 13s - loss: 0.44 - ETA: 12s - loss: 0.44 - ETA: 11s - loss: 0.44 - ETA: 10s - loss: 0.44 - ETA: 9s - loss: 0.4399 - ETA: 8s - loss: 0.439 - ETA: 7s - loss: 0.439 - ETA: 6s - loss: 0.439 - ETA: 5s - loss: 0.438 - ETA: 4s - loss: 0.438 - ETA: 3s - loss: 0.438 - ETA: 2s - loss: 0.438 - ETA: 1s - loss: 0.438 - ETA: 0s - loss: 0.438 - ETA: 0s - loss: 0.437 - 110s 993ms/step - loss: 0.4375 - val_loss: 0.3074\n",
      "\n",
      "Epoch 00012: val_loss did not improve from 0.30025\n",
      "Epoch 13/30\n",
      "111/111 [==============================] - ETA: 1:44 - loss: 0.187 - ETA: 1:45 - loss: 0.273 - ETA: 1:44 - loss: 0.286 - ETA: 1:43 - loss: 0.294 - ETA: 1:42 - loss: 0.297 - ETA: 1:41 - loss: 0.296 - ETA: 1:41 - loss: 0.295 - ETA: 1:41 - loss: 0.297 - ETA: 1:40 - loss: 0.296 - ETA: 1:38 - loss: 0.295 - ETA: 1:37 - loss: 0.310 - ETA: 1:36 - loss: 0.326 - ETA: 1:35 - loss: 0.340 - ETA: 1:34 - loss: 0.350 - ETA: 1:33 - loss: 0.359 - ETA: 1:32 - loss: 0.365 - ETA: 1:31 - loss: 0.370 - ETA: 1:30 - loss: 0.375 - ETA: 1:29 - loss: 0.380 - ETA: 1:28 - loss: 0.383 - ETA: 1:27 - loss: 0.386 - ETA: 1:26 - loss: 0.388 - ETA: 1:26 - loss: 0.391 - ETA: 1:25 - loss: 0.393 - ETA: 1:24 - loss: 0.396 - ETA: 1:22 - loss: 0.398 - ETA: 1:21 - loss: 0.400 - ETA: 1:21 - loss: 0.402 - ETA: 1:20 - loss: 0.404 - ETA: 1:19 - loss: 0.405 - ETA: 1:18 - loss: 0.406 - ETA: 1:17 - loss: 0.407 - ETA: 1:16 - loss: 0.407 - ETA: 1:15 - loss: 0.408 - ETA: 1:14 - loss: 0.408 - ETA: 1:13 - loss: 0.409 - ETA: 1:12 - loss: 0.410 - ETA: 1:11 - loss: 0.411 - ETA: 1:10 - loss: 0.412 - ETA: 1:09 - loss: 0.413 - ETA: 1:08 - loss: 0.414 - ETA: 1:07 - loss: 0.415 - ETA: 1:06 - loss: 0.416 - ETA: 1:05 - loss: 0.417 - ETA: 1:04 - loss: 0.419 - ETA: 1:03 - loss: 0.420 - ETA: 1:02 - loss: 0.421 - ETA: 1:01 - loss: 0.422 - ETA: 1:00 - loss: 0.423 - ETA: 59s - loss: 0.423 - ETA: 58s - loss: 0.42 - ETA: 57s - loss: 0.42 - ETA: 56s - loss: 0.42 - ETA: 55s - loss: 0.42 - ETA: 54s - loss: 0.42 - ETA: 53s - loss: 0.42 - ETA: 52s - loss: 0.42 - ETA: 51s - loss: 0.42 - ETA: 50s - loss: 0.42 - ETA: 49s - loss: 0.42 - ETA: 48s - loss: 0.42 - ETA: 47s - loss: 0.43 - ETA: 46s - loss: 0.43 - ETA: 45s - loss: 0.43 - ETA: 44s - loss: 0.43 - ETA: 43s - loss: 0.43 - ETA: 42s - loss: 0.43 - ETA: 42s - loss: 0.43 - ETA: 41s - loss: 0.43 - ETA: 40s - loss: 0.43 - ETA: 39s - loss: 0.43 - ETA: 38s - loss: 0.43 - ETA: 37s - loss: 0.43 - ETA: 36s - loss: 0.43 - ETA: 35s - loss: 0.43 - ETA: 34s - loss: 0.43 - ETA: 33s - loss: 0.43 - ETA: 32s - loss: 0.43 - ETA: 31s - loss: 0.43 - ETA: 30s - loss: 0.43 - ETA: 29s - loss: 0.43 - ETA: 28s - loss: 0.43 - ETA: 27s - loss: 0.43 - ETA: 26s - loss: 0.43 - ETA: 25s - loss: 0.43 - ETA: 24s - loss: 0.43 - ETA: 23s - loss: 0.43 - ETA: 22s - loss: 0.43 - ETA: 21s - loss: 0.43 - ETA: 20s - loss: 0.43 - ETA: 19s - loss: 0.43 - ETA: 18s - loss: 0.43 - ETA: 17s - loss: 0.43 - ETA: 16s - loss: 0.43 - ETA: 15s - loss: 0.43 - ETA: 14s - loss: 0.43 - ETA: 13s - loss: 0.43 - ETA: 12s - loss: 0.43 - ETA: 11s - loss: 0.43 - ETA: 10s - loss: 0.43 - ETA: 9s - loss: 0.4307 - ETA: 8s - loss: 0.430 - ETA: 7s - loss: 0.430 - ETA: 6s - loss: 0.430 - ETA: 5s - loss: 0.430 - ETA: 4s - loss: 0.430 - ETA: 3s - loss: 0.430 - ETA: 2s - loss: 0.430 - ETA: 1s - loss: 0.430 - ETA: 0s - loss: 0.430 - ETA: 0s - loss: 0.430 - 110s 991ms/step - loss: 0.4301 - val_loss: 0.2802\n",
      "\n",
      "Epoch 00013: val_loss improved from 0.30025 to 0.28022, saving model to clean_notebooks\\injection_superposition.h5\n",
      "Epoch 14/30\n",
      "111/111 [==============================] - ETA: 1:50 - loss: 0.431 - ETA: 1:43 - loss: 0.438 - ETA: 1:44 - loss: 0.466 - ETA: 1:43 - loss: 0.458 - ETA: 1:42 - loss: 0.440 - ETA: 1:41 - loss: 0.422 - ETA: 1:40 - loss: 0.421 - ETA: 1:39 - loss: 0.422 - ETA: 1:38 - loss: 0.420 - ETA: 1:37 - loss: 0.416 - ETA: 1:36 - loss: 0.414 - ETA: 1:35 - loss: 0.413 - ETA: 1:35 - loss: 0.415 - ETA: 1:34 - loss: 0.416 - ETA: 1:33 - loss: 0.417 - ETA: 1:32 - loss: 0.418 - ETA: 1:31 - loss: 0.422 - ETA: 1:30 - loss: 0.424 - ETA: 1:29 - loss: 0.425 - ETA: 1:28 - loss: 0.426 - ETA: 1:27 - loss: 0.426 - ETA: 1:26 - loss: 0.426 - ETA: 1:25 - loss: 0.425 - ETA: 1:24 - loss: 0.424 - ETA: 1:23 - loss: 0.423 - ETA: 1:22 - loss: 0.423 - ETA: 1:21 - loss: 0.422 - ETA: 1:21 - loss: 0.422 - ETA: 1:19 - loss: 0.421 - ETA: 1:19 - loss: 0.421 - ETA: 1:18 - loss: 0.420 - ETA: 1:17 - loss: 0.419 - ETA: 1:16 - loss: 0.419 - ETA: 1:15 - loss: 0.418 - ETA: 1:14 - loss: 0.417 - ETA: 1:13 - loss: 0.416 - ETA: 1:12 - loss: 0.415 - ETA: 1:11 - loss: 0.414 - ETA: 1:10 - loss: 0.413 - ETA: 1:09 - loss: 0.412 - ETA: 1:08 - loss: 0.411 - ETA: 1:07 - loss: 0.410 - ETA: 1:06 - loss: 0.409 - ETA: 1:05 - loss: 0.408 - ETA: 1:04 - loss: 0.407 - ETA: 1:03 - loss: 0.407 - ETA: 1:02 - loss: 0.406 - ETA: 1:01 - loss: 0.405 - ETA: 1:00 - loss: 0.404 - ETA: 59s - loss: 0.404 - ETA: 58s - loss: 0.40 - ETA: 57s - loss: 0.40 - ETA: 56s - loss: 0.40 - ETA: 55s - loss: 0.40 - ETA: 54s - loss: 0.40 - ETA: 53s - loss: 0.40 - ETA: 52s - loss: 0.40 - ETA: 52s - loss: 0.40 - ETA: 51s - loss: 0.40 - ETA: 50s - loss: 0.40 - ETA: 49s - loss: 0.40 - ETA: 48s - loss: 0.40 - ETA: 47s - loss: 0.40 - ETA: 46s - loss: 0.40 - ETA: 45s - loss: 0.40 - ETA: 44s - loss: 0.40 - ETA: 43s - loss: 0.40 - ETA: 42s - loss: 0.40 - ETA: 41s - loss: 0.40 - ETA: 40s - loss: 0.40 - ETA: 39s - loss: 0.40 - ETA: 38s - loss: 0.40 - ETA: 37s - loss: 0.40 - ETA: 36s - loss: 0.40 - ETA: 35s - loss: 0.40 - ETA: 34s - loss: 0.40 - ETA: 33s - loss: 0.40 - ETA: 32s - loss: 0.40 - ETA: 31s - loss: 0.40 - ETA: 30s - loss: 0.40 - ETA: 29s - loss: 0.40 - ETA: 28s - loss: 0.40 - ETA: 27s - loss: 0.40 - ETA: 26s - loss: 0.40 - ETA: 25s - loss: 0.40 - ETA: 24s - loss: 0.40 - ETA: 23s - loss: 0.40 - ETA: 22s - loss: 0.40 - ETA: 21s - loss: 0.40 - ETA: 20s - loss: 0.40 - ETA: 19s - loss: 0.40 - ETA: 18s - loss: 0.40 - ETA: 17s - loss: 0.40 - ETA: 16s - loss: 0.40 - ETA: 15s - loss: 0.40 - ETA: 14s - loss: 0.40 - ETA: 13s - loss: 0.40 - ETA: 12s - loss: 0.40 - ETA: 11s - loss: 0.40 - ETA: 10s - loss: 0.40 - ETA: 9s - loss: 0.4036 - ETA: 8s - loss: 0.403 - ETA: 7s - loss: 0.403 - ETA: 6s - loss: 0.403 - ETA: 5s - loss: 0.402 - ETA: 4s - loss: 0.402 - ETA: 3s - loss: 0.402 - ETA: 2s - loss: 0.402 - ETA: 1s - loss: 0.402 - ETA: 0s - loss: 0.401 - ETA: 0s - loss: 0.401 - 110s 995ms/step - loss: 0.4015 - val_loss: 0.2780\n",
      "\n",
      "Epoch 00014: val_loss improved from 0.28022 to 0.27799, saving model to clean_notebooks\\injection_superposition.h5\n",
      "Epoch 15/30\n",
      "111/111 [==============================] - ETA: 1:50 - loss: 0.234 - ETA: 1:44 - loss: 0.186 - ETA: 1:45 - loss: 0.224 - ETA: 1:44 - loss: 0.243 - ETA: 1:43 - loss: 0.261 - ETA: 1:42 - loss: 0.266 - ETA: 1:42 - loss: 0.270 - ETA: 1:40 - loss: 0.273 - ETA: 1:40 - loss: 0.277 - ETA: 1:39 - loss: 0.282 - ETA: 1:38 - loss: 0.286 - ETA: 1:37 - loss: 0.288 - ETA: 1:36 - loss: 0.289 - ETA: 1:35 - loss: 0.292 - ETA: 1:34 - loss: 0.294 - ETA: 1:33 - loss: 0.297 - ETA: 1:32 - loss: 0.298 - ETA: 1:31 - loss: 0.301 - ETA: 1:30 - loss: 0.302 - ETA: 1:29 - loss: 0.304 - ETA: 1:28 - loss: 0.306 - ETA: 1:27 - loss: 0.307 - ETA: 1:26 - loss: 0.309 - ETA: 1:25 - loss: 0.310 - ETA: 1:24 - loss: 0.312 - ETA: 1:23 - loss: 0.315 - ETA: 1:22 - loss: 0.318 - ETA: 1:21 - loss: 0.320 - ETA: 1:20 - loss: 0.323 - ETA: 1:19 - loss: 0.325 - ETA: 1:18 - loss: 0.327 - ETA: 1:17 - loss: 0.329 - ETA: 1:16 - loss: 0.330 - ETA: 1:15 - loss: 0.332 - ETA: 1:14 - loss: 0.333 - ETA: 1:13 - loss: 0.334 - ETA: 1:12 - loss: 0.335 - ETA: 1:11 - loss: 0.336 - ETA: 1:10 - loss: 0.337 - ETA: 1:09 - loss: 0.338 - ETA: 1:08 - loss: 0.339 - ETA: 1:07 - loss: 0.340 - ETA: 1:06 - loss: 0.341 - ETA: 1:05 - loss: 0.341 - ETA: 1:04 - loss: 0.342 - ETA: 1:03 - loss: 0.343 - ETA: 1:02 - loss: 0.343 - ETA: 1:01 - loss: 0.343 - ETA: 1:00 - loss: 0.344 - ETA: 59s - loss: 0.344 - ETA: 58s - loss: 0.34 - ETA: 57s - loss: 0.34 - ETA: 56s - loss: 0.34 - ETA: 55s - loss: 0.34 - ETA: 54s - loss: 0.34 - ETA: 53s - loss: 0.34 - ETA: 52s - loss: 0.34 - ETA: 51s - loss: 0.34 - ETA: 50s - loss: 0.34 - ETA: 49s - loss: 0.34 - ETA: 48s - loss: 0.34 - ETA: 47s - loss: 0.34 - ETA: 46s - loss: 0.34 - ETA: 45s - loss: 0.34 - ETA: 44s - loss: 0.34 - ETA: 43s - loss: 0.34 - ETA: 42s - loss: 0.34 - ETA: 42s - loss: 0.34 - ETA: 41s - loss: 0.34 - ETA: 40s - loss: 0.34 - ETA: 39s - loss: 0.34 - ETA: 38s - loss: 0.34 - ETA: 37s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 35s - loss: 0.34 - ETA: 34s - loss: 0.34 - ETA: 33s - loss: 0.34 - ETA: 32s - loss: 0.34 - ETA: 31s - loss: 0.34 - ETA: 30s - loss: 0.34 - ETA: 29s - loss: 0.34 - ETA: 28s - loss: 0.34 - ETA: 27s - loss: 0.34 - ETA: 26s - loss: 0.35 - ETA: 25s - loss: 0.35 - ETA: 24s - loss: 0.35 - ETA: 23s - loss: 0.35 - ETA: 22s - loss: 0.35 - ETA: 21s - loss: 0.35 - ETA: 20s - loss: 0.35 - ETA: 19s - loss: 0.35 - ETA: 18s - loss: 0.35 - ETA: 17s - loss: 0.35 - ETA: 16s - loss: 0.35 - ETA: 15s - loss: 0.35 - ETA: 14s - loss: 0.35 - ETA: 13s - loss: 0.35 - ETA: 12s - loss: 0.35 - ETA: 11s - loss: 0.35 - ETA: 10s - loss: 0.35 - ETA: 9s - loss: 0.3534 - ETA: 8s - loss: 0.353 - ETA: 7s - loss: 0.353 - ETA: 6s - loss: 0.354 - ETA: 5s - loss: 0.354 - ETA: 4s - loss: 0.354 - ETA: 3s - loss: 0.354 - ETA: 2s - loss: 0.354 - ETA: 1s - loss: 0.354 - ETA: 0s - loss: 0.355 - ETA: 0s - loss: 0.355 - 110s 992ms/step - loss: 0.3556 - val_loss: 0.2664\n",
      "\n",
      "Epoch 00015: val_loss improved from 0.27799 to 0.26637, saving model to clean_notebooks\\injection_superposition.h5\n",
      "Epoch 16/30\n",
      "111/111 [==============================] - ETA: 1:51 - loss: 0.505 - ETA: 1:46 - loss: 0.407 - ETA: 1:48 - loss: 0.530 - ETA: 1:47 - loss: 0.559 - ETA: 1:45 - loss: 0.554 - ETA: 1:44 - loss: 0.546 - ETA: 1:42 - loss: 0.537 - ETA: 1:41 - loss: 0.530 - ETA: 1:40 - loss: 0.525 - ETA: 1:39 - loss: 0.519 - ETA: 1:38 - loss: 0.514 - ETA: 1:37 - loss: 0.512 - ETA: 1:36 - loss: 0.508 - ETA: 1:35 - loss: 0.503 - ETA: 1:34 - loss: 0.499 - ETA: 1:33 - loss: 0.494 - ETA: 1:32 - loss: 0.491 - ETA: 1:31 - loss: 0.486 - ETA: 1:30 - loss: 0.482 - ETA: 1:29 - loss: 0.480 - ETA: 1:28 - loss: 0.482 - ETA: 1:27 - loss: 0.483 - ETA: 1:26 - loss: 0.483 - ETA: 1:25 - loss: 0.483 - ETA: 1:24 - loss: 0.483 - ETA: 1:23 - loss: 0.482 - ETA: 1:22 - loss: 0.481 - ETA: 1:21 - loss: 0.480 - ETA: 1:20 - loss: 0.480 - ETA: 1:19 - loss: 0.478 - ETA: 1:18 - loss: 0.478 - ETA: 1:17 - loss: 0.477 - ETA: 1:16 - loss: 0.476 - ETA: 1:15 - loss: 0.475 - ETA: 1:14 - loss: 0.474 - ETA: 1:13 - loss: 0.474 - ETA: 1:12 - loss: 0.473 - ETA: 1:11 - loss: 0.472 - ETA: 1:10 - loss: 0.471 - ETA: 1:09 - loss: 0.470 - ETA: 1:08 - loss: 0.469 - ETA: 1:07 - loss: 0.468 - ETA: 1:06 - loss: 0.467 - ETA: 1:05 - loss: 0.466 - ETA: 1:04 - loss: 0.465 - ETA: 1:03 - loss: 0.464 - ETA: 1:02 - loss: 0.463 - ETA: 1:01 - loss: 0.462 - ETA: 1:00 - loss: 0.461 - ETA: 59s - loss: 0.460 - ETA: 58s - loss: 0.45 - ETA: 57s - loss: 0.45 - ETA: 56s - loss: 0.45 - ETA: 55s - loss: 0.45 - ETA: 54s - loss: 0.45 - ETA: 53s - loss: 0.45 - ETA: 52s - loss: 0.45 - ETA: 51s - loss: 0.45 - ETA: 50s - loss: 0.45 - ETA: 49s - loss: 0.45 - ETA: 48s - loss: 0.45 - ETA: 47s - loss: 0.45 - ETA: 46s - loss: 0.45 - ETA: 45s - loss: 0.44 - ETA: 44s - loss: 0.44 - ETA: 44s - loss: 0.44 - ETA: 43s - loss: 0.44 - ETA: 42s - loss: 0.44 - ETA: 41s - loss: 0.44 - ETA: 40s - loss: 0.44 - ETA: 39s - loss: 0.44 - ETA: 38s - loss: 0.44 - ETA: 37s - loss: 0.44 - ETA: 36s - loss: 0.44 - ETA: 35s - loss: 0.44 - ETA: 34s - loss: 0.44 - ETA: 33s - loss: 0.44 - ETA: 32s - loss: 0.44 - ETA: 31s - loss: 0.44 - ETA: 30s - loss: 0.44 - ETA: 29s - loss: 0.43 - ETA: 28s - loss: 0.43 - ETA: 27s - loss: 0.43 - ETA: 26s - loss: 0.43 - ETA: 25s - loss: 0.43 - ETA: 24s - loss: 0.43 - ETA: 23s - loss: 0.43 - ETA: 22s - loss: 0.43 - ETA: 21s - loss: 0.43 - ETA: 20s - loss: 0.43 - ETA: 19s - loss: 0.43 - ETA: 18s - loss: 0.43 - ETA: 17s - loss: 0.43 - ETA: 16s - loss: 0.43 - ETA: 15s - loss: 0.43 - ETA: 14s - loss: 0.43 - ETA: 13s - loss: 0.43 - ETA: 12s - loss: 0.43 - ETA: 11s - loss: 0.43 - ETA: 10s - loss: 0.43 - ETA: 9s - loss: 0.4337 - ETA: 8s - loss: 0.433 - ETA: 7s - loss: 0.433 - ETA: 6s - loss: 0.433 - ETA: 5s - loss: 0.432 - ETA: 4s - loss: 0.432 - ETA: 3s - loss: 0.432 - ETA: 2s - loss: 0.432 - ETA: 1s - loss: 0.431 - ETA: 0s - loss: 0.431 - ETA: 0s - loss: 0.431 - 110s 990ms/step - loss: 0.4313 - val_loss: 0.2489\n",
      "\n",
      "Epoch 00016: val_loss improved from 0.26637 to 0.24891, saving model to clean_notebooks\\injection_superposition.h5\n",
      "Epoch 17/30\n",
      "111/111 [==============================] - ETA: 1:51 - loss: 0.385 - ETA: 1:47 - loss: 0.321 - ETA: 1:46 - loss: 0.338 - ETA: 1:46 - loss: 0.337 - ETA: 1:44 - loss: 0.329 - ETA: 1:43 - loss: 0.321 - ETA: 1:42 - loss: 0.323 - ETA: 1:41 - loss: 0.330 - ETA: 1:40 - loss: 0.331 - ETA: 1:39 - loss: 0.330 - ETA: 1:38 - loss: 0.335 - ETA: 1:37 - loss: 0.339 - ETA: 1:36 - loss: 0.340 - ETA: 1:35 - loss: 0.340 - ETA: 1:34 - loss: 0.340 - ETA: 1:33 - loss: 0.340 - ETA: 1:32 - loss: 0.340 - ETA: 1:31 - loss: 0.340 - ETA: 1:30 - loss: 0.339 - ETA: 1:29 - loss: 0.337 - ETA: 1:28 - loss: 0.337 - ETA: 1:27 - loss: 0.337 - ETA: 1:26 - loss: 0.338 - ETA: 1:25 - loss: 0.338 - ETA: 1:24 - loss: 0.338 - ETA: 1:23 - loss: 0.338 - ETA: 1:22 - loss: 0.337 - ETA: 1:21 - loss: 0.337 - ETA: 1:20 - loss: 0.337 - ETA: 1:19 - loss: 0.336 - ETA: 1:18 - loss: 0.336 - ETA: 1:17 - loss: 0.335 - ETA: 1:16 - loss: 0.335 - ETA: 1:15 - loss: 0.335 - ETA: 1:14 - loss: 0.335 - ETA: 1:13 - loss: 0.335 - ETA: 1:12 - loss: 0.335 - ETA: 1:11 - loss: 0.335 - ETA: 1:10 - loss: 0.335 - ETA: 1:09 - loss: 0.335 - ETA: 1:08 - loss: 0.335 - ETA: 1:07 - loss: 0.335 - ETA: 1:06 - loss: 0.335 - ETA: 1:05 - loss: 0.335 - ETA: 1:04 - loss: 0.335 - ETA: 1:03 - loss: 0.335 - ETA: 1:02 - loss: 0.335 - ETA: 1:01 - loss: 0.335 - ETA: 1:00 - loss: 0.335 - ETA: 59s - loss: 0.335 - ETA: 58s - loss: 0.33 - ETA: 57s - loss: 0.33 - ETA: 56s - loss: 0.33 - ETA: 55s - loss: 0.33 - ETA: 54s - loss: 0.33 - ETA: 53s - loss: 0.33 - ETA: 52s - loss: 0.33 - ETA: 51s - loss: 0.33 - ETA: 50s - loss: 0.33 - ETA: 49s - loss: 0.33 - ETA: 48s - loss: 0.33 - ETA: 47s - loss: 0.33 - ETA: 46s - loss: 0.33 - ETA: 45s - loss: 0.33 - ETA: 44s - loss: 0.33 - ETA: 43s - loss: 0.33 - ETA: 42s - loss: 0.33 - ETA: 42s - loss: 0.33 - ETA: 41s - loss: 0.33 - ETA: 40s - loss: 0.33 - ETA: 39s - loss: 0.33 - ETA: 38s - loss: 0.33 - ETA: 37s - loss: 0.33 - ETA: 36s - loss: 0.34 - ETA: 35s - loss: 0.34 - ETA: 34s - loss: 0.34 - ETA: 33s - loss: 0.34 - ETA: 32s - loss: 0.34 - ETA: 31s - loss: 0.34 - ETA: 30s - loss: 0.34 - ETA: 29s - loss: 0.34 - ETA: 28s - loss: 0.34 - ETA: 27s - loss: 0.34 - ETA: 26s - loss: 0.34 - ETA: 25s - loss: 0.34 - ETA: 24s - loss: 0.34 - ETA: 23s - loss: 0.34 - ETA: 22s - loss: 0.34 - ETA: 21s - loss: 0.34 - ETA: 20s - loss: 0.34 - ETA: 19s - loss: 0.34 - ETA: 18s - loss: 0.34 - ETA: 17s - loss: 0.34 - ETA: 16s - loss: 0.34 - ETA: 15s - loss: 0.34 - ETA: 14s - loss: 0.34 - ETA: 13s - loss: 0.34 - ETA: 12s - loss: 0.35 - ETA: 11s - loss: 0.35 - ETA: 10s - loss: 0.35 - ETA: 9s - loss: 0.3519 - ETA: 8s - loss: 0.352 - ETA: 7s - loss: 0.352 - ETA: 6s - loss: 0.353 - ETA: 5s - loss: 0.353 - ETA: 4s - loss: 0.353 - ETA: 3s - loss: 0.354 - ETA: 2s - loss: 0.354 - ETA: 1s - loss: 0.354 - ETA: 0s - loss: 0.355 - ETA: 0s - loss: 0.355 - 110s 993ms/step - loss: 0.3557 - val_loss: 0.2493\n",
      "\n",
      "Epoch 00017: val_loss did not improve from 0.24891\n",
      "Epoch 18/30\n",
      "111/111 [==============================] - ETA: 1:44 - loss: 0.389 - ETA: 1:45 - loss: 0.366 - ETA: 1:45 - loss: 0.434 - ETA: 1:44 - loss: 0.434 - ETA: 1:42 - loss: 0.423 - ETA: 1:41 - loss: 0.417 - ETA: 1:40 - loss: 0.409 - ETA: 1:40 - loss: 0.403 - ETA: 1:39 - loss: 0.400 - ETA: 1:38 - loss: 0.396 - ETA: 1:37 - loss: 0.391 - ETA: 1:36 - loss: 0.389 - ETA: 1:35 - loss: 0.386 - ETA: 1:34 - loss: 0.384 - ETA: 1:33 - loss: 0.382 - ETA: 1:32 - loss: 0.379 - ETA: 1:31 - loss: 0.377 - ETA: 1:30 - loss: 0.376 - ETA: 1:30 - loss: 0.375 - ETA: 1:29 - loss: 0.374 - ETA: 1:28 - loss: 0.373 - ETA: 1:27 - loss: 0.373 - ETA: 1:26 - loss: 0.372 - ETA: 1:25 - loss: 0.372 - ETA: 1:24 - loss: 0.371 - ETA: 1:23 - loss: 0.370 - ETA: 1:22 - loss: 0.370 - ETA: 1:21 - loss: 0.369 - ETA: 1:20 - loss: 0.368 - ETA: 1:19 - loss: 0.368 - ETA: 1:18 - loss: 0.368 - ETA: 1:17 - loss: 0.367 - ETA: 1:16 - loss: 0.367 - ETA: 1:15 - loss: 0.367 - ETA: 1:14 - loss: 0.367 - ETA: 1:13 - loss: 0.367 - ETA: 1:12 - loss: 0.367 - ETA: 1:11 - loss: 0.368 - ETA: 1:10 - loss: 0.369 - ETA: 1:09 - loss: 0.370 - ETA: 1:08 - loss: 0.370 - ETA: 1:07 - loss: 0.371 - ETA: 1:06 - loss: 0.371 - ETA: 1:05 - loss: 0.372 - ETA: 1:04 - loss: 0.372 - ETA: 1:03 - loss: 0.373 - ETA: 1:02 - loss: 0.373 - ETA: 1:01 - loss: 0.373 - ETA: 1:00 - loss: 0.374 - ETA: 59s - loss: 0.374 - ETA: 58s - loss: 0.37 - ETA: 57s - loss: 0.37 - ETA: 56s - loss: 0.37 - ETA: 55s - loss: 0.37 - ETA: 54s - loss: 0.37 - ETA: 53s - loss: 0.37 - ETA: 52s - loss: 0.37 - ETA: 51s - loss: 0.37 - ETA: 51s - loss: 0.37 - ETA: 50s - loss: 0.37 - ETA: 49s - loss: 0.37 - ETA: 48s - loss: 0.37 - ETA: 47s - loss: 0.37 - ETA: 46s - loss: 0.37 - ETA: 45s - loss: 0.37 - ETA: 44s - loss: 0.37 - ETA: 43s - loss: 0.37 - ETA: 42s - loss: 0.37 - ETA: 41s - loss: 0.37 - ETA: 40s - loss: 0.37 - ETA: 39s - loss: 0.37 - ETA: 38s - loss: 0.37 - ETA: 37s - loss: 0.37 - ETA: 36s - loss: 0.37 - ETA: 35s - loss: 0.37 - ETA: 34s - loss: 0.37 - ETA: 33s - loss: 0.37 - ETA: 32s - loss: 0.37 - ETA: 31s - loss: 0.37 - ETA: 30s - loss: 0.37 - ETA: 29s - loss: 0.37 - ETA: 28s - loss: 0.37 - ETA: 27s - loss: 0.37 - ETA: 26s - loss: 0.37 - ETA: 25s - loss: 0.37 - ETA: 24s - loss: 0.37 - ETA: 23s - loss: 0.37 - ETA: 22s - loss: 0.37 - ETA: 21s - loss: 0.37 - ETA: 20s - loss: 0.37 - ETA: 19s - loss: 0.37 - ETA: 18s - loss: 0.37 - ETA: 17s - loss: 0.37 - ETA: 16s - loss: 0.37 - ETA: 15s - loss: 0.37 - ETA: 14s - loss: 0.37 - ETA: 13s - loss: 0.37 - ETA: 12s - loss: 0.37 - ETA: 11s - loss: 0.37 - ETA: 10s - loss: 0.37 - ETA: 9s - loss: 0.3762 - ETA: 8s - loss: 0.376 - ETA: 7s - loss: 0.376 - ETA: 6s - loss: 0.376 - ETA: 5s - loss: 0.376 - ETA: 4s - loss: 0.376 - ETA: 3s - loss: 0.376 - ETA: 2s - loss: 0.376 - ETA: 1s - loss: 0.376 - ETA: 0s - loss: 0.375 - ETA: 0s - loss: 0.375 - 110s 994ms/step - loss: 0.3758 - val_loss: 0.2443\n",
      "\n",
      "Epoch 00018: val_loss improved from 0.24891 to 0.24426, saving model to clean_notebooks\\injection_superposition.h5\n",
      "Epoch 19/30\n",
      "111/111 [==============================] - ETA: 1:47 - loss: 0.167 - ETA: 1:45 - loss: 0.187 - ETA: 1:45 - loss: 0.280 - ETA: 1:44 - loss: 0.323 - ETA: 1:43 - loss: 0.346 - ETA: 1:43 - loss: 0.355 - ETA: 1:42 - loss: 0.363 - ETA: 1:41 - loss: 0.365 - ETA: 1:40 - loss: 0.363 - ETA: 1:39 - loss: 0.363 - ETA: 1:38 - loss: 0.361 - ETA: 1:37 - loss: 0.358 - ETA: 1:36 - loss: 0.354 - ETA: 1:35 - loss: 0.350 - ETA: 1:34 - loss: 0.348 - ETA: 1:33 - loss: 0.345 - ETA: 1:32 - loss: 0.342 - ETA: 1:31 - loss: 0.339 - ETA: 1:30 - loss: 0.336 - ETA: 1:29 - loss: 0.333 - ETA: 1:28 - loss: 0.331 - ETA: 1:27 - loss: 0.329 - ETA: 1:26 - loss: 0.328 - ETA: 1:25 - loss: 0.327 - ETA: 1:24 - loss: 0.326 - ETA: 1:23 - loss: 0.326 - ETA: 1:22 - loss: 0.325 - ETA: 1:21 - loss: 0.325 - ETA: 1:20 - loss: 0.324 - ETA: 1:19 - loss: 0.323 - ETA: 1:18 - loss: 0.323 - ETA: 1:17 - loss: 0.322 - ETA: 1:16 - loss: 0.322 - ETA: 1:15 - loss: 0.321 - ETA: 1:14 - loss: 0.320 - ETA: 1:13 - loss: 0.320 - ETA: 1:12 - loss: 0.320 - ETA: 1:11 - loss: 0.320 - ETA: 1:10 - loss: 0.319 - ETA: 1:09 - loss: 0.319 - ETA: 1:08 - loss: 0.320 - ETA: 1:07 - loss: 0.320 - ETA: 1:06 - loss: 0.320 - ETA: 1:05 - loss: 0.321 - ETA: 1:04 - loss: 0.321 - ETA: 1:03 - loss: 0.322 - ETA: 1:02 - loss: 0.322 - ETA: 1:01 - loss: 0.322 - ETA: 1:00 - loss: 0.323 - ETA: 59s - loss: 0.323 - ETA: 58s - loss: 0.32 - ETA: 57s - loss: 0.32 - ETA: 56s - loss: 0.32 - ETA: 55s - loss: 0.32 - ETA: 54s - loss: 0.32 - ETA: 53s - loss: 0.32 - ETA: 52s - loss: 0.32 - ETA: 51s - loss: 0.32 - ETA: 50s - loss: 0.32 - ETA: 49s - loss: 0.32 - ETA: 48s - loss: 0.32 - ETA: 47s - loss: 0.32 - ETA: 47s - loss: 0.32 - ETA: 46s - loss: 0.32 - ETA: 45s - loss: 0.32 - ETA: 44s - loss: 0.33 - ETA: 43s - loss: 0.33 - ETA: 42s - loss: 0.33 - ETA: 41s - loss: 0.33 - ETA: 40s - loss: 0.33 - ETA: 39s - loss: 0.33 - ETA: 38s - loss: 0.33 - ETA: 37s - loss: 0.33 - ETA: 36s - loss: 0.33 - ETA: 35s - loss: 0.33 - ETA: 34s - loss: 0.33 - ETA: 33s - loss: 0.33 - ETA: 32s - loss: 0.33 - ETA: 31s - loss: 0.33 - ETA: 30s - loss: 0.33 - ETA: 29s - loss: 0.33 - ETA: 28s - loss: 0.33 - ETA: 27s - loss: 0.33 - ETA: 26s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 24s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 20s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 17s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 13s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 9s - loss: 0.3372 - ETA: 8s - loss: 0.337 - ETA: 7s - loss: 0.337 - ETA: 6s - loss: 0.337 - ETA: 5s - loss: 0.337 - ETA: 4s - loss: 0.337 - ETA: 3s - loss: 0.337 - ETA: 2s - loss: 0.337 - ETA: 1s - loss: 0.337 - ETA: 0s - loss: 0.337 - ETA: 0s - loss: 0.338 - 110s 990ms/step - loss: 0.3382 - val_loss: 0.2323\n",
      "\n",
      "Epoch 00019: val_loss improved from 0.24426 to 0.23230, saving model to clean_notebooks\\injection_superposition.h5\n",
      "Epoch 20/30\n",
      "111/111 [==============================] - ETA: 1:54 - loss: 0.168 - ETA: 1:47 - loss: 0.233 - ETA: 1:45 - loss: 0.287 - ETA: 1:44 - loss: 0.316 - ETA: 1:43 - loss: 0.319 - ETA: 1:41 - loss: 0.318 - ETA: 1:40 - loss: 0.312 - ETA: 1:40 - loss: 0.310 - ETA: 1:39 - loss: 0.309 - ETA: 1:38 - loss: 0.311 - ETA: 1:37 - loss: 0.314 - ETA: 1:36 - loss: 0.316 - ETA: 1:35 - loss: 0.316 - ETA: 1:34 - loss: 0.316 - ETA: 1:33 - loss: 0.316 - ETA: 1:32 - loss: 0.316 - ETA: 1:31 - loss: 0.316 - ETA: 1:30 - loss: 0.316 - ETA: 1:29 - loss: 0.317 - ETA: 1:28 - loss: 0.317 - ETA: 1:27 - loss: 0.318 - ETA: 1:26 - loss: 0.318 - ETA: 1:25 - loss: 0.318 - ETA: 1:24 - loss: 0.318 - ETA: 1:23 - loss: 0.318 - ETA: 1:22 - loss: 0.318 - ETA: 1:21 - loss: 0.318 - ETA: 1:20 - loss: 0.317 - ETA: 1:19 - loss: 0.317 - ETA: 1:18 - loss: 0.317 - ETA: 1:17 - loss: 0.317 - ETA: 1:16 - loss: 0.317 - ETA: 1:15 - loss: 0.316 - ETA: 1:14 - loss: 0.316 - ETA: 1:13 - loss: 0.316 - ETA: 1:13 - loss: 0.315 - ETA: 1:12 - loss: 0.316 - ETA: 1:11 - loss: 0.315 - ETA: 1:10 - loss: 0.315 - ETA: 1:09 - loss: 0.315 - ETA: 1:08 - loss: 0.315 - ETA: 1:07 - loss: 0.315 - ETA: 1:06 - loss: 0.315 - ETA: 1:05 - loss: 0.315 - ETA: 1:04 - loss: 0.316 - ETA: 1:03 - loss: 0.316 - ETA: 1:02 - loss: 0.317 - ETA: 1:01 - loss: 0.317 - ETA: 1:00 - loss: 0.318 - ETA: 59s - loss: 0.319 - ETA: 58s - loss: 0.31 - ETA: 57s - loss: 0.31 - ETA: 56s - loss: 0.32 - ETA: 55s - loss: 0.32 - ETA: 54s - loss: 0.32 - ETA: 53s - loss: 0.32 - ETA: 52s - loss: 0.32 - ETA: 51s - loss: 0.32 - ETA: 50s - loss: 0.32 - ETA: 49s - loss: 0.32 - ETA: 48s - loss: 0.32 - ETA: 47s - loss: 0.32 - ETA: 46s - loss: 0.32 - ETA: 45s - loss: 0.32 - ETA: 44s - loss: 0.32 - ETA: 43s - loss: 0.32 - ETA: 42s - loss: 0.32 - ETA: 41s - loss: 0.32 - ETA: 41s - loss: 0.32 - ETA: 40s - loss: 0.32 - ETA: 39s - loss: 0.32 - ETA: 38s - loss: 0.32 - ETA: 37s - loss: 0.32 - ETA: 36s - loss: 0.32 - ETA: 35s - loss: 0.32 - ETA: 34s - loss: 0.32 - ETA: 33s - loss: 0.32 - ETA: 32s - loss: 0.32 - ETA: 31s - loss: 0.32 - ETA: 30s - loss: 0.32 - ETA: 29s - loss: 0.32 - ETA: 28s - loss: 0.32 - ETA: 27s - loss: 0.32 - ETA: 26s - loss: 0.32 - ETA: 25s - loss: 0.32 - ETA: 24s - loss: 0.32 - ETA: 23s - loss: 0.32 - ETA: 22s - loss: 0.32 - ETA: 21s - loss: 0.32 - ETA: 20s - loss: 0.32 - ETA: 19s - loss: 0.32 - ETA: 18s - loss: 0.32 - ETA: 17s - loss: 0.32 - ETA: 16s - loss: 0.32 - ETA: 15s - loss: 0.32 - ETA: 14s - loss: 0.32 - ETA: 13s - loss: 0.32 - ETA: 12s - loss: 0.32 - ETA: 11s - loss: 0.32 - ETA: 10s - loss: 0.32 - ETA: 9s - loss: 0.3259 - ETA: 8s - loss: 0.326 - ETA: 7s - loss: 0.326 - ETA: 6s - loss: 0.326 - ETA: 5s - loss: 0.326 - ETA: 4s - loss: 0.326 - ETA: 3s - loss: 0.327 - ETA: 2s - loss: 0.327 - ETA: 1s - loss: 0.327 - ETA: 0s - loss: 0.327 - ETA: 0s - loss: 0.327 - 110s 988ms/step - loss: 0.3279 - val_loss: 0.2254\n",
      "\n",
      "Epoch 00020: val_loss improved from 0.23230 to 0.22541, saving model to clean_notebooks\\injection_superposition.h5\n",
      "Epoch 21/30\n",
      "111/111 [==============================] - ETA: 1:49 - loss: 0.239 - ETA: 1:47 - loss: 0.204 - ETA: 1:45 - loss: 0.218 - ETA: 1:46 - loss: 0.224 - ETA: 1:45 - loss: 0.227 - ETA: 1:43 - loss: 0.235 - ETA: 1:42 - loss: 0.244 - ETA: 1:41 - loss: 0.253 - ETA: 1:39 - loss: 0.257 - ETA: 1:38 - loss: 0.261 - ETA: 1:37 - loss: 0.263 - ETA: 1:36 - loss: 0.265 - ETA: 1:35 - loss: 0.267 - ETA: 1:34 - loss: 0.272 - ETA: 1:34 - loss: 0.277 - ETA: 1:33 - loss: 0.281 - ETA: 1:32 - loss: 0.285 - ETA: 1:30 - loss: 0.288 - ETA: 1:29 - loss: 0.290 - ETA: 1:28 - loss: 0.293 - ETA: 1:27 - loss: 0.295 - ETA: 1:26 - loss: 0.297 - ETA: 1:26 - loss: 0.299 - ETA: 1:25 - loss: 0.300 - ETA: 1:24 - loss: 0.304 - ETA: 1:23 - loss: 0.308 - ETA: 1:22 - loss: 0.310 - ETA: 1:21 - loss: 0.313 - ETA: 1:20 - loss: 0.315 - ETA: 1:19 - loss: 0.317 - ETA: 1:18 - loss: 0.319 - ETA: 1:17 - loss: 0.320 - ETA: 1:16 - loss: 0.321 - ETA: 1:15 - loss: 0.322 - ETA: 1:14 - loss: 0.323 - ETA: 1:13 - loss: 0.323 - ETA: 1:12 - loss: 0.324 - ETA: 1:11 - loss: 0.325 - ETA: 1:10 - loss: 0.325 - ETA: 1:09 - loss: 0.325 - ETA: 1:08 - loss: 0.325 - ETA: 1:07 - loss: 0.326 - ETA: 1:06 - loss: 0.326 - ETA: 1:05 - loss: 0.327 - ETA: 1:04 - loss: 0.327 - ETA: 1:03 - loss: 0.328 - ETA: 1:02 - loss: 0.328 - ETA: 1:01 - loss: 0.329 - ETA: 1:00 - loss: 0.330 - ETA: 59s - loss: 0.330 - ETA: 58s - loss: 0.33 - ETA: 57s - loss: 0.33 - ETA: 56s - loss: 0.33 - ETA: 55s - loss: 0.33 - ETA: 54s - loss: 0.33 - ETA: 53s - loss: 0.33 - ETA: 52s - loss: 0.33 - ETA: 51s - loss: 0.33 - ETA: 50s - loss: 0.33 - ETA: 49s - loss: 0.33 - ETA: 48s - loss: 0.33 - ETA: 47s - loss: 0.33 - ETA: 46s - loss: 0.33 - ETA: 45s - loss: 0.33 - ETA: 44s - loss: 0.33 - ETA: 43s - loss: 0.33 - ETA: 42s - loss: 0.33 - ETA: 41s - loss: 0.33 - ETA: 40s - loss: 0.33 - ETA: 39s - loss: 0.33 - ETA: 38s - loss: 0.33 - ETA: 38s - loss: 0.33 - ETA: 37s - loss: 0.33 - ETA: 36s - loss: 0.33 - ETA: 35s - loss: 0.33 - ETA: 34s - loss: 0.33 - ETA: 33s - loss: 0.33 - ETA: 32s - loss: 0.33 - ETA: 31s - loss: 0.33 - ETA: 30s - loss: 0.33 - ETA: 29s - loss: 0.33 - ETA: 28s - loss: 0.33 - ETA: 27s - loss: 0.33 - ETA: 26s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 24s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 20s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 17s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 13s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 9s - loss: 0.3325 - ETA: 8s - loss: 0.332 - ETA: 7s - loss: 0.332 - ETA: 6s - loss: 0.332 - ETA: 5s - loss: 0.332 - ETA: 4s - loss: 0.332 - ETA: 3s - loss: 0.332 - ETA: 2s - loss: 0.332 - ETA: 1s - loss: 0.333 - ETA: 0s - loss: 0.333 - ETA: 0s - loss: 0.333 - 110s 988ms/step - loss: 0.3333 - val_loss: 0.2174\n",
      "\n",
      "Epoch 00021: val_loss improved from 0.22541 to 0.21735, saving model to clean_notebooks\\injection_superposition.h5\n",
      "Epoch 22/30\n",
      "111/111 [==============================] - ETA: 1:46 - loss: 0.398 - ETA: 1:45 - loss: 0.502 - ETA: 1:45 - loss: 0.477 - ETA: 1:44 - loss: 0.453 - ETA: 1:43 - loss: 0.438 - ETA: 1:42 - loss: 0.430 - ETA: 1:41 - loss: 0.423 - ETA: 1:40 - loss: 0.413 - ETA: 1:39 - loss: 0.407 - ETA: 1:38 - loss: 0.401 - ETA: 1:37 - loss: 0.397 - ETA: 1:36 - loss: 0.393 - ETA: 1:35 - loss: 0.389 - ETA: 1:34 - loss: 0.386 - ETA: 1:33 - loss: 0.383 - ETA: 1:32 - loss: 0.380 - ETA: 1:31 - loss: 0.377 - ETA: 1:30 - loss: 0.375 - ETA: 1:29 - loss: 0.372 - ETA: 1:28 - loss: 0.370 - ETA: 1:27 - loss: 0.368 - ETA: 1:26 - loss: 0.366 - ETA: 1:25 - loss: 0.364 - ETA: 1:24 - loss: 0.362 - ETA: 1:23 - loss: 0.360 - ETA: 1:22 - loss: 0.358 - ETA: 1:21 - loss: 0.356 - ETA: 1:20 - loss: 0.354 - ETA: 1:19 - loss: 0.352 - ETA: 1:18 - loss: 0.350 - ETA: 1:17 - loss: 0.348 - ETA: 1:16 - loss: 0.346 - ETA: 1:15 - loss: 0.344 - ETA: 1:14 - loss: 0.343 - ETA: 1:13 - loss: 0.343 - ETA: 1:12 - loss: 0.342 - ETA: 1:11 - loss: 0.342 - ETA: 1:10 - loss: 0.342 - ETA: 1:10 - loss: 0.341 - ETA: 1:09 - loss: 0.341 - ETA: 1:08 - loss: 0.341 - ETA: 1:07 - loss: 0.341 - ETA: 1:06 - loss: 0.340 - ETA: 1:05 - loss: 0.340 - ETA: 1:04 - loss: 0.339 - ETA: 1:03 - loss: 0.339 - ETA: 1:02 - loss: 0.339 - ETA: 1:01 - loss: 0.338 - ETA: 1:00 - loss: 0.338 - ETA: 59s - loss: 0.337 - ETA: 58s - loss: 0.33 - ETA: 57s - loss: 0.33 - ETA: 56s - loss: 0.33 - ETA: 55s - loss: 0.33 - ETA: 54s - loss: 0.33 - ETA: 53s - loss: 0.33 - ETA: 52s - loss: 0.33 - ETA: 51s - loss: 0.33 - ETA: 50s - loss: 0.33 - ETA: 49s - loss: 0.33 - ETA: 48s - loss: 0.33 - ETA: 48s - loss: 0.33 - ETA: 47s - loss: 0.33 - ETA: 46s - loss: 0.33 - ETA: 45s - loss: 0.33 - ETA: 44s - loss: 0.33 - ETA: 43s - loss: 0.33 - ETA: 42s - loss: 0.33 - ETA: 41s - loss: 0.33 - ETA: 40s - loss: 0.33 - ETA: 39s - loss: 0.33 - ETA: 38s - loss: 0.34 - ETA: 37s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 35s - loss: 0.34 - ETA: 34s - loss: 0.34 - ETA: 33s - loss: 0.34 - ETA: 32s - loss: 0.34 - ETA: 31s - loss: 0.34 - ETA: 30s - loss: 0.34 - ETA: 29s - loss: 0.34 - ETA: 28s - loss: 0.34 - ETA: 27s - loss: 0.34 - ETA: 26s - loss: 0.34 - ETA: 25s - loss: 0.34 - ETA: 24s - loss: 0.34 - ETA: 23s - loss: 0.34 - ETA: 22s - loss: 0.34 - ETA: 21s - loss: 0.34 - ETA: 20s - loss: 0.34 - ETA: 19s - loss: 0.34 - ETA: 18s - loss: 0.34 - ETA: 17s - loss: 0.34 - ETA: 16s - loss: 0.34 - ETA: 15s - loss: 0.34 - ETA: 14s - loss: 0.34 - ETA: 13s - loss: 0.34 - ETA: 12s - loss: 0.34 - ETA: 11s - loss: 0.34 - ETA: 10s - loss: 0.34 - ETA: 9s - loss: 0.3415 - ETA: 8s - loss: 0.341 - ETA: 7s - loss: 0.341 - ETA: 6s - loss: 0.341 - ETA: 5s - loss: 0.341 - ETA: 4s - loss: 0.341 - ETA: 3s - loss: 0.341 - ETA: 2s - loss: 0.341 - ETA: 1s - loss: 0.341 - ETA: 0s - loss: 0.341 - ETA: 0s - loss: 0.341 - 110s 993ms/step - loss: 0.3418 - val_loss: 0.2228\n",
      "\n",
      "Epoch 00022: val_loss did not improve from 0.21735\n",
      "Epoch 23/30\n",
      "111/111 [==============================] - ETA: 1:48 - loss: 0.303 - ETA: 1:46 - loss: 0.392 - ETA: 1:45 - loss: 0.389 - ETA: 1:44 - loss: 0.369 - ETA: 1:43 - loss: 0.357 - ETA: 1:42 - loss: 0.350 - ETA: 1:41 - loss: 0.341 - ETA: 1:41 - loss: 0.331 - ETA: 1:40 - loss: 0.323 - ETA: 1:39 - loss: 0.326 - ETA: 1:37 - loss: 0.327 - ETA: 1:36 - loss: 0.329 - ETA: 1:35 - loss: 0.329 - ETA: 1:34 - loss: 0.329 - ETA: 1:33 - loss: 0.328 - ETA: 1:32 - loss: 0.329 - ETA: 1:31 - loss: 0.329 - ETA: 1:30 - loss: 0.329 - ETA: 1:29 - loss: 0.330 - ETA: 1:28 - loss: 0.332 - ETA: 1:27 - loss: 0.333 - ETA: 1:26 - loss: 0.333 - ETA: 1:25 - loss: 0.334 - ETA: 1:24 - loss: 0.334 - ETA: 1:23 - loss: 0.334 - ETA: 1:22 - loss: 0.334 - ETA: 1:21 - loss: 0.335 - ETA: 1:20 - loss: 0.336 - ETA: 1:19 - loss: 0.337 - ETA: 1:18 - loss: 0.338 - ETA: 1:17 - loss: 0.338 - ETA: 1:16 - loss: 0.339 - ETA: 1:15 - loss: 0.340 - ETA: 1:14 - loss: 0.341 - ETA: 1:13 - loss: 0.342 - ETA: 1:12 - loss: 0.343 - ETA: 1:11 - loss: 0.344 - ETA: 1:10 - loss: 0.346 - ETA: 1:09 - loss: 0.347 - ETA: 1:08 - loss: 0.348 - ETA: 1:07 - loss: 0.350 - ETA: 1:06 - loss: 0.352 - ETA: 1:05 - loss: 0.353 - ETA: 1:04 - loss: 0.355 - ETA: 1:04 - loss: 0.356 - ETA: 1:03 - loss: 0.357 - ETA: 1:02 - loss: 0.358 - ETA: 1:01 - loss: 0.359 - ETA: 1:00 - loss: 0.360 - ETA: 59s - loss: 0.361 - ETA: 58s - loss: 0.36 - ETA: 57s - loss: 0.36 - ETA: 56s - loss: 0.36 - ETA: 55s - loss: 0.36 - ETA: 54s - loss: 0.36 - ETA: 53s - loss: 0.36 - ETA: 52s - loss: 0.36 - ETA: 51s - loss: 0.36 - ETA: 50s - loss: 0.36 - ETA: 49s - loss: 0.36 - ETA: 48s - loss: 0.36 - ETA: 47s - loss: 0.36 - ETA: 46s - loss: 0.36 - ETA: 45s - loss: 0.36 - ETA: 44s - loss: 0.37 - ETA: 43s - loss: 0.37 - ETA: 42s - loss: 0.37 - ETA: 41s - loss: 0.37 - ETA: 40s - loss: 0.37 - ETA: 39s - loss: 0.37 - ETA: 38s - loss: 0.37 - ETA: 37s - loss: 0.37 - ETA: 36s - loss: 0.37 - ETA: 35s - loss: 0.37 - ETA: 35s - loss: 0.37 - ETA: 34s - loss: 0.37 - ETA: 33s - loss: 0.37 - ETA: 32s - loss: 0.37 - ETA: 31s - loss: 0.37 - ETA: 30s - loss: 0.37 - ETA: 29s - loss: 0.37 - ETA: 28s - loss: 0.37 - ETA: 27s - loss: 0.37 - ETA: 26s - loss: 0.37 - ETA: 25s - loss: 0.37 - ETA: 24s - loss: 0.37 - ETA: 23s - loss: 0.37 - ETA: 22s - loss: 0.37 - ETA: 21s - loss: 0.37 - ETA: 20s - loss: 0.37 - ETA: 19s - loss: 0.37 - ETA: 18s - loss: 0.37 - ETA: 17s - loss: 0.37 - ETA: 16s - loss: 0.37 - ETA: 15s - loss: 0.37 - ETA: 14s - loss: 0.37 - ETA: 13s - loss: 0.37 - ETA: 12s - loss: 0.37 - ETA: 11s - loss: 0.37 - ETA: 10s - loss: 0.37 - ETA: 9s - loss: 0.3712 - ETA: 8s - loss: 0.371 - ETA: 7s - loss: 0.371 - ETA: 6s - loss: 0.370 - ETA: 5s - loss: 0.370 - ETA: 4s - loss: 0.370 - ETA: 3s - loss: 0.370 - ETA: 2s - loss: 0.370 - ETA: 1s - loss: 0.370 - ETA: 0s - loss: 0.370 - ETA: 0s - loss: 0.370 - 110s 992ms/step - loss: 0.3700 - val_loss: 0.2157\n",
      "\n",
      "Epoch 00023: val_loss improved from 0.21735 to 0.21572, saving model to clean_notebooks\\injection_superposition.h5\n",
      "Epoch 24/30\n",
      "111/111 [==============================] - ETA: 1:51 - loss: 0.168 - ETA: 1:48 - loss: 0.160 - ETA: 1:47 - loss: 0.142 - ETA: 1:46 - loss: 0.152 - ETA: 1:45 - loss: 0.157 - ETA: 1:43 - loss: 0.160 - ETA: 1:42 - loss: 0.170 - ETA: 1:41 - loss: 0.178 - ETA: 1:40 - loss: 0.187 - ETA: 1:39 - loss: 0.196 - ETA: 1:38 - loss: 0.202 - ETA: 1:37 - loss: 0.208 - ETA: 1:35 - loss: 0.214 - ETA: 1:34 - loss: 0.219 - ETA: 1:33 - loss: 0.222 - ETA: 1:33 - loss: 0.224 - ETA: 1:31 - loss: 0.227 - ETA: 1:30 - loss: 0.230 - ETA: 1:29 - loss: 0.232 - ETA: 1:28 - loss: 0.235 - ETA: 1:27 - loss: 0.237 - ETA: 1:26 - loss: 0.239 - ETA: 1:25 - loss: 0.240 - ETA: 1:24 - loss: 0.241 - ETA: 1:23 - loss: 0.243 - ETA: 1:22 - loss: 0.244 - ETA: 1:21 - loss: 0.245 - ETA: 1:20 - loss: 0.245 - ETA: 1:19 - loss: 0.246 - ETA: 1:18 - loss: 0.246 - ETA: 1:17 - loss: 0.247 - ETA: 1:16 - loss: 0.247 - ETA: 1:15 - loss: 0.248 - ETA: 1:14 - loss: 0.248 - ETA: 1:13 - loss: 0.248 - ETA: 1:12 - loss: 0.249 - ETA: 1:11 - loss: 0.249 - ETA: 1:10 - loss: 0.249 - ETA: 1:09 - loss: 0.249 - ETA: 1:08 - loss: 0.250 - ETA: 1:08 - loss: 0.250 - ETA: 1:07 - loss: 0.250 - ETA: 1:06 - loss: 0.250 - ETA: 1:05 - loss: 0.251 - ETA: 1:04 - loss: 0.251 - ETA: 1:03 - loss: 0.251 - ETA: 1:02 - loss: 0.252 - ETA: 1:01 - loss: 0.252 - ETA: 1:00 - loss: 0.253 - ETA: 59s - loss: 0.253 - ETA: 58s - loss: 0.25 - ETA: 57s - loss: 0.25 - ETA: 56s - loss: 0.25 - ETA: 55s - loss: 0.25 - ETA: 54s - loss: 0.25 - ETA: 53s - loss: 0.25 - ETA: 52s - loss: 0.25 - ETA: 51s - loss: 0.25 - ETA: 50s - loss: 0.25 - ETA: 49s - loss: 0.25 - ETA: 48s - loss: 0.25 - ETA: 47s - loss: 0.25 - ETA: 46s - loss: 0.25 - ETA: 45s - loss: 0.25 - ETA: 44s - loss: 0.26 - ETA: 43s - loss: 0.26 - ETA: 42s - loss: 0.26 - ETA: 41s - loss: 0.26 - ETA: 40s - loss: 0.26 - ETA: 39s - loss: 0.26 - ETA: 38s - loss: 0.26 - ETA: 37s - loss: 0.26 - ETA: 36s - loss: 0.26 - ETA: 35s - loss: 0.26 - ETA: 35s - loss: 0.26 - ETA: 34s - loss: 0.26 - ETA: 33s - loss: 0.26 - ETA: 32s - loss: 0.26 - ETA: 31s - loss: 0.26 - ETA: 30s - loss: 0.26 - ETA: 29s - loss: 0.26 - ETA: 28s - loss: 0.26 - ETA: 27s - loss: 0.26 - ETA: 26s - loss: 0.26 - ETA: 25s - loss: 0.26 - ETA: 24s - loss: 0.26 - ETA: 23s - loss: 0.26 - ETA: 22s - loss: 0.26 - ETA: 21s - loss: 0.26 - ETA: 20s - loss: 0.26 - ETA: 19s - loss: 0.26 - ETA: 18s - loss: 0.26 - ETA: 17s - loss: 0.27 - ETA: 16s - loss: 0.27 - ETA: 15s - loss: 0.27 - ETA: 14s - loss: 0.27 - ETA: 13s - loss: 0.27 - ETA: 12s - loss: 0.27 - ETA: 11s - loss: 0.27 - ETA: 10s - loss: 0.27 - ETA: 9s - loss: 0.2732 - ETA: 8s - loss: 0.273 - ETA: 7s - loss: 0.274 - ETA: 6s - loss: 0.274 - ETA: 5s - loss: 0.274 - ETA: 4s - loss: 0.275 - ETA: 3s - loss: 0.275 - ETA: 2s - loss: 0.276 - ETA: 1s - loss: 0.276 - ETA: 0s - loss: 0.276 - ETA: 0s - loss: 0.277 - 110s 988ms/step - loss: 0.2777 - val_loss: 0.1984\n",
      "\n",
      "Epoch 00024: val_loss improved from 0.21572 to 0.19837, saving model to clean_notebooks\\injection_superposition.h5\n",
      "Epoch 25/30\n",
      "111/111 [==============================] - ETA: 1:54 - loss: 1.104 - ETA: 1:47 - loss: 0.884 - ETA: 1:45 - loss: 0.759 - ETA: 1:44 - loss: 0.702 - ETA: 1:44 - loss: 0.662 - ETA: 1:43 - loss: 0.636 - ETA: 1:42 - loss: 0.610 - ETA: 1:42 - loss: 0.588 - ETA: 1:41 - loss: 0.568 - ETA: 1:40 - loss: 0.549 - ETA: 1:39 - loss: 0.531 - ETA: 1:37 - loss: 0.520 - ETA: 1:36 - loss: 0.511 - ETA: 1:35 - loss: 0.504 - ETA: 1:34 - loss: 0.498 - ETA: 1:33 - loss: 0.492 - ETA: 1:32 - loss: 0.486 - ETA: 1:31 - loss: 0.480 - ETA: 1:30 - loss: 0.474 - ETA: 1:29 - loss: 0.468 - ETA: 1:28 - loss: 0.463 - ETA: 1:27 - loss: 0.458 - ETA: 1:26 - loss: 0.453 - ETA: 1:25 - loss: 0.448 - ETA: 1:24 - loss: 0.444 - ETA: 1:23 - loss: 0.439 - ETA: 1:22 - loss: 0.435 - ETA: 1:21 - loss: 0.431 - ETA: 1:20 - loss: 0.428 - ETA: 1:19 - loss: 0.425 - ETA: 1:18 - loss: 0.423 - ETA: 1:17 - loss: 0.421 - ETA: 1:16 - loss: 0.419 - ETA: 1:15 - loss: 0.417 - ETA: 1:14 - loss: 0.416 - ETA: 1:13 - loss: 0.414 - ETA: 1:12 - loss: 0.412 - ETA: 1:11 - loss: 0.410 - ETA: 1:10 - loss: 0.408 - ETA: 1:09 - loss: 0.406 - ETA: 1:08 - loss: 0.404 - ETA: 1:07 - loss: 0.402 - ETA: 1:06 - loss: 0.400 - ETA: 1:05 - loss: 0.398 - ETA: 1:04 - loss: 0.397 - ETA: 1:03 - loss: 0.395 - ETA: 1:02 - loss: 0.393 - ETA: 1:01 - loss: 0.391 - ETA: 1:00 - loss: 0.390 - ETA: 59s - loss: 0.388 - ETA: 58s - loss: 0.38 - ETA: 57s - loss: 0.38 - ETA: 56s - loss: 0.38 - ETA: 55s - loss: 0.38 - ETA: 54s - loss: 0.38 - ETA: 53s - loss: 0.38 - ETA: 52s - loss: 0.37 - ETA: 51s - loss: 0.37 - ETA: 50s - loss: 0.37 - ETA: 49s - loss: 0.37 - ETA: 48s - loss: 0.37 - ETA: 47s - loss: 0.37 - ETA: 46s - loss: 0.37 - ETA: 45s - loss: 0.37 - ETA: 44s - loss: 0.37 - ETA: 43s - loss: 0.37 - ETA: 42s - loss: 0.37 - ETA: 41s - loss: 0.37 - ETA: 40s - loss: 0.36 - ETA: 39s - loss: 0.36 - ETA: 38s - loss: 0.36 - ETA: 37s - loss: 0.36 - ETA: 36s - loss: 0.36 - ETA: 35s - loss: 0.36 - ETA: 34s - loss: 0.36 - ETA: 34s - loss: 0.36 - ETA: 33s - loss: 0.36 - ETA: 32s - loss: 0.36 - ETA: 31s - loss: 0.36 - ETA: 30s - loss: 0.36 - ETA: 29s - loss: 0.36 - ETA: 28s - loss: 0.36 - ETA: 27s - loss: 0.36 - ETA: 26s - loss: 0.36 - ETA: 25s - loss: 0.36 - ETA: 24s - loss: 0.36 - ETA: 23s - loss: 0.36 - ETA: 22s - loss: 0.36 - ETA: 21s - loss: 0.36 - ETA: 20s - loss: 0.36 - ETA: 19s - loss: 0.36 - ETA: 18s - loss: 0.35 - ETA: 17s - loss: 0.35 - ETA: 16s - loss: 0.35 - ETA: 15s - loss: 0.35 - ETA: 14s - loss: 0.35 - ETA: 13s - loss: 0.35 - ETA: 12s - loss: 0.35 - ETA: 11s - loss: 0.35 - ETA: 10s - loss: 0.35 - ETA: 9s - loss: 0.3576 - ETA: 8s - loss: 0.357 - ETA: 7s - loss: 0.357 - ETA: 6s - loss: 0.357 - ETA: 5s - loss: 0.357 - ETA: 4s - loss: 0.357 - ETA: 3s - loss: 0.357 - ETA: 2s - loss: 0.357 - ETA: 1s - loss: 0.357 - ETA: 0s - loss: 0.357 - ETA: 0s - loss: 0.357 - 110s 989ms/step - loss: 0.3572 - val_loss: 0.2041\n",
      "\n",
      "Epoch 00025: val_loss did not improve from 0.19837\n",
      "Epoch 26/30\n",
      "111/111 [==============================] - ETA: 1:47 - loss: 0.129 - ETA: 1:45 - loss: 0.183 - ETA: 1:45 - loss: 0.182 - ETA: 1:44 - loss: 0.191 - ETA: 1:43 - loss: 0.193 - ETA: 1:42 - loss: 0.190 - ETA: 1:41 - loss: 0.189 - ETA: 1:40 - loss: 0.192 - ETA: 1:39 - loss: 0.195 - ETA: 1:38 - loss: 0.197 - ETA: 1:37 - loss: 0.201 - ETA: 1:36 - loss: 0.204 - ETA: 1:35 - loss: 0.208 - ETA: 1:34 - loss: 0.214 - ETA: 1:33 - loss: 0.220 - ETA: 1:32 - loss: 0.227 - ETA: 1:31 - loss: 0.232 - ETA: 1:30 - loss: 0.239 - ETA: 1:29 - loss: 0.245 - ETA: 1:28 - loss: 0.250 - ETA: 1:27 - loss: 0.254 - ETA: 1:26 - loss: 0.259 - ETA: 1:25 - loss: 0.263 - ETA: 1:24 - loss: 0.266 - ETA: 1:23 - loss: 0.271 - ETA: 1:22 - loss: 0.275 - ETA: 1:21 - loss: 0.278 - ETA: 1:20 - loss: 0.281 - ETA: 1:19 - loss: 0.283 - ETA: 1:18 - loss: 0.285 - ETA: 1:17 - loss: 0.287 - ETA: 1:16 - loss: 0.288 - ETA: 1:15 - loss: 0.290 - ETA: 1:14 - loss: 0.291 - ETA: 1:13 - loss: 0.292 - ETA: 1:12 - loss: 0.293 - ETA: 1:11 - loss: 0.294 - ETA: 1:10 - loss: 0.295 - ETA: 1:09 - loss: 0.296 - ETA: 1:08 - loss: 0.297 - ETA: 1:07 - loss: 0.298 - ETA: 1:06 - loss: 0.299 - ETA: 1:06 - loss: 0.299 - ETA: 1:05 - loss: 0.300 - ETA: 1:04 - loss: 0.301 - ETA: 1:03 - loss: 0.301 - ETA: 1:02 - loss: 0.301 - ETA: 1:01 - loss: 0.302 - ETA: 1:00 - loss: 0.302 - ETA: 59s - loss: 0.303 - ETA: 58s - loss: 0.30 - ETA: 57s - loss: 0.30 - ETA: 56s - loss: 0.30 - ETA: 55s - loss: 0.30 - ETA: 54s - loss: 0.30 - ETA: 53s - loss: 0.30 - ETA: 52s - loss: 0.30 - ETA: 51s - loss: 0.30 - ETA: 50s - loss: 0.30 - ETA: 49s - loss: 0.30 - ETA: 48s - loss: 0.30 - ETA: 47s - loss: 0.31 - ETA: 46s - loss: 0.31 - ETA: 45s - loss: 0.31 - ETA: 44s - loss: 0.31 - ETA: 43s - loss: 0.31 - ETA: 42s - loss: 0.31 - ETA: 41s - loss: 0.31 - ETA: 40s - loss: 0.31 - ETA: 39s - loss: 0.31 - ETA: 38s - loss: 0.31 - ETA: 37s - loss: 0.31 - ETA: 36s - loss: 0.31 - ETA: 35s - loss: 0.31 - ETA: 35s - loss: 0.31 - ETA: 34s - loss: 0.31 - ETA: 33s - loss: 0.31 - ETA: 32s - loss: 0.31 - ETA: 31s - loss: 0.31 - ETA: 30s - loss: 0.31 - ETA: 29s - loss: 0.31 - ETA: 28s - loss: 0.31 - ETA: 27s - loss: 0.31 - ETA: 26s - loss: 0.31 - ETA: 25s - loss: 0.31 - ETA: 24s - loss: 0.31 - ETA: 23s - loss: 0.31 - ETA: 22s - loss: 0.31 - ETA: 21s - loss: 0.31 - ETA: 20s - loss: 0.31 - ETA: 19s - loss: 0.31 - ETA: 18s - loss: 0.31 - ETA: 17s - loss: 0.31 - ETA: 16s - loss: 0.31 - ETA: 15s - loss: 0.31 - ETA: 14s - loss: 0.31 - ETA: 13s - loss: 0.31 - ETA: 12s - loss: 0.31 - ETA: 11s - loss: 0.31 - ETA: 10s - loss: 0.31 - ETA: 9s - loss: 0.3158 - ETA: 8s - loss: 0.315 - ETA: 8s - loss: 0.315 - ETA: 7s - loss: 0.315 - ETA: 6s - loss: 0.315 - ETA: 5s - loss: 0.315 - ETA: 4s - loss: 0.315 - ETA: 3s - loss: 0.315 - ETA: 2s - loss: 0.315 - ETA: 1s - loss: 0.315 - ETA: 0s - loss: 0.315 - 115s 1s/step - loss: 0.3158 - val_loss: 0.2057\n",
      "\n",
      "Epoch 00026: val_loss did not improve from 0.19837\n",
      "Epoch 27/30\n",
      "111/111 [==============================] - ETA: 2:00 - loss: 0.412 - ETA: 2:24 - loss: 0.363 - ETA: 2:12 - loss: 0.500 - ETA: 2:04 - loss: 0.528 - ETA: 2:01 - loss: 0.537 - ETA: 1:59 - loss: 0.533 - ETA: 1:55 - loss: 0.523 - ETA: 1:54 - loss: 0.514 - ETA: 1:52 - loss: 0.504 - ETA: 1:50 - loss: 0.497 - ETA: 1:48 - loss: 0.488 - ETA: 1:46 - loss: 0.479 - ETA: 1:45 - loss: 0.472 - ETA: 1:43 - loss: 0.467 - ETA: 1:42 - loss: 0.461 - ETA: 1:41 - loss: 0.456 - ETA: 1:39 - loss: 0.450 - ETA: 1:38 - loss: 0.444 - ETA: 1:37 - loss: 0.439 - ETA: 1:36 - loss: 0.434 - ETA: 1:34 - loss: 0.429 - ETA: 1:34 - loss: 0.424 - ETA: 1:34 - loss: 0.420 - ETA: 1:33 - loss: 0.415 - ETA: 1:32 - loss: 0.411 - ETA: 1:32 - loss: 0.407 - ETA: 1:31 - loss: 0.404 - ETA: 1:30 - loss: 0.400 - ETA: 1:28 - loss: 0.397 - ETA: 1:27 - loss: 0.394 - ETA: 1:26 - loss: 0.391 - ETA: 1:25 - loss: 0.389 - ETA: 1:23 - loss: 0.387 - ETA: 1:22 - loss: 0.385 - ETA: 1:21 - loss: 0.383 - ETA: 1:20 - loss: 0.381 - ETA: 1:19 - loss: 0.379 - ETA: 1:18 - loss: 0.378 - ETA: 1:17 - loss: 0.376 - ETA: 1:15 - loss: 0.375 - ETA: 1:14 - loss: 0.374 - ETA: 1:13 - loss: 0.373 - ETA: 1:12 - loss: 0.372 - ETA: 1:11 - loss: 0.371 - ETA: 1:10 - loss: 0.370 - ETA: 1:09 - loss: 0.369 - ETA: 1:08 - loss: 0.368 - ETA: 1:06 - loss: 0.367 - ETA: 1:05 - loss: 0.366 - ETA: 1:04 - loss: 0.365 - ETA: 1:03 - loss: 0.364 - ETA: 1:02 - loss: 0.363 - ETA: 1:01 - loss: 0.362 - ETA: 1:00 - loss: 0.362 - ETA: 59s - loss: 0.361 - ETA: 58s - loss: 0.36 - ETA: 57s - loss: 0.36 - ETA: 56s - loss: 0.35 - ETA: 54s - loss: 0.35 - ETA: 53s - loss: 0.35 - ETA: 52s - loss: 0.35 - ETA: 51s - loss: 0.35 - ETA: 50s - loss: 0.35 - ETA: 49s - loss: 0.35 - ETA: 48s - loss: 0.35 - ETA: 47s - loss: 0.35 - ETA: 46s - loss: 0.35 - ETA: 45s - loss: 0.35 - ETA: 44s - loss: 0.35 - ETA: 43s - loss: 0.35 - ETA: 42s - loss: 0.35 - ETA: 41s - loss: 0.35 - ETA: 39s - loss: 0.35 - ETA: 38s - loss: 0.35 - ETA: 37s - loss: 0.35 - ETA: 36s - loss: 0.35 - ETA: 35s - loss: 0.35 - ETA: 34s - loss: 0.35 - ETA: 33s - loss: 0.35 - ETA: 32s - loss: 0.35 - ETA: 31s - loss: 0.35 - ETA: 30s - loss: 0.35 - ETA: 29s - loss: 0.34 - ETA: 28s - loss: 0.34 - ETA: 27s - loss: 0.34 - ETA: 26s - loss: 0.34 - ETA: 25s - loss: 0.34 - ETA: 24s - loss: 0.34 - ETA: 23s - loss: 0.34 - ETA: 22s - loss: 0.34 - ETA: 20s - loss: 0.34 - ETA: 19s - loss: 0.34 - ETA: 18s - loss: 0.34 - ETA: 17s - loss: 0.34 - ETA: 16s - loss: 0.34 - ETA: 15s - loss: 0.34 - ETA: 14s - loss: 0.34 - ETA: 13s - loss: 0.34 - ETA: 12s - loss: 0.34 - ETA: 11s - loss: 0.34 - ETA: 10s - loss: 0.34 - ETA: 9s - loss: 0.3468 - ETA: 8s - loss: 0.346 - ETA: 7s - loss: 0.346 - ETA: 6s - loss: 0.346 - ETA: 5s - loss: 0.345 - ETA: 4s - loss: 0.345 - ETA: 3s - loss: 0.345 - ETA: 2s - loss: 0.345 - ETA: 1s - loss: 0.345 - ETA: 0s - loss: 0.345 - 118s 1s/step - loss: 0.3448 - val_loss: 0.1912\n",
      "\n",
      "Epoch 00027: val_loss improved from 0.19837 to 0.19119, saving model to clean_notebooks\\injection_superposition.h5\n",
      "Epoch 28/30\n",
      "111/111 [==============================] - ETA: 1:50 - loss: 0.154 - ETA: 1:52 - loss: 0.167 - ETA: 1:51 - loss: 0.178 - ETA: 1:49 - loss: 0.211 - ETA: 1:48 - loss: 0.223 - ETA: 1:47 - loss: 0.272 - ETA: 1:46 - loss: 0.302 - ETA: 1:45 - loss: 0.319 - ETA: 1:45 - loss: 0.329 - ETA: 1:44 - loss: 0.351 - ETA: 1:43 - loss: 0.365 - ETA: 1:42 - loss: 0.374 - ETA: 1:41 - loss: 0.380 - ETA: 1:40 - loss: 0.384 - ETA: 1:39 - loss: 0.386 - ETA: 1:38 - loss: 0.388 - ETA: 1:37 - loss: 0.388 - ETA: 1:36 - loss: 0.388 - ETA: 1:35 - loss: 0.387 - ETA: 1:33 - loss: 0.388 - ETA: 1:32 - loss: 0.388 - ETA: 1:31 - loss: 0.391 - ETA: 1:30 - loss: 0.393 - ETA: 1:29 - loss: 0.396 - ETA: 1:28 - loss: 0.397 - ETA: 1:27 - loss: 0.398 - ETA: 1:26 - loss: 0.399 - ETA: 1:25 - loss: 0.399 - ETA: 1:24 - loss: 0.400 - ETA: 1:23 - loss: 0.400 - ETA: 1:22 - loss: 0.401 - ETA: 1:21 - loss: 0.401 - ETA: 1:20 - loss: 0.400 - ETA: 1:19 - loss: 0.400 - ETA: 1:18 - loss: 0.400 - ETA: 1:17 - loss: 0.399 - ETA: 1:16 - loss: 0.398 - ETA: 1:15 - loss: 0.397 - ETA: 1:14 - loss: 0.397 - ETA: 1:13 - loss: 0.396 - ETA: 1:12 - loss: 0.395 - ETA: 1:11 - loss: 0.395 - ETA: 1:10 - loss: 0.394 - ETA: 1:09 - loss: 0.394 - ETA: 1:08 - loss: 0.393 - ETA: 1:07 - loss: 0.392 - ETA: 1:06 - loss: 0.392 - ETA: 1:05 - loss: 0.391 - ETA: 1:04 - loss: 0.391 - ETA: 1:02 - loss: 0.390 - ETA: 1:01 - loss: 0.389 - ETA: 1:00 - loss: 0.389 - ETA: 59s - loss: 0.389 - ETA: 58s - loss: 0.38 - ETA: 57s - loss: 0.38 - ETA: 56s - loss: 0.38 - ETA: 55s - loss: 0.38 - ETA: 54s - loss: 0.38 - ETA: 53s - loss: 0.38 - ETA: 52s - loss: 0.38 - ETA: 51s - loss: 0.38 - ETA: 50s - loss: 0.38 - ETA: 49s - loss: 0.38 - ETA: 48s - loss: 0.38 - ETA: 47s - loss: 0.38 - ETA: 46s - loss: 0.38 - ETA: 45s - loss: 0.38 - ETA: 44s - loss: 0.38 - ETA: 43s - loss: 0.38 - ETA: 42s - loss: 0.38 - ETA: 41s - loss: 0.38 - ETA: 40s - loss: 0.38 - ETA: 39s - loss: 0.38 - ETA: 38s - loss: 0.38 - ETA: 37s - loss: 0.38 - ETA: 36s - loss: 0.38 - ETA: 35s - loss: 0.38 - ETA: 34s - loss: 0.38 - ETA: 33s - loss: 0.37 - ETA: 31s - loss: 0.37 - ETA: 30s - loss: 0.37 - ETA: 29s - loss: 0.37 - ETA: 28s - loss: 0.37 - ETA: 27s - loss: 0.37 - ETA: 26s - loss: 0.37 - ETA: 25s - loss: 0.37 - ETA: 24s - loss: 0.37 - ETA: 23s - loss: 0.37 - ETA: 22s - loss: 0.37 - ETA: 21s - loss: 0.37 - ETA: 20s - loss: 0.37 - ETA: 19s - loss: 0.37 - ETA: 18s - loss: 0.37 - ETA: 17s - loss: 0.37 - ETA: 16s - loss: 0.37 - ETA: 15s - loss: 0.37 - ETA: 14s - loss: 0.37 - ETA: 13s - loss: 0.37 - ETA: 12s - loss: 0.37 - ETA: 11s - loss: 0.37 - ETA: 10s - loss: 0.37 - ETA: 9s - loss: 0.3711 - ETA: 8s - loss: 0.370 - ETA: 7s - loss: 0.370 - ETA: 6s - loss: 0.370 - ETA: 5s - loss: 0.369 - ETA: 4s - loss: 0.369 - ETA: 3s - loss: 0.369 - ETA: 2s - loss: 0.368 - ETA: 1s - loss: 0.368 - ETA: 0s - loss: 0.368 - 116s 1s/step - loss: 0.3677 - val_loss: 0.1926\n",
      "\n",
      "Epoch 00028: val_loss did not improve from 0.19119\n",
      "Epoch 29/30\n",
      "111/111 [==============================] - ETA: 2:07 - loss: 0.243 - ETA: 2:01 - loss: 0.387 - ETA: 1:55 - loss: 0.418 - ETA: 1:53 - loss: 0.412 - ETA: 1:51 - loss: 0.404 - ETA: 1:50 - loss: 0.412 - ETA: 1:48 - loss: 0.412 - ETA: 1:47 - loss: 0.414 - ETA: 1:45 - loss: 0.413 - ETA: 1:44 - loss: 0.411 - ETA: 1:43 - loss: 0.407 - ETA: 1:42 - loss: 0.403 - ETA: 1:41 - loss: 0.398 - ETA: 1:40 - loss: 0.395 - ETA: 1:39 - loss: 0.391 - ETA: 1:38 - loss: 0.389 - ETA: 1:37 - loss: 0.386 - ETA: 1:36 - loss: 0.383 - ETA: 1:35 - loss: 0.382 - ETA: 1:34 - loss: 0.381 - ETA: 1:33 - loss: 0.379 - ETA: 1:32 - loss: 0.378 - ETA: 1:31 - loss: 0.376 - ETA: 1:30 - loss: 0.375 - ETA: 1:29 - loss: 0.373 - ETA: 1:28 - loss: 0.371 - ETA: 1:27 - loss: 0.370 - ETA: 1:26 - loss: 0.370 - ETA: 1:25 - loss: 0.370 - ETA: 1:24 - loss: 0.369 - ETA: 1:22 - loss: 0.369 - ETA: 1:21 - loss: 0.368 - ETA: 1:20 - loss: 0.367 - ETA: 1:20 - loss: 0.366 - ETA: 1:19 - loss: 0.365 - ETA: 1:19 - loss: 0.364 - ETA: 1:18 - loss: 0.363 - ETA: 1:17 - loss: 0.362 - ETA: 1:16 - loss: 0.361 - ETA: 1:15 - loss: 0.361 - ETA: 1:14 - loss: 0.360 - ETA: 1:13 - loss: 0.359 - ETA: 1:12 - loss: 0.358 - ETA: 1:12 - loss: 0.357 - ETA: 1:12 - loss: 0.356 - ETA: 1:12 - loss: 0.356 - ETA: 1:12 - loss: 0.355 - ETA: 1:11 - loss: 0.355 - ETA: 1:10 - loss: 0.354 - ETA: 1:09 - loss: 0.353 - ETA: 1:08 - loss: 0.353 - ETA: 1:07 - loss: 0.352 - ETA: 1:06 - loss: 0.351 - ETA: 1:05 - loss: 0.351 - ETA: 1:04 - loss: 0.350 - ETA: 1:03 - loss: 0.350 - ETA: 1:02 - loss: 0.349 - ETA: 1:01 - loss: 0.349 - ETA: 1:00 - loss: 0.348 - ETA: 59s - loss: 0.348 - ETA: 58s - loss: 0.34 - ETA: 57s - loss: 0.34 - ETA: 56s - loss: 0.34 - ETA: 55s - loss: 0.34 - ETA: 54s - loss: 0.34 - ETA: 52s - loss: 0.34 - ETA: 51s - loss: 0.34 - ETA: 50s - loss: 0.34 - ETA: 49s - loss: 0.34 - ETA: 48s - loss: 0.34 - ETA: 47s - loss: 0.34 - ETA: 46s - loss: 0.34 - ETA: 44s - loss: 0.34 - ETA: 43s - loss: 0.34 - ETA: 42s - loss: 0.34 - ETA: 41s - loss: 0.34 - ETA: 40s - loss: 0.34 - ETA: 38s - loss: 0.34 - ETA: 37s - loss: 0.34 - ETA: 36s - loss: 0.34 - ETA: 35s - loss: 0.34 - ETA: 34s - loss: 0.34 - ETA: 32s - loss: 0.34 - ETA: 31s - loss: 0.34 - ETA: 30s - loss: 0.33 - ETA: 29s - loss: 0.33 - ETA: 28s - loss: 0.33 - ETA: 26s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 24s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 17s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 13s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 9s - loss: 0.3347 - ETA: 8s - loss: 0.334 - ETA: 6s - loss: 0.334 - ETA: 5s - loss: 0.334 - ETA: 4s - loss: 0.334 - ETA: 3s - loss: 0.333 - ETA: 2s - loss: 0.333 - ETA: 1s - loss: 0.333 - ETA: 0s - loss: 0.333 - 130s 1s/step - loss: 0.3334 - val_loss: 0.1852\n",
      "\n",
      "Epoch 00029: val_loss improved from 0.19119 to 0.18518, saving model to clean_notebooks\\injection_superposition.h5\n",
      "Epoch 30/30\n",
      "111/111 [==============================] - ETA: 2:07 - loss: 0.084 - ETA: 2:01 - loss: 0.137 - ETA: 1:55 - loss: 0.155 - ETA: 1:53 - loss: 0.175 - ETA: 1:52 - loss: 0.183 - ETA: 1:50 - loss: 0.196 - ETA: 1:49 - loss: 0.207 - ETA: 1:48 - loss: 0.212 - ETA: 1:47 - loss: 0.214 - ETA: 1:46 - loss: 0.214 - ETA: 1:45 - loss: 0.216 - ETA: 1:44 - loss: 0.220 - ETA: 1:43 - loss: 0.223 - ETA: 1:41 - loss: 0.230 - ETA: 1:40 - loss: 0.237 - ETA: 1:39 - loss: 0.242 - ETA: 1:38 - loss: 0.246 - ETA: 1:37 - loss: 0.250 - ETA: 1:36 - loss: 0.254 - ETA: 1:35 - loss: 0.257 - ETA: 1:34 - loss: 0.259 - ETA: 1:33 - loss: 0.261 - ETA: 1:31 - loss: 0.263 - ETA: 1:30 - loss: 0.265 - ETA: 1:30 - loss: 0.267 - ETA: 1:29 - loss: 0.268 - ETA: 1:29 - loss: 0.269 - ETA: 1:28 - loss: 0.270 - ETA: 1:27 - loss: 0.271 - ETA: 1:26 - loss: 0.272 - ETA: 1:25 - loss: 0.273 - ETA: 1:23 - loss: 0.273 - ETA: 1:22 - loss: 0.274 - ETA: 1:21 - loss: 0.274 - ETA: 1:19 - loss: 0.274 - ETA: 1:18 - loss: 0.275 - ETA: 1:17 - loss: 0.276 - ETA: 1:17 - loss: 0.277 - ETA: 1:16 - loss: 0.278 - ETA: 1:15 - loss: 0.278 - ETA: 1:14 - loss: 0.279 - ETA: 1:13 - loss: 0.280 - ETA: 1:12 - loss: 0.280 - ETA: 1:11 - loss: 0.281 - ETA: 1:10 - loss: 0.282 - ETA: 1:09 - loss: 0.283 - ETA: 1:07 - loss: 0.284 - ETA: 1:06 - loss: 0.284 - ETA: 1:05 - loss: 0.285 - ETA: 1:04 - loss: 0.286 - ETA: 1:03 - loss: 0.286 - ETA: 1:01 - loss: 0.287 - ETA: 1:00 - loss: 0.287 - ETA: 59s - loss: 0.288 - ETA: 58s - loss: 0.28 - ETA: 57s - loss: 0.28 - ETA: 55s - loss: 0.28 - ETA: 54s - loss: 0.28 - ETA: 53s - loss: 0.28 - ETA: 52s - loss: 0.29 - ETA: 51s - loss: 0.29 - ETA: 50s - loss: 0.29 - ETA: 49s - loss: 0.29 - ETA: 47s - loss: 0.29 - ETA: 46s - loss: 0.29 - ETA: 45s - loss: 0.29 - ETA: 44s - loss: 0.29 - ETA: 43s - loss: 0.29 - ETA: 42s - loss: 0.29 - ETA: 41s - loss: 0.29 - ETA: 40s - loss: 0.29 - ETA: 39s - loss: 0.29 - ETA: 38s - loss: 0.29 - ETA: 37s - loss: 0.29 - ETA: 36s - loss: 0.29 - ETA: 35s - loss: 0.29 - ETA: 34s - loss: 0.29 - ETA: 33s - loss: 0.29 - ETA: 32s - loss: 0.29 - ETA: 31s - loss: 0.29 - ETA: 29s - loss: 0.29 - ETA: 28s - loss: 0.29 - ETA: 27s - loss: 0.29 - ETA: 26s - loss: 0.29 - ETA: 25s - loss: 0.29 - ETA: 24s - loss: 0.29 - ETA: 23s - loss: 0.30 - ETA: 22s - loss: 0.30 - ETA: 21s - loss: 0.30 - ETA: 20s - loss: 0.30 - ETA: 19s - loss: 0.30 - ETA: 18s - loss: 0.30 - ETA: 17s - loss: 0.30 - ETA: 16s - loss: 0.30 - ETA: 15s - loss: 0.30 - ETA: 14s - loss: 0.30 - ETA: 13s - loss: 0.30 - ETA: 12s - loss: 0.30 - ETA: 11s - loss: 0.30 - ETA: 10s - loss: 0.30 - ETA: 9s - loss: 0.3040 - ETA: 8s - loss: 0.304 - ETA: 7s - loss: 0.304 - ETA: 6s - loss: 0.304 - ETA: 5s - loss: 0.304 - ETA: 4s - loss: 0.304 - ETA: 3s - loss: 0.304 - ETA: 2s - loss: 0.305 - ETA: 1s - loss: 0.305 - ETA: 0s - loss: 0.305 - ETA: 0s - loss: 0.305 - 110s 989ms/step - loss: 0.3057 - val_loss: 0.1794\n",
      "\n",
      "Epoch 00030: val_loss improved from 0.18518 to 0.17939, saving model to clean_notebooks\\injection_superposition.h5\n",
      "4/4 [==============================] - ETA: 1s - loss: 0.165 - ETA: 1s - loss: 0.180 - ETA: 0s - loss: 0.160 - ETA: 0s - loss: 0.190 - 2s 458ms/step - loss: 0.1903\n",
      "Val Score:  0.19034282863140106\n",
      "====================================================================================\n",
      "\n",
      "\n",
      "Training on Fold:  2\n",
      "Epoch 1/30\n",
      "111/111 [==============================] - ETA: 5:19 - loss: 0.297 - ETA: 1:52 - loss: 0.592 - ETA: 1:51 - loss: 0.684 - ETA: 1:49 - loss: 0.698 - ETA: 1:45 - loss: 0.708 - ETA: 1:43 - loss: 0.719 - ETA: 1:41 - loss: 0.724 - ETA: 1:39 - loss: 0.738 - ETA: 1:37 - loss: 0.744 - ETA: 1:36 - loss: 0.770 - ETA: 1:34 - loss: 0.792 - ETA: 1:33 - loss: 0.809 - ETA: 1:32 - loss: 0.820 - ETA: 1:31 - loss: 0.830 - ETA: 1:30 - loss: 0.837 - ETA: 1:29 - loss: 0.841 - ETA: 1:28 - loss: 0.842 - ETA: 1:27 - loss: 0.841 - ETA: 1:26 - loss: 0.840 - ETA: 1:25 - loss: 0.838 - ETA: 1:25 - loss: 0.840 - ETA: 1:24 - loss: 0.842 - ETA: 1:23 - loss: 0.843 - ETA: 1:22 - loss: 0.845 - ETA: 1:21 - loss: 0.846 - ETA: 1:21 - loss: 0.846 - ETA: 1:20 - loss: 0.846 - ETA: 1:19 - loss: 0.846 - ETA: 1:19 - loss: 0.846 - ETA: 1:18 - loss: 0.845 - ETA: 1:18 - loss: 0.846 - ETA: 1:18 - loss: 0.847 - ETA: 1:18 - loss: 0.849 - ETA: 1:18 - loss: 0.851 - ETA: 1:18 - loss: 0.852 - ETA: 1:17 - loss: 0.853 - ETA: 1:16 - loss: 0.854 - ETA: 1:16 - loss: 0.854 - ETA: 1:15 - loss: 0.854 - ETA: 1:14 - loss: 0.854 - ETA: 1:13 - loss: 0.855 - ETA: 1:12 - loss: 0.855 - ETA: 1:11 - loss: 0.855 - ETA: 1:10 - loss: 0.855 - ETA: 1:09 - loss: 0.855 - ETA: 1:09 - loss: 0.855 - ETA: 1:08 - loss: 0.854 - ETA: 1:07 - loss: 0.854 - ETA: 1:06 - loss: 0.854 - ETA: 1:05 - loss: 0.854 - ETA: 1:04 - loss: 0.854 - ETA: 1:03 - loss: 0.854 - ETA: 1:02 - loss: 0.854 - ETA: 1:01 - loss: 0.854 - ETA: 1:00 - loss: 0.853 - ETA: 59s - loss: 0.853 - ETA: 58s - loss: 0.85 - ETA: 57s - loss: 0.85 - ETA: 56s - loss: 0.85 - ETA: 55s - loss: 0.85 - ETA: 54s - loss: 0.85 - ETA: 53s - loss: 0.85 - ETA: 51s - loss: 0.85 - ETA: 50s - loss: 0.85 - ETA: 49s - loss: 0.85 - ETA: 48s - loss: 0.85 - ETA: 47s - loss: 0.85 - ETA: 46s - loss: 0.84 - ETA: 45s - loss: 0.84 - ETA: 44s - loss: 0.84 - ETA: 43s - loss: 0.84 - ETA: 42s - loss: 0.84 - ETA: 41s - loss: 0.84 - ETA: 40s - loss: 0.84 - ETA: 39s - loss: 0.84 - ETA: 38s - loss: 0.84 - ETA: 37s - loss: 0.84 - ETA: 35s - loss: 0.84 - ETA: 34s - loss: 0.84 - ETA: 33s - loss: 0.84 - ETA: 32s - loss: 0.84 - ETA: 31s - loss: 0.84 - ETA: 30s - loss: 0.84 - ETA: 28s - loss: 0.84 - ETA: 27s - loss: 0.84 - ETA: 26s - loss: 0.83 - ETA: 25s - loss: 0.83 - ETA: 24s - loss: 0.83 - ETA: 23s - loss: 0.83 - ETA: 22s - loss: 0.83 - ETA: 21s - loss: 0.83 - ETA: 20s - loss: 0.83 - ETA: 19s - loss: 0.83 - ETA: 18s - loss: 0.83 - ETA: 16s - loss: 0.83 - ETA: 15s - loss: 0.83 - ETA: 14s - loss: 0.83 - ETA: 13s - loss: 0.83 - ETA: 12s - loss: 0.83 - ETA: 11s - loss: 0.83 - ETA: 10s - loss: 0.83 - ETA: 9s - loss: 0.8295 - ETA: 8s - loss: 0.828 - ETA: 7s - loss: 0.828 - ETA: 6s - loss: 0.827 - ETA: 5s - loss: 0.826 - ETA: 4s - loss: 0.826 - ETA: 3s - loss: 0.825 - ETA: 2s - loss: 0.824 - ETA: 1s - loss: 0.823 - ETA: 0s - loss: 0.823 - 119s 1s/step - loss: 0.8223 - val_loss: 0.5243\n",
      "\n",
      "Epoch 00001: val_loss did not improve from 0.17939\n",
      "Epoch 2/30\n",
      "111/111 [==============================] - ETA: 1:43 - loss: 0.258 - ETA: 1:40 - loss: 0.307 - ETA: 1:41 - loss: 0.343 - ETA: 1:40 - loss: 0.344 - ETA: 1:39 - loss: 0.341 - ETA: 1:37 - loss: 0.339 - ETA: 1:36 - loss: 0.336 - ETA: 1:35 - loss: 0.336 - ETA: 1:35 - loss: 0.337 - ETA: 1:34 - loss: 0.338 - ETA: 1:34 - loss: 0.340 - ETA: 1:33 - loss: 0.344 - ETA: 1:33 - loss: 0.349 - ETA: 1:33 - loss: 0.356 - ETA: 1:32 - loss: 0.361 - ETA: 1:32 - loss: 0.365 - ETA: 1:31 - loss: 0.369 - ETA: 1:31 - loss: 0.373 - ETA: 1:31 - loss: 0.376 - ETA: 1:31 - loss: 0.380 - ETA: 1:30 - loss: 0.384 - ETA: 1:30 - loss: 0.386 - ETA: 1:29 - loss: 0.389 - ETA: 1:29 - loss: 0.393 - ETA: 1:28 - loss: 0.396 - ETA: 1:28 - loss: 0.399 - ETA: 1:27 - loss: 0.402 - ETA: 1:26 - loss: 0.405 - ETA: 1:25 - loss: 0.409 - ETA: 1:25 - loss: 0.413 - ETA: 1:24 - loss: 0.418 - ETA: 1:23 - loss: 0.422 - ETA: 1:22 - loss: 0.425 - ETA: 1:21 - loss: 0.428 - ETA: 1:20 - loss: 0.432 - ETA: 1:19 - loss: 0.435 - ETA: 1:18 - loss: 0.438 - ETA: 1:17 - loss: 0.441 - ETA: 1:16 - loss: 0.444 - ETA: 1:15 - loss: 0.447 - ETA: 1:15 - loss: 0.450 - ETA: 1:14 - loss: 0.452 - ETA: 1:13 - loss: 0.454 - ETA: 1:12 - loss: 0.456 - ETA: 1:11 - loss: 0.458 - ETA: 1:10 - loss: 0.460 - ETA: 1:09 - loss: 0.461 - ETA: 1:08 - loss: 0.463 - ETA: 1:07 - loss: 0.465 - ETA: 1:06 - loss: 0.466 - ETA: 1:05 - loss: 0.468 - ETA: 1:04 - loss: 0.469 - ETA: 1:03 - loss: 0.470 - ETA: 1:01 - loss: 0.471 - ETA: 1:00 - loss: 0.472 - ETA: 59s - loss: 0.473 - ETA: 58s - loss: 0.47 - ETA: 57s - loss: 0.47 - ETA: 56s - loss: 0.47 - ETA: 55s - loss: 0.47 - ETA: 53s - loss: 0.47 - ETA: 52s - loss: 0.47 - ETA: 51s - loss: 0.48 - ETA: 50s - loss: 0.48 - ETA: 49s - loss: 0.48 - ETA: 48s - loss: 0.48 - ETA: 46s - loss: 0.48 - ETA: 45s - loss: 0.48 - ETA: 44s - loss: 0.48 - ETA: 43s - loss: 0.48 - ETA: 42s - loss: 0.48 - ETA: 41s - loss: 0.48 - ETA: 40s - loss: 0.48 - ETA: 38s - loss: 0.48 - ETA: 37s - loss: 0.48 - ETA: 36s - loss: 0.48 - ETA: 35s - loss: 0.48 - ETA: 34s - loss: 0.48 - ETA: 33s - loss: 0.48 - ETA: 32s - loss: 0.48 - ETA: 31s - loss: 0.48 - ETA: 30s - loss: 0.48 - ETA: 28s - loss: 0.48 - ETA: 27s - loss: 0.48 - ETA: 26s - loss: 0.48 - ETA: 25s - loss: 0.48 - ETA: 24s - loss: 0.48 - ETA: 23s - loss: 0.48 - ETA: 22s - loss: 0.48 - ETA: 21s - loss: 0.48 - ETA: 20s - loss: 0.48 - ETA: 19s - loss: 0.48 - ETA: 18s - loss: 0.48 - ETA: 17s - loss: 0.48 - ETA: 16s - loss: 0.48 - ETA: 15s - loss: 0.48 - ETA: 14s - loss: 0.48 - ETA: 13s - loss: 0.48 - ETA: 12s - loss: 0.48 - ETA: 11s - loss: 0.48 - ETA: 10s - loss: 0.48 - ETA: 9s - loss: 0.4870 - ETA: 8s - loss: 0.487 - ETA: 7s - loss: 0.487 - ETA: 6s - loss: 0.487 - ETA: 5s - loss: 0.487 - ETA: 4s - loss: 0.487 - ETA: 3s - loss: 0.487 - ETA: 2s - loss: 0.488 - ETA: 1s - loss: 0.488 - ETA: 0s - loss: 0.488 - 116s 1s/step - loss: 0.4888 - val_loss: 0.4117\n",
      "\n",
      "Epoch 00002: val_loss did not improve from 0.17939\n",
      "Epoch 3/30\n",
      "111/111 [==============================] - ETA: 2:08 - loss: 0.580 - ETA: 2:06 - loss: 0.528 - ETA: 2:03 - loss: 0.486 - ETA: 2:01 - loss: 0.450 - ETA: 2:00 - loss: 0.426 - ETA: 1:58 - loss: 0.406 - ETA: 1:56 - loss: 0.397 - ETA: 1:55 - loss: 0.390 - ETA: 1:54 - loss: 0.386 - ETA: 1:53 - loss: 0.384 - ETA: 1:52 - loss: 0.384 - ETA: 1:51 - loss: 0.385 - ETA: 1:50 - loss: 0.385 - ETA: 1:49 - loss: 0.383 - ETA: 1:48 - loss: 0.382 - ETA: 1:47 - loss: 0.380 - ETA: 1:45 - loss: 0.379 - ETA: 1:44 - loss: 0.380 - ETA: 1:43 - loss: 0.380 - ETA: 1:42 - loss: 0.381 - ETA: 1:41 - loss: 0.381 - ETA: 1:40 - loss: 0.381 - ETA: 1:39 - loss: 0.381 - ETA: 1:38 - loss: 0.382 - ETA: 1:36 - loss: 0.383 - ETA: 1:35 - loss: 0.383 - ETA: 1:34 - loss: 0.385 - ETA: 1:33 - loss: 0.388 - ETA: 1:32 - loss: 0.390 - ETA: 1:31 - loss: 0.392 - ETA: 1:29 - loss: 0.394 - ETA: 1:28 - loss: 0.395 - ETA: 1:27 - loss: 0.396 - ETA: 1:25 - loss: 0.398 - ETA: 1:24 - loss: 0.399 - ETA: 1:23 - loss: 0.400 - ETA: 1:21 - loss: 0.401 - ETA: 1:20 - loss: 0.403 - ETA: 1:19 - loss: 0.404 - ETA: 1:17 - loss: 0.405 - ETA: 1:16 - loss: 0.406 - ETA: 1:14 - loss: 0.407 - ETA: 1:13 - loss: 0.407 - ETA: 1:12 - loss: 0.408 - ETA: 1:10 - loss: 0.409 - ETA: 1:09 - loss: 0.410 - ETA: 1:08 - loss: 0.410 - ETA: 1:06 - loss: 0.411 - ETA: 1:05 - loss: 0.411 - ETA: 1:04 - loss: 0.412 - ETA: 1:03 - loss: 0.412 - ETA: 1:02 - loss: 0.412 - ETA: 1:00 - loss: 0.412 - ETA: 59s - loss: 0.413 - ETA: 58s - loss: 0.41 - ETA: 57s - loss: 0.41 - ETA: 56s - loss: 0.41 - ETA: 54s - loss: 0.41 - ETA: 53s - loss: 0.41 - ETA: 52s - loss: 0.41 - ETA: 51s - loss: 0.41 - ETA: 50s - loss: 0.41 - ETA: 49s - loss: 0.41 - ETA: 48s - loss: 0.41 - ETA: 46s - loss: 0.41 - ETA: 45s - loss: 0.41 - ETA: 44s - loss: 0.41 - ETA: 43s - loss: 0.41 - ETA: 42s - loss: 0.41 - ETA: 41s - loss: 0.41 - ETA: 40s - loss: 0.41 - ETA: 39s - loss: 0.41 - ETA: 38s - loss: 0.41 - ETA: 37s - loss: 0.41 - ETA: 36s - loss: 0.41 - ETA: 35s - loss: 0.41 - ETA: 34s - loss: 0.41 - ETA: 33s - loss: 0.42 - ETA: 32s - loss: 0.42 - ETA: 31s - loss: 0.42 - ETA: 30s - loss: 0.42 - ETA: 29s - loss: 0.42 - ETA: 28s - loss: 0.42 - ETA: 27s - loss: 0.42 - ETA: 26s - loss: 0.42 - ETA: 25s - loss: 0.42 - ETA: 24s - loss: 0.42 - ETA: 23s - loss: 0.42 - ETA: 22s - loss: 0.42 - ETA: 21s - loss: 0.42 - ETA: 20s - loss: 0.42 - ETA: 19s - loss: 0.42 - ETA: 18s - loss: 0.42 - ETA: 17s - loss: 0.42 - ETA: 16s - loss: 0.42 - ETA: 15s - loss: 0.42 - ETA: 14s - loss: 0.42 - ETA: 13s - loss: 0.42 - ETA: 12s - loss: 0.43 - ETA: 11s - loss: 0.43 - ETA: 10s - loss: 0.43 - ETA: 9s - loss: 0.4317 - ETA: 8s - loss: 0.432 - ETA: 7s - loss: 0.432 - ETA: 6s - loss: 0.433 - ETA: 5s - loss: 0.433 - ETA: 4s - loss: 0.434 - ETA: 3s - loss: 0.434 - ETA: 2s - loss: 0.434 - ETA: 1s - loss: 0.435 - ETA: 0s - loss: 0.435 - 122s 1s/step - loss: 0.4357 - val_loss: 0.3704\n",
      "\n",
      "Epoch 00003: val_loss did not improve from 0.17939\n",
      "Epoch 4/30\n",
      "111/111 [==============================] - ETA: 2:19 - loss: 0.280 - ETA: 2:08 - loss: 0.283 - ETA: 2:03 - loss: 0.328 - ETA: 2:01 - loss: 0.332 - ETA: 1:59 - loss: 0.324 - ETA: 1:57 - loss: 0.320 - ETA: 1:55 - loss: 0.316 - ETA: 1:54 - loss: 0.313 - ETA: 1:53 - loss: 0.311 - ETA: 1:52 - loss: 0.311 - ETA: 1:50 - loss: 0.315 - ETA: 1:48 - loss: 0.318 - ETA: 1:46 - loss: 0.322 - ETA: 1:44 - loss: 0.326 - ETA: 1:42 - loss: 0.329 - ETA: 1:41 - loss: 0.333 - ETA: 1:39 - loss: 0.336 - ETA: 1:37 - loss: 0.339 - ETA: 1:36 - loss: 0.341 - ETA: 1:35 - loss: 0.343 - ETA: 1:33 - loss: 0.345 - ETA: 1:32 - loss: 0.346 - ETA: 1:31 - loss: 0.347 - ETA: 1:29 - loss: 0.348 - ETA: 1:28 - loss: 0.349 - ETA: 1:27 - loss: 0.349 - ETA: 1:26 - loss: 0.349 - ETA: 1:25 - loss: 0.349 - ETA: 1:23 - loss: 0.349 - ETA: 1:22 - loss: 0.349 - ETA: 1:21 - loss: 0.351 - ETA: 1:20 - loss: 0.352 - ETA: 1:19 - loss: 0.354 - ETA: 1:18 - loss: 0.355 - ETA: 1:17 - loss: 0.356 - ETA: 1:16 - loss: 0.356 - ETA: 1:14 - loss: 0.357 - ETA: 1:13 - loss: 0.358 - ETA: 1:12 - loss: 0.358 - ETA: 1:11 - loss: 0.359 - ETA: 1:10 - loss: 0.360 - ETA: 1:09 - loss: 0.361 - ETA: 1:08 - loss: 0.362 - ETA: 1:07 - loss: 0.363 - ETA: 1:06 - loss: 0.364 - ETA: 1:05 - loss: 0.365 - ETA: 1:04 - loss: 0.366 - ETA: 1:03 - loss: 0.367 - ETA: 1:02 - loss: 0.367 - ETA: 1:01 - loss: 0.368 - ETA: 1:00 - loss: 0.369 - ETA: 1:00 - loss: 0.370 - ETA: 59s - loss: 0.371 - ETA: 58s - loss: 0.37 - ETA: 57s - loss: 0.37 - ETA: 56s - loss: 0.37 - ETA: 56s - loss: 0.37 - ETA: 55s - loss: 0.37 - ETA: 54s - loss: 0.37 - ETA: 53s - loss: 0.37 - ETA: 52s - loss: 0.37 - ETA: 51s - loss: 0.37 - ETA: 51s - loss: 0.38 - ETA: 50s - loss: 0.38 - ETA: 49s - loss: 0.38 - ETA: 48s - loss: 0.38 - ETA: 47s - loss: 0.38 - ETA: 46s - loss: 0.38 - ETA: 45s - loss: 0.38 - ETA: 44s - loss: 0.38 - ETA: 43s - loss: 0.38 - ETA: 42s - loss: 0.38 - ETA: 41s - loss: 0.38 - ETA: 40s - loss: 0.39 - ETA: 39s - loss: 0.39 - ETA: 38s - loss: 0.39 - ETA: 37s - loss: 0.39 - ETA: 36s - loss: 0.39 - ETA: 35s - loss: 0.39 - ETA: 34s - loss: 0.39 - ETA: 33s - loss: 0.39 - ETA: 32s - loss: 0.39 - ETA: 31s - loss: 0.39 - ETA: 30s - loss: 0.39 - ETA: 28s - loss: 0.39 - ETA: 27s - loss: 0.39 - ETA: 26s - loss: 0.39 - ETA: 25s - loss: 0.40 - ETA: 24s - loss: 0.40 - ETA: 23s - loss: 0.40 - ETA: 22s - loss: 0.40 - ETA: 21s - loss: 0.40 - ETA: 20s - loss: 0.40 - ETA: 19s - loss: 0.40 - ETA: 18s - loss: 0.40 - ETA: 16s - loss: 0.40 - ETA: 15s - loss: 0.40 - ETA: 14s - loss: 0.40 - ETA: 13s - loss: 0.40 - ETA: 12s - loss: 0.40 - ETA: 11s - loss: 0.40 - ETA: 10s - loss: 0.40 - ETA: 8s - loss: 0.4073 - ETA: 7s - loss: 0.407 - ETA: 6s - loss: 0.408 - ETA: 5s - loss: 0.408 - ETA: 4s - loss: 0.408 - ETA: 3s - loss: 0.409 - ETA: 2s - loss: 0.409 - ETA: 1s - loss: 0.409 - ETA: 0s - loss: 0.409 - 124s 1s/step - loss: 0.4102 - val_loss: 0.3439\n",
      "\n",
      "Epoch 00004: val_loss did not improve from 0.17939\n",
      "Epoch 5/30\n",
      "111/111 [==============================] - ETA: 1:44 - loss: 1.038 - ETA: 1:42 - loss: 1.005 - ETA: 1:42 - loss: 0.911 - ETA: 1:42 - loss: 0.838 - ETA: 1:42 - loss: 0.778 - ETA: 1:43 - loss: 0.728 - ETA: 1:42 - loss: 0.687 - ETA: 1:41 - loss: 0.652 - ETA: 1:40 - loss: 0.623 - ETA: 1:39 - loss: 0.597 - ETA: 1:37 - loss: 0.578 - ETA: 1:36 - loss: 0.563 - ETA: 1:36 - loss: 0.549 - ETA: 1:34 - loss: 0.535 - ETA: 1:34 - loss: 0.523 - ETA: 1:33 - loss: 0.515 - ETA: 1:32 - loss: 0.507 - ETA: 1:32 - loss: 0.500 - ETA: 1:31 - loss: 0.493 - ETA: 1:31 - loss: 0.486 - ETA: 1:30 - loss: 0.479 - ETA: 1:30 - loss: 0.473 - ETA: 1:29 - loss: 0.470 - ETA: 1:29 - loss: 0.468 - ETA: 1:29 - loss: 0.465 - ETA: 1:29 - loss: 0.463 - ETA: 1:28 - loss: 0.461 - ETA: 1:28 - loss: 0.460 - ETA: 1:27 - loss: 0.458 - ETA: 1:27 - loss: 0.457 - ETA: 1:26 - loss: 0.456 - ETA: 1:26 - loss: 0.455 - ETA: 1:25 - loss: 0.454 - ETA: 1:24 - loss: 0.453 - ETA: 1:23 - loss: 0.453 - ETA: 1:23 - loss: 0.452 - ETA: 1:22 - loss: 0.452 - ETA: 1:21 - loss: 0.452 - ETA: 1:20 - loss: 0.452 - ETA: 1:19 - loss: 0.452 - ETA: 1:18 - loss: 0.452 - ETA: 1:17 - loss: 0.453 - ETA: 1:16 - loss: 0.453 - ETA: 1:16 - loss: 0.453 - ETA: 1:15 - loss: 0.453 - ETA: 1:14 - loss: 0.454 - ETA: 1:13 - loss: 0.454 - ETA: 1:12 - loss: 0.453 - ETA: 1:11 - loss: 0.453 - ETA: 1:10 - loss: 0.453 - ETA: 1:09 - loss: 0.453 - ETA: 1:07 - loss: 0.453 - ETA: 1:06 - loss: 0.452 - ETA: 1:05 - loss: 0.452 - ETA: 1:04 - loss: 0.452 - ETA: 1:03 - loss: 0.451 - ETA: 1:02 - loss: 0.451 - ETA: 1:01 - loss: 0.450 - ETA: 1:00 - loss: 0.450 - ETA: 59s - loss: 0.449 - ETA: 58s - loss: 0.44 - ETA: 57s - loss: 0.44 - ETA: 55s - loss: 0.44 - ETA: 54s - loss: 0.44 - ETA: 53s - loss: 0.44 - ETA: 52s - loss: 0.44 - ETA: 51s - loss: 0.44 - ETA: 49s - loss: 0.44 - ETA: 48s - loss: 0.44 - ETA: 47s - loss: 0.44 - ETA: 45s - loss: 0.44 - ETA: 44s - loss: 0.44 - ETA: 43s - loss: 0.44 - ETA: 42s - loss: 0.44 - ETA: 41s - loss: 0.44 - ETA: 39s - loss: 0.44 - ETA: 38s - loss: 0.44 - ETA: 37s - loss: 0.44 - ETA: 36s - loss: 0.44 - ETA: 34s - loss: 0.44 - ETA: 33s - loss: 0.44 - ETA: 32s - loss: 0.44 - ETA: 31s - loss: 0.44 - ETA: 30s - loss: 0.44 - ETA: 29s - loss: 0.44 - ETA: 27s - loss: 0.44 - ETA: 26s - loss: 0.44 - ETA: 25s - loss: 0.44 - ETA: 24s - loss: 0.44 - ETA: 23s - loss: 0.44 - ETA: 22s - loss: 0.44 - ETA: 21s - loss: 0.44 - ETA: 19s - loss: 0.44 - ETA: 18s - loss: 0.44 - ETA: 17s - loss: 0.44 - ETA: 16s - loss: 0.44 - ETA: 15s - loss: 0.44 - ETA: 14s - loss: 0.44 - ETA: 13s - loss: 0.44 - ETA: 12s - loss: 0.44 - ETA: 11s - loss: 0.43 - ETA: 9s - loss: 0.4395 - ETA: 8s - loss: 0.439 - ETA: 7s - loss: 0.439 - ETA: 6s - loss: 0.439 - ETA: 5s - loss: 0.439 - ETA: 4s - loss: 0.439 - ETA: 3s - loss: 0.439 - ETA: 2s - loss: 0.439 - ETA: 1s - loss: 0.439 - ETA: 0s - loss: 0.439 - 127s 1s/step - loss: 0.4396 - val_loss: 0.3357\n",
      "\n",
      "Epoch 00005: val_loss did not improve from 0.17939\n",
      "Epoch 6/30\n",
      "111/111 [==============================] - ETA: 2:27 - loss: 0.096 - ETA: 2:14 - loss: 0.149 - ETA: 2:13 - loss: 0.212 - ETA: 2:13 - loss: 0.267 - ETA: 2:11 - loss: 0.291 - ETA: 2:11 - loss: 0.342 - ETA: 2:09 - loss: 0.372 - ETA: 2:08 - loss: 0.388 - ETA: 2:06 - loss: 0.407 - ETA: 2:05 - loss: 0.421 - ETA: 2:04 - loss: 0.431 - ETA: 2:03 - loss: 0.436 - ETA: 2:02 - loss: 0.440 - ETA: 2:00 - loss: 0.442 - ETA: 1:59 - loss: 0.443 - ETA: 1:59 - loss: 0.443 - ETA: 1:57 - loss: 0.442 - ETA: 1:56 - loss: 0.440 - ETA: 1:55 - loss: 0.437 - ETA: 1:53 - loss: 0.435 - ETA: 1:52 - loss: 0.434 - ETA: 1:51 - loss: 0.432 - ETA: 1:49 - loss: 0.431 - ETA: 1:48 - loss: 0.433 - ETA: 1:47 - loss: 0.434 - ETA: 1:46 - loss: 0.435 - ETA: 1:44 - loss: 0.436 - ETA: 1:43 - loss: 0.437 - ETA: 1:41 - loss: 0.437 - ETA: 1:40 - loss: 0.437 - ETA: 1:38 - loss: 0.437 - ETA: 1:36 - loss: 0.437 - ETA: 1:35 - loss: 0.437 - ETA: 1:33 - loss: 0.437 - ETA: 1:31 - loss: 0.437 - ETA: 1:29 - loss: 0.437 - ETA: 1:28 - loss: 0.436 - ETA: 1:26 - loss: 0.436 - ETA: 1:25 - loss: 0.435 - ETA: 1:23 - loss: 0.435 - ETA: 1:21 - loss: 0.435 - ETA: 1:20 - loss: 0.435 - ETA: 1:19 - loss: 0.435 - ETA: 1:17 - loss: 0.434 - ETA: 1:16 - loss: 0.434 - ETA: 1:15 - loss: 0.433 - ETA: 1:14 - loss: 0.433 - ETA: 1:13 - loss: 0.432 - ETA: 1:12 - loss: 0.432 - ETA: 1:11 - loss: 0.431 - ETA: 1:09 - loss: 0.431 - ETA: 1:08 - loss: 0.430 - ETA: 1:07 - loss: 0.430 - ETA: 1:06 - loss: 0.430 - ETA: 1:04 - loss: 0.430 - ETA: 1:03 - loss: 0.430 - ETA: 1:02 - loss: 0.429 - ETA: 1:01 - loss: 0.429 - ETA: 1:00 - loss: 0.429 - ETA: 58s - loss: 0.429 - ETA: 57s - loss: 0.42 - ETA: 56s - loss: 0.42 - ETA: 55s - loss: 0.42 - ETA: 53s - loss: 0.42 - ETA: 52s - loss: 0.42 - ETA: 51s - loss: 0.42 - ETA: 50s - loss: 0.42 - ETA: 49s - loss: 0.42 - ETA: 48s - loss: 0.42 - ETA: 47s - loss: 0.42 - ETA: 46s - loss: 0.42 - ETA: 45s - loss: 0.42 - ETA: 43s - loss: 0.42 - ETA: 42s - loss: 0.42 - ETA: 41s - loss: 0.42 - ETA: 40s - loss: 0.42 - ETA: 39s - loss: 0.42 - ETA: 38s - loss: 0.42 - ETA: 37s - loss: 0.42 - ETA: 36s - loss: 0.42 - ETA: 35s - loss: 0.42 - ETA: 33s - loss: 0.42 - ETA: 32s - loss: 0.42 - ETA: 31s - loss: 0.42 - ETA: 30s - loss: 0.42 - ETA: 29s - loss: 0.42 - ETA: 28s - loss: 0.42 - ETA: 27s - loss: 0.42 - ETA: 25s - loss: 0.42 - ETA: 24s - loss: 0.42 - ETA: 23s - loss: 0.42 - ETA: 22s - loss: 0.42 - ETA: 21s - loss: 0.42 - ETA: 20s - loss: 0.42 - ETA: 18s - loss: 0.42 - ETA: 17s - loss: 0.42 - ETA: 16s - loss: 0.42 - ETA: 15s - loss: 0.42 - ETA: 14s - loss: 0.42 - ETA: 13s - loss: 0.42 - ETA: 11s - loss: 0.42 - ETA: 10s - loss: 0.42 - ETA: 9s - loss: 0.4218 - ETA: 8s - loss: 0.421 - ETA: 7s - loss: 0.421 - ETA: 5s - loss: 0.421 - ETA: 4s - loss: 0.421 - ETA: 3s - loss: 0.421 - ETA: 2s - loss: 0.421 - ETA: 1s - loss: 0.421 - ETA: 0s - loss: 0.421 - 134s 1s/step - loss: 0.4210 - val_loss: 0.2929\n",
      "\n",
      "Epoch 00006: val_loss did not improve from 0.17939\n",
      "Epoch 7/30\n",
      "111/111 [==============================] - ETA: 2:03 - loss: 0.417 - ETA: 1:59 - loss: 0.468 - ETA: 1:55 - loss: 0.481 - ETA: 1:52 - loss: 0.466 - ETA: 1:49 - loss: 0.471 - ETA: 1:48 - loss: 0.476 - ETA: 1:46 - loss: 0.483 - ETA: 1:44 - loss: 0.494 - ETA: 1:43 - loss: 0.497 - ETA: 1:41 - loss: 0.504 - ETA: 1:40 - loss: 0.508 - ETA: 1:38 - loss: 0.509 - ETA: 1:37 - loss: 0.510 - ETA: 1:36 - loss: 0.509 - ETA: 1:35 - loss: 0.508 - ETA: 1:33 - loss: 0.506 - ETA: 1:32 - loss: 0.504 - ETA: 1:31 - loss: 0.502 - ETA: 1:30 - loss: 0.500 - ETA: 1:29 - loss: 0.498 - ETA: 1:28 - loss: 0.496 - ETA: 1:27 - loss: 0.494 - ETA: 1:26 - loss: 0.492 - ETA: 1:25 - loss: 0.491 - ETA: 1:24 - loss: 0.489 - ETA: 1:23 - loss: 0.486 - ETA: 1:22 - loss: 0.484 - ETA: 1:21 - loss: 0.481 - ETA: 1:20 - loss: 0.479 - ETA: 1:19 - loss: 0.476 - ETA: 1:18 - loss: 0.473 - ETA: 1:17 - loss: 0.470 - ETA: 1:16 - loss: 0.468 - ETA: 1:15 - loss: 0.465 - ETA: 1:15 - loss: 0.463 - ETA: 1:14 - loss: 0.460 - ETA: 1:13 - loss: 0.458 - ETA: 1:13 - loss: 0.456 - ETA: 1:12 - loss: 0.455 - ETA: 1:12 - loss: 0.454 - ETA: 1:11 - loss: 0.453 - ETA: 1:11 - loss: 0.453 - ETA: 1:10 - loss: 0.452 - ETA: 1:09 - loss: 0.451 - ETA: 1:09 - loss: 0.450 - ETA: 1:08 - loss: 0.449 - ETA: 1:07 - loss: 0.449 - ETA: 1:06 - loss: 0.448 - ETA: 1:05 - loss: 0.448 - ETA: 1:05 - loss: 0.447 - ETA: 1:04 - loss: 0.446 - ETA: 1:03 - loss: 0.446 - ETA: 1:02 - loss: 0.446 - ETA: 1:01 - loss: 0.446 - ETA: 1:00 - loss: 0.445 - ETA: 59s - loss: 0.445 - ETA: 59s - loss: 0.44 - ETA: 58s - loss: 0.44 - ETA: 57s - loss: 0.44 - ETA: 56s - loss: 0.44 - ETA: 56s - loss: 0.44 - ETA: 55s - loss: 0.44 - ETA: 54s - loss: 0.44 - ETA: 54s - loss: 0.44 - ETA: 53s - loss: 0.44 - ETA: 52s - loss: 0.44 - ETA: 51s - loss: 0.44 - ETA: 50s - loss: 0.44 - ETA: 49s - loss: 0.44 - ETA: 47s - loss: 0.43 - ETA: 46s - loss: 0.43 - ETA: 45s - loss: 0.43 - ETA: 44s - loss: 0.43 - ETA: 43s - loss: 0.43 - ETA: 42s - loss: 0.43 - ETA: 40s - loss: 0.43 - ETA: 39s - loss: 0.43 - ETA: 38s - loss: 0.43 - ETA: 37s - loss: 0.43 - ETA: 36s - loss: 0.43 - ETA: 34s - loss: 0.43 - ETA: 33s - loss: 0.43 - ETA: 32s - loss: 0.43 - ETA: 31s - loss: 0.43 - ETA: 30s - loss: 0.43 - ETA: 28s - loss: 0.43 - ETA: 27s - loss: 0.43 - ETA: 26s - loss: 0.43 - ETA: 25s - loss: 0.42 - ETA: 24s - loss: 0.42 - ETA: 22s - loss: 0.42 - ETA: 21s - loss: 0.42 - ETA: 20s - loss: 0.42 - ETA: 19s - loss: 0.42 - ETA: 18s - loss: 0.42 - ETA: 17s - loss: 0.42 - ETA: 15s - loss: 0.42 - ETA: 14s - loss: 0.42 - ETA: 13s - loss: 0.42 - ETA: 12s - loss: 0.42 - ETA: 11s - loss: 0.42 - ETA: 10s - loss: 0.42 - ETA: 8s - loss: 0.4240 - ETA: 7s - loss: 0.423 - ETA: 6s - loss: 0.423 - ETA: 5s - loss: 0.423 - ETA: 4s - loss: 0.422 - ETA: 3s - loss: 0.422 - ETA: 2s - loss: 0.422 - ETA: 1s - loss: 0.422 - ETA: 0s - loss: 0.421 - 126s 1s/step - loss: 0.4215 - val_loss: 0.2763\n",
      "\n",
      "Epoch 00007: val_loss did not improve from 0.17939\n",
      "Epoch 8/30\n",
      "111/111 [==============================] - ETA: 2:04 - loss: 0.128 - ETA: 2:17 - loss: 0.153 - ETA: 2:14 - loss: 0.150 - ETA: 2:13 - loss: 0.151 - ETA: 2:10 - loss: 0.164 - ETA: 2:09 - loss: 0.181 - ETA: 2:08 - loss: 0.193 - ETA: 2:08 - loss: 0.200 - ETA: 2:07 - loss: 0.206 - ETA: 2:06 - loss: 0.217 - ETA: 2:05 - loss: 0.227 - ETA: 2:04 - loss: 0.236 - ETA: 2:02 - loss: 0.244 - ETA: 2:01 - loss: 0.253 - ETA: 1:59 - loss: 0.262 - ETA: 1:58 - loss: 0.269 - ETA: 1:57 - loss: 0.274 - ETA: 1:55 - loss: 0.278 - ETA: 1:54 - loss: 0.282 - ETA: 1:53 - loss: 0.285 - ETA: 1:52 - loss: 0.287 - ETA: 1:50 - loss: 0.289 - ETA: 1:49 - loss: 0.291 - ETA: 1:48 - loss: 0.292 - ETA: 1:47 - loss: 0.294 - ETA: 1:45 - loss: 0.295 - ETA: 1:44 - loss: 0.296 - ETA: 1:43 - loss: 0.297 - ETA: 1:42 - loss: 0.298 - ETA: 1:41 - loss: 0.299 - ETA: 1:39 - loss: 0.300 - ETA: 1:38 - loss: 0.301 - ETA: 1:37 - loss: 0.302 - ETA: 1:36 - loss: 0.303 - ETA: 1:34 - loss: 0.305 - ETA: 1:33 - loss: 0.306 - ETA: 1:32 - loss: 0.307 - ETA: 1:31 - loss: 0.308 - ETA: 1:30 - loss: 0.309 - ETA: 1:29 - loss: 0.309 - ETA: 1:28 - loss: 0.310 - ETA: 1:27 - loss: 0.311 - ETA: 1:26 - loss: 0.312 - ETA: 1:25 - loss: 0.312 - ETA: 1:24 - loss: 0.313 - ETA: 1:23 - loss: 0.314 - ETA: 1:21 - loss: 0.315 - ETA: 1:20 - loss: 0.316 - ETA: 1:18 - loss: 0.316 - ETA: 1:17 - loss: 0.317 - ETA: 1:16 - loss: 0.318 - ETA: 1:14 - loss: 0.318 - ETA: 1:13 - loss: 0.319 - ETA: 1:11 - loss: 0.319 - ETA: 1:10 - loss: 0.320 - ETA: 1:08 - loss: 0.320 - ETA: 1:06 - loss: 0.321 - ETA: 1:05 - loss: 0.321 - ETA: 1:03 - loss: 0.322 - ETA: 1:02 - loss: 0.322 - ETA: 1:01 - loss: 0.322 - ETA: 59s - loss: 0.323 - ETA: 58s - loss: 0.32 - ETA: 56s - loss: 0.32 - ETA: 55s - loss: 0.32 - ETA: 53s - loss: 0.32 - ETA: 52s - loss: 0.32 - ETA: 51s - loss: 0.32 - ETA: 49s - loss: 0.32 - ETA: 48s - loss: 0.32 - ETA: 47s - loss: 0.32 - ETA: 45s - loss: 0.32 - ETA: 44s - loss: 0.32 - ETA: 43s - loss: 0.32 - ETA: 42s - loss: 0.32 - ETA: 40s - loss: 0.32 - ETA: 39s - loss: 0.32 - ETA: 38s - loss: 0.32 - ETA: 37s - loss: 0.32 - ETA: 35s - loss: 0.32 - ETA: 34s - loss: 0.33 - ETA: 33s - loss: 0.33 - ETA: 32s - loss: 0.33 - ETA: 31s - loss: 0.33 - ETA: 29s - loss: 0.33 - ETA: 28s - loss: 0.33 - ETA: 27s - loss: 0.33 - ETA: 26s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 24s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 20s - loss: 0.33 - ETA: 19s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 17s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 14s - loss: 0.33 - ETA: 12s - loss: 0.33 - ETA: 11s - loss: 0.33 - ETA: 10s - loss: 0.33 - ETA: 9s - loss: 0.3332 - ETA: 8s - loss: 0.333 - ETA: 7s - loss: 0.333 - ETA: 5s - loss: 0.333 - ETA: 4s - loss: 0.333 - ETA: 3s - loss: 0.333 - ETA: 2s - loss: 0.333 - ETA: 1s - loss: 0.333 - ETA: 0s - loss: 0.333 - 134s 1s/step - loss: 0.3333 - val_loss: 0.2590\n",
      "\n",
      "Epoch 00008: val_loss did not improve from 0.17939\n",
      "Epoch 9/30\n",
      "111/111 [==============================] - ETA: 2:22 - loss: 0.226 - ETA: 2:14 - loss: 0.277 - ETA: 2:16 - loss: 0.287 - ETA: 2:13 - loss: 0.292 - ETA: 2:12 - loss: 0.290 - ETA: 2:11 - loss: 0.291 - ETA: 2:10 - loss: 0.306 - ETA: 2:08 - loss: 0.314 - ETA: 2:08 - loss: 0.322 - ETA: 2:06 - loss: 0.330 - ETA: 2:04 - loss: 0.335 - ETA: 2:03 - loss: 0.339 - ETA: 2:01 - loss: 0.344 - ETA: 2:00 - loss: 0.346 - ETA: 1:59 - loss: 0.348 - ETA: 1:57 - loss: 0.349 - ETA: 1:55 - loss: 0.349 - ETA: 1:54 - loss: 0.349 - ETA: 1:52 - loss: 0.352 - ETA: 1:50 - loss: 0.355 - ETA: 1:48 - loss: 0.358 - ETA: 1:46 - loss: 0.360 - ETA: 1:44 - loss: 0.362 - ETA: 1:43 - loss: 0.364 - ETA: 1:41 - loss: 0.365 - ETA: 1:39 - loss: 0.367 - ETA: 1:37 - loss: 0.368 - ETA: 1:36 - loss: 0.369 - ETA: 1:34 - loss: 0.370 - ETA: 1:32 - loss: 0.370 - ETA: 1:31 - loss: 0.370 - ETA: 1:29 - loss: 0.371 - ETA: 1:27 - loss: 0.371 - ETA: 1:26 - loss: 0.371 - ETA: 1:25 - loss: 0.371 - ETA: 1:23 - loss: 0.371 - ETA: 1:22 - loss: 0.371 - ETA: 1:20 - loss: 0.371 - ETA: 1:19 - loss: 0.371 - ETA: 1:18 - loss: 0.371 - ETA: 1:16 - loss: 0.371 - ETA: 1:15 - loss: 0.371 - ETA: 1:14 - loss: 0.371 - ETA: 1:12 - loss: 0.372 - ETA: 1:11 - loss: 0.372 - ETA: 1:10 - loss: 0.373 - ETA: 1:09 - loss: 0.374 - ETA: 1:07 - loss: 0.374 - ETA: 1:06 - loss: 0.375 - ETA: 1:05 - loss: 0.376 - ETA: 1:04 - loss: 0.377 - ETA: 1:03 - loss: 0.377 - ETA: 1:02 - loss: 0.378 - ETA: 1:01 - loss: 0.378 - ETA: 59s - loss: 0.379 - ETA: 58s - loss: 0.37 - ETA: 57s - loss: 0.38 - ETA: 56s - loss: 0.38 - ETA: 55s - loss: 0.38 - ETA: 55s - loss: 0.38 - ETA: 54s - loss: 0.38 - ETA: 53s - loss: 0.38 - ETA: 52s - loss: 0.38 - ETA: 51s - loss: 0.38 - ETA: 50s - loss: 0.38 - ETA: 49s - loss: 0.38 - ETA: 48s - loss: 0.38 - ETA: 47s - loss: 0.38 - ETA: 46s - loss: 0.38 - ETA: 45s - loss: 0.38 - ETA: 44s - loss: 0.38 - ETA: 43s - loss: 0.38 - ETA: 42s - loss: 0.38 - ETA: 41s - loss: 0.38 - ETA: 40s - loss: 0.38 - ETA: 39s - loss: 0.38 - ETA: 38s - loss: 0.38 - ETA: 37s - loss: 0.38 - ETA: 35s - loss: 0.38 - ETA: 34s - loss: 0.38 - ETA: 33s - loss: 0.38 - ETA: 32s - loss: 0.38 - ETA: 31s - loss: 0.38 - ETA: 30s - loss: 0.38 - ETA: 29s - loss: 0.38 - ETA: 28s - loss: 0.38 - ETA: 27s - loss: 0.38 - ETA: 26s - loss: 0.38 - ETA: 25s - loss: 0.38 - ETA: 24s - loss: 0.38 - ETA: 22s - loss: 0.38 - ETA: 21s - loss: 0.38 - ETA: 20s - loss: 0.38 - ETA: 19s - loss: 0.38 - ETA: 18s - loss: 0.38 - ETA: 17s - loss: 0.38 - ETA: 16s - loss: 0.38 - ETA: 15s - loss: 0.38 - ETA: 13s - loss: 0.38 - ETA: 12s - loss: 0.38 - ETA: 11s - loss: 0.38 - ETA: 10s - loss: 0.38 - ETA: 9s - loss: 0.3889 - ETA: 8s - loss: 0.388 - ETA: 6s - loss: 0.388 - ETA: 5s - loss: 0.388 - ETA: 4s - loss: 0.388 - ETA: 3s - loss: 0.388 - ETA: 2s - loss: 0.388 - ETA: 1s - loss: 0.388 - ETA: 0s - loss: 0.387 - 131s 1s/step - loss: 0.3877 - val_loss: 0.2413\n",
      "\n",
      "Epoch 00009: val_loss did not improve from 0.17939\n",
      "Epoch 10/30\n",
      "111/111 [==============================] - ETA: 2:02 - loss: 0.788 - ETA: 2:01 - loss: 0.608 - ETA: 1:55 - loss: 0.626 - ETA: 1:53 - loss: 0.608 - ETA: 1:51 - loss: 0.581 - ETA: 1:49 - loss: 0.552 - ETA: 1:46 - loss: 0.530 - ETA: 1:45 - loss: 0.510 - ETA: 1:43 - loss: 0.502 - ETA: 1:41 - loss: 0.505 - ETA: 1:39 - loss: 0.505 - ETA: 1:38 - loss: 0.504 - ETA: 1:37 - loss: 0.503 - ETA: 1:35 - loss: 0.502 - ETA: 1:34 - loss: 0.500 - ETA: 1:33 - loss: 0.499 - ETA: 1:32 - loss: 0.498 - ETA: 1:31 - loss: 0.496 - ETA: 1:30 - loss: 0.493 - ETA: 1:29 - loss: 0.491 - ETA: 1:28 - loss: 0.488 - ETA: 1:27 - loss: 0.484 - ETA: 1:26 - loss: 0.482 - ETA: 1:25 - loss: 0.479 - ETA: 1:23 - loss: 0.476 - ETA: 1:23 - loss: 0.473 - ETA: 1:22 - loss: 0.471 - ETA: 1:21 - loss: 0.470 - ETA: 1:20 - loss: 0.468 - ETA: 1:19 - loss: 0.467 - ETA: 1:18 - loss: 0.465 - ETA: 1:17 - loss: 0.463 - ETA: 1:16 - loss: 0.461 - ETA: 1:15 - loss: 0.460 - ETA: 1:14 - loss: 0.458 - ETA: 1:14 - loss: 0.457 - ETA: 1:13 - loss: 0.455 - ETA: 1:12 - loss: 0.453 - ETA: 1:12 - loss: 0.452 - ETA: 1:11 - loss: 0.450 - ETA: 1:11 - loss: 0.449 - ETA: 1:10 - loss: 0.447 - ETA: 1:09 - loss: 0.446 - ETA: 1:09 - loss: 0.445 - ETA: 1:08 - loss: 0.443 - ETA: 1:08 - loss: 0.442 - ETA: 1:07 - loss: 0.441 - ETA: 1:06 - loss: 0.439 - ETA: 1:05 - loss: 0.438 - ETA: 1:05 - loss: 0.437 - ETA: 1:04 - loss: 0.436 - ETA: 1:03 - loss: 0.435 - ETA: 1:02 - loss: 0.434 - ETA: 1:01 - loss: 0.433 - ETA: 1:00 - loss: 0.432 - ETA: 59s - loss: 0.431 - ETA: 58s - loss: 0.42 - ETA: 58s - loss: 0.42 - ETA: 57s - loss: 0.42 - ETA: 56s - loss: 0.42 - ETA: 55s - loss: 0.42 - ETA: 54s - loss: 0.42 - ETA: 53s - loss: 0.42 - ETA: 52s - loss: 0.42 - ETA: 51s - loss: 0.42 - ETA: 50s - loss: 0.42 - ETA: 49s - loss: 0.41 - ETA: 48s - loss: 0.41 - ETA: 47s - loss: 0.41 - ETA: 46s - loss: 0.41 - ETA: 45s - loss: 0.41 - ETA: 44s - loss: 0.41 - ETA: 43s - loss: 0.41 - ETA: 42s - loss: 0.41 - ETA: 40s - loss: 0.41 - ETA: 39s - loss: 0.41 - ETA: 38s - loss: 0.41 - ETA: 37s - loss: 0.41 - ETA: 36s - loss: 0.41 - ETA: 35s - loss: 0.40 - ETA: 34s - loss: 0.40 - ETA: 33s - loss: 0.40 - ETA: 31s - loss: 0.40 - ETA: 30s - loss: 0.40 - ETA: 29s - loss: 0.40 - ETA: 28s - loss: 0.40 - ETA: 27s - loss: 0.40 - ETA: 26s - loss: 0.40 - ETA: 24s - loss: 0.40 - ETA: 23s - loss: 0.40 - ETA: 22s - loss: 0.40 - ETA: 21s - loss: 0.40 - ETA: 20s - loss: 0.40 - ETA: 19s - loss: 0.40 - ETA: 17s - loss: 0.40 - ETA: 16s - loss: 0.40 - ETA: 15s - loss: 0.40 - ETA: 14s - loss: 0.40 - ETA: 13s - loss: 0.39 - ETA: 12s - loss: 0.39 - ETA: 11s - loss: 0.39 - ETA: 9s - loss: 0.3984 - ETA: 8s - loss: 0.398 - ETA: 7s - loss: 0.397 - ETA: 6s - loss: 0.397 - ETA: 5s - loss: 0.396 - ETA: 4s - loss: 0.396 - ETA: 3s - loss: 0.395 - ETA: 2s - loss: 0.395 - ETA: 1s - loss: 0.394 - ETA: 0s - loss: 0.394 - 124s 1s/step - loss: 0.3938 - val_loss: 0.2331\n",
      "\n",
      "Epoch 00010: val_loss did not improve from 0.17939\n",
      "Epoch 11/30\n",
      "111/111 [==============================] - ETA: 1:54 - loss: 0.309 - ETA: 1:53 - loss: 0.430 - ETA: 1:54 - loss: 0.486 - ETA: 1:55 - loss: 0.496 - ETA: 1:54 - loss: 0.485 - ETA: 1:54 - loss: 0.481 - ETA: 1:55 - loss: 0.476 - ETA: 1:56 - loss: 0.469 - ETA: 1:57 - loss: 0.462 - ETA: 1:56 - loss: 0.453 - ETA: 1:56 - loss: 0.445 - ETA: 1:56 - loss: 0.437 - ETA: 1:55 - loss: 0.430 - ETA: 1:55 - loss: 0.423 - ETA: 1:55 - loss: 0.417 - ETA: 1:54 - loss: 0.411 - ETA: 1:54 - loss: 0.405 - ETA: 1:53 - loss: 0.401 - ETA: 1:51 - loss: 0.396 - ETA: 1:51 - loss: 0.395 - ETA: 1:49 - loss: 0.394 - ETA: 1:48 - loss: 0.393 - ETA: 1:47 - loss: 0.392 - ETA: 1:46 - loss: 0.393 - ETA: 1:45 - loss: 0.393 - ETA: 1:44 - loss: 0.393 - ETA: 1:43 - loss: 0.393 - ETA: 1:42 - loss: 0.393 - ETA: 1:41 - loss: 0.395 - ETA: 1:40 - loss: 0.396 - ETA: 1:38 - loss: 0.397 - ETA: 1:37 - loss: 0.397 - ETA: 1:36 - loss: 0.397 - ETA: 1:35 - loss: 0.397 - ETA: 1:34 - loss: 0.397 - ETA: 1:33 - loss: 0.396 - ETA: 1:31 - loss: 0.395 - ETA: 1:30 - loss: 0.394 - ETA: 1:29 - loss: 0.394 - ETA: 1:28 - loss: 0.393 - ETA: 1:26 - loss: 0.392 - ETA: 1:25 - loss: 0.391 - ETA: 1:23 - loss: 0.390 - ETA: 1:22 - loss: 0.389 - ETA: 1:21 - loss: 0.388 - ETA: 1:20 - loss: 0.387 - ETA: 1:18 - loss: 0.386 - ETA: 1:17 - loss: 0.385 - ETA: 1:15 - loss: 0.384 - ETA: 1:14 - loss: 0.383 - ETA: 1:12 - loss: 0.382 - ETA: 1:11 - loss: 0.381 - ETA: 1:09 - loss: 0.380 - ETA: 1:08 - loss: 0.380 - ETA: 1:07 - loss: 0.379 - ETA: 1:06 - loss: 0.378 - ETA: 1:04 - loss: 0.377 - ETA: 1:03 - loss: 0.376 - ETA: 1:02 - loss: 0.375 - ETA: 1:01 - loss: 0.375 - ETA: 59s - loss: 0.374 - ETA: 58s - loss: 0.37 - ETA: 57s - loss: 0.37 - ETA: 55s - loss: 0.37 - ETA: 54s - loss: 0.37 - ETA: 53s - loss: 0.37 - ETA: 51s - loss: 0.37 - ETA: 50s - loss: 0.36 - ETA: 49s - loss: 0.36 - ETA: 47s - loss: 0.36 - ETA: 46s - loss: 0.36 - ETA: 45s - loss: 0.36 - ETA: 43s - loss: 0.36 - ETA: 42s - loss: 0.36 - ETA: 41s - loss: 0.36 - ETA: 40s - loss: 0.36 - ETA: 38s - loss: 0.36 - ETA: 37s - loss: 0.36 - ETA: 36s - loss: 0.36 - ETA: 35s - loss: 0.36 - ETA: 34s - loss: 0.36 - ETA: 33s - loss: 0.36 - ETA: 31s - loss: 0.36 - ETA: 30s - loss: 0.36 - ETA: 29s - loss: 0.36 - ETA: 28s - loss: 0.36 - ETA: 27s - loss: 0.36 - ETA: 26s - loss: 0.36 - ETA: 25s - loss: 0.36 - ETA: 23s - loss: 0.36 - ETA: 22s - loss: 0.36 - ETA: 21s - loss: 0.36 - ETA: 20s - loss: 0.36 - ETA: 19s - loss: 0.36 - ETA: 18s - loss: 0.36 - ETA: 17s - loss: 0.36 - ETA: 16s - loss: 0.36 - ETA: 14s - loss: 0.36 - ETA: 13s - loss: 0.36 - ETA: 12s - loss: 0.36 - ETA: 11s - loss: 0.36 - ETA: 10s - loss: 0.36 - ETA: 9s - loss: 0.3620 - ETA: 8s - loss: 0.361 - ETA: 6s - loss: 0.361 - ETA: 5s - loss: 0.361 - ETA: 4s - loss: 0.361 - ETA: 3s - loss: 0.361 - ETA: 2s - loss: 0.361 - ETA: 1s - loss: 0.361 - ETA: 0s - loss: 0.360 - 132s 1s/step - loss: 0.3607 - val_loss: 0.2332\n",
      "\n",
      "Epoch 00011: val_loss did not improve from 0.17939\n",
      "Epoch 12/30\n",
      "111/111 [==============================] - ETA: 2:27 - loss: 0.269 - ETA: 2:16 - loss: 0.268 - ETA: 2:11 - loss: 0.263 - ETA: 2:10 - loss: 0.288 - ETA: 2:06 - loss: 0.338 - ETA: 2:04 - loss: 0.372 - ETA: 2:05 - loss: 0.389 - ETA: 2:05 - loss: 0.396 - ETA: 2:05 - loss: 0.399 - ETA: 2:03 - loss: 0.401 - ETA: 2:02 - loss: 0.400 - ETA: 2:01 - loss: 0.398 - ETA: 2:00 - loss: 0.396 - ETA: 1:58 - loss: 0.393 - ETA: 1:57 - loss: 0.391 - ETA: 1:55 - loss: 0.390 - ETA: 1:53 - loss: 0.392 - ETA: 1:51 - loss: 0.394 - ETA: 1:48 - loss: 0.396 - ETA: 1:46 - loss: 0.397 - ETA: 1:44 - loss: 0.397 - ETA: 1:42 - loss: 0.397 - ETA: 1:40 - loss: 0.397 - ETA: 1:38 - loss: 0.397 - ETA: 1:37 - loss: 0.397 - ETA: 1:35 - loss: 0.397 - ETA: 1:34 - loss: 0.396 - ETA: 1:32 - loss: 0.396 - ETA: 1:30 - loss: 0.396 - ETA: 1:29 - loss: 0.396 - ETA: 1:28 - loss: 0.395 - ETA: 1:26 - loss: 0.394 - ETA: 1:25 - loss: 0.393 - ETA: 1:24 - loss: 0.393 - ETA: 1:22 - loss: 0.392 - ETA: 1:21 - loss: 0.392 - ETA: 1:20 - loss: 0.391 - ETA: 1:18 - loss: 0.390 - ETA: 1:17 - loss: 0.390 - ETA: 1:16 - loss: 0.389 - ETA: 1:15 - loss: 0.388 - ETA: 1:14 - loss: 0.388 - ETA: 1:12 - loss: 0.387 - ETA: 1:11 - loss: 0.386 - ETA: 1:10 - loss: 0.386 - ETA: 1:09 - loss: 0.385 - ETA: 1:08 - loss: 0.384 - ETA: 1:07 - loss: 0.383 - ETA: 1:06 - loss: 0.382 - ETA: 1:05 - loss: 0.381 - ETA: 1:04 - loss: 0.381 - ETA: 1:03 - loss: 0.380 - ETA: 1:02 - loss: 0.379 - ETA: 1:01 - loss: 0.379 - ETA: 1:00 - loss: 0.378 - ETA: 59s - loss: 0.377 - ETA: 58s - loss: 0.37 - ETA: 57s - loss: 0.37 - ETA: 56s - loss: 0.37 - ETA: 55s - loss: 0.37 - ETA: 54s - loss: 0.37 - ETA: 54s - loss: 0.37 - ETA: 53s - loss: 0.37 - ETA: 52s - loss: 0.37 - ETA: 51s - loss: 0.37 - ETA: 50s - loss: 0.37 - ETA: 49s - loss: 0.37 - ETA: 48s - loss: 0.37 - ETA: 47s - loss: 0.37 - ETA: 46s - loss: 0.37 - ETA: 44s - loss: 0.37 - ETA: 43s - loss: 0.37 - ETA: 42s - loss: 0.37 - ETA: 41s - loss: 0.37 - ETA: 40s - loss: 0.37 - ETA: 39s - loss: 0.37 - ETA: 38s - loss: 0.37 - ETA: 37s - loss: 0.37 - ETA: 36s - loss: 0.37 - ETA: 35s - loss: 0.37 - ETA: 34s - loss: 0.37 - ETA: 33s - loss: 0.37 - ETA: 32s - loss: 0.37 - ETA: 30s - loss: 0.37 - ETA: 29s - loss: 0.37 - ETA: 28s - loss: 0.37 - ETA: 27s - loss: 0.37 - ETA: 26s - loss: 0.36 - ETA: 25s - loss: 0.36 - ETA: 24s - loss: 0.36 - ETA: 23s - loss: 0.36 - ETA: 21s - loss: 0.36 - ETA: 20s - loss: 0.36 - ETA: 19s - loss: 0.36 - ETA: 18s - loss: 0.36 - ETA: 17s - loss: 0.36 - ETA: 16s - loss: 0.36 - ETA: 15s - loss: 0.36 - ETA: 13s - loss: 0.36 - ETA: 12s - loss: 0.36 - ETA: 11s - loss: 0.36 - ETA: 10s - loss: 0.36 - ETA: 9s - loss: 0.3670 - ETA: 8s - loss: 0.366 - ETA: 6s - loss: 0.366 - ETA: 5s - loss: 0.366 - ETA: 4s - loss: 0.366 - ETA: 3s - loss: 0.366 - ETA: 2s - loss: 0.366 - ETA: 1s - loss: 0.365 - ETA: 0s - loss: 0.365 - 128s 1s/step - loss: 0.3656 - val_loss: 0.2280\n",
      "\n",
      "Epoch 00012: val_loss did not improve from 0.17939\n",
      "Epoch 13/30\n",
      "111/111 [==============================] - ETA: 1:45 - loss: 0.335 - ETA: 1:45 - loss: 0.282 - ETA: 1:44 - loss: 0.278 - ETA: 1:42 - loss: 0.284 - ETA: 1:41 - loss: 0.283 - ETA: 1:40 - loss: 0.284 - ETA: 1:39 - loss: 0.281 - ETA: 1:38 - loss: 0.277 - ETA: 1:37 - loss: 0.274 - ETA: 1:36 - loss: 0.269 - ETA: 1:35 - loss: 0.269 - ETA: 1:34 - loss: 0.267 - ETA: 1:33 - loss: 0.265 - ETA: 1:33 - loss: 0.263 - ETA: 1:32 - loss: 0.260 - ETA: 1:31 - loss: 0.258 - ETA: 1:30 - loss: 0.256 - ETA: 1:30 - loss: 0.255 - ETA: 1:29 - loss: 0.254 - ETA: 1:28 - loss: 0.254 - ETA: 1:28 - loss: 0.254 - ETA: 1:27 - loss: 0.254 - ETA: 1:27 - loss: 0.254 - ETA: 1:26 - loss: 0.253 - ETA: 1:26 - loss: 0.253 - ETA: 1:26 - loss: 0.253 - ETA: 1:25 - loss: 0.254 - ETA: 1:25 - loss: 0.256 - ETA: 1:25 - loss: 0.258 - ETA: 1:24 - loss: 0.260 - ETA: 1:24 - loss: 0.262 - ETA: 1:23 - loss: 0.264 - ETA: 1:23 - loss: 0.265 - ETA: 1:22 - loss: 0.267 - ETA: 1:21 - loss: 0.268 - ETA: 1:21 - loss: 0.269 - ETA: 1:20 - loss: 0.270 - ETA: 1:19 - loss: 0.271 - ETA: 1:18 - loss: 0.272 - ETA: 1:17 - loss: 0.273 - ETA: 1:17 - loss: 0.273 - ETA: 1:16 - loss: 0.274 - ETA: 1:15 - loss: 0.275 - ETA: 1:15 - loss: 0.276 - ETA: 1:14 - loss: 0.276 - ETA: 1:13 - loss: 0.277 - ETA: 1:12 - loss: 0.278 - ETA: 1:11 - loss: 0.278 - ETA: 1:10 - loss: 0.279 - ETA: 1:09 - loss: 0.280 - ETA: 1:08 - loss: 0.281 - ETA: 1:07 - loss: 0.281 - ETA: 1:06 - loss: 0.282 - ETA: 1:05 - loss: 0.283 - ETA: 1:04 - loss: 0.284 - ETA: 1:03 - loss: 0.284 - ETA: 1:02 - loss: 0.285 - ETA: 1:01 - loss: 0.285 - ETA: 1:00 - loss: 0.286 - ETA: 59s - loss: 0.286 - ETA: 58s - loss: 0.28 - ETA: 56s - loss: 0.28 - ETA: 55s - loss: 0.28 - ETA: 54s - loss: 0.28 - ETA: 53s - loss: 0.28 - ETA: 52s - loss: 0.28 - ETA: 51s - loss: 0.28 - ETA: 50s - loss: 0.29 - ETA: 49s - loss: 0.29 - ETA: 47s - loss: 0.29 - ETA: 46s - loss: 0.29 - ETA: 45s - loss: 0.29 - ETA: 44s - loss: 0.29 - ETA: 43s - loss: 0.29 - ETA: 41s - loss: 0.29 - ETA: 40s - loss: 0.29 - ETA: 39s - loss: 0.29 - ETA: 38s - loss: 0.29 - ETA: 36s - loss: 0.29 - ETA: 35s - loss: 0.29 - ETA: 34s - loss: 0.29 - ETA: 33s - loss: 0.29 - ETA: 32s - loss: 0.29 - ETA: 30s - loss: 0.29 - ETA: 29s - loss: 0.29 - ETA: 28s - loss: 0.29 - ETA: 27s - loss: 0.29 - ETA: 26s - loss: 0.29 - ETA: 24s - loss: 0.29 - ETA: 23s - loss: 0.29 - ETA: 22s - loss: 0.29 - ETA: 21s - loss: 0.29 - ETA: 20s - loss: 0.29 - ETA: 19s - loss: 0.29 - ETA: 17s - loss: 0.29 - ETA: 16s - loss: 0.29 - ETA: 15s - loss: 0.29 - ETA: 14s - loss: 0.29 - ETA: 13s - loss: 0.29 - ETA: 12s - loss: 0.29 - ETA: 11s - loss: 0.29 - ETA: 10s - loss: 0.29 - ETA: 8s - loss: 0.2970 - ETA: 7s - loss: 0.297 - ETA: 6s - loss: 0.297 - ETA: 5s - loss: 0.297 - ETA: 4s - loss: 0.297 - ETA: 3s - loss: 0.297 - ETA: 2s - loss: 0.297 - ETA: 1s - loss: 0.298 - ETA: 0s - loss: 0.298 - 126s 1s/step - loss: 0.2984 - val_loss: 0.2234\n",
      "\n",
      "Epoch 00013: val_loss did not improve from 0.17939\n",
      "Epoch 14/30\n",
      "111/111 [==============================] - ETA: 2:19 - loss: 0.219 - ETA: 2:18 - loss: 0.235 - ETA: 2:18 - loss: 0.230 - ETA: 2:18 - loss: 0.223 - ETA: 2:15 - loss: 0.215 - ETA: 2:13 - loss: 0.209 - ETA: 2:12 - loss: 0.207 - ETA: 2:10 - loss: 0.205 - ETA: 2:09 - loss: 0.209 - ETA: 2:08 - loss: 0.216 - ETA: 2:07 - loss: 0.222 - ETA: 2:05 - loss: 0.227 - ETA: 2:04 - loss: 0.231 - ETA: 2:02 - loss: 0.239 - ETA: 2:01 - loss: 0.244 - ETA: 2:00 - loss: 0.248 - ETA: 1:59 - loss: 0.253 - ETA: 1:57 - loss: 0.257 - ETA: 1:56 - loss: 0.261 - ETA: 1:55 - loss: 0.264 - ETA: 1:53 - loss: 0.267 - ETA: 1:52 - loss: 0.269 - ETA: 1:51 - loss: 0.271 - ETA: 1:50 - loss: 0.273 - ETA: 1:49 - loss: 0.274 - ETA: 1:47 - loss: 0.276 - ETA: 1:46 - loss: 0.277 - ETA: 1:45 - loss: 0.278 - ETA: 1:44 - loss: 0.280 - ETA: 1:42 - loss: 0.281 - ETA: 1:41 - loss: 0.282 - ETA: 1:40 - loss: 0.284 - ETA: 1:38 - loss: 0.285 - ETA: 1:37 - loss: 0.287 - ETA: 1:36 - loss: 0.289 - ETA: 1:35 - loss: 0.291 - ETA: 1:34 - loss: 0.292 - ETA: 1:32 - loss: 0.293 - ETA: 1:31 - loss: 0.295 - ETA: 1:29 - loss: 0.296 - ETA: 1:28 - loss: 0.297 - ETA: 1:26 - loss: 0.299 - ETA: 1:24 - loss: 0.300 - ETA: 1:23 - loss: 0.301 - ETA: 1:21 - loss: 0.302 - ETA: 1:20 - loss: 0.303 - ETA: 1:18 - loss: 0.304 - ETA: 1:16 - loss: 0.306 - ETA: 1:15 - loss: 0.307 - ETA: 1:13 - loss: 0.309 - ETA: 1:12 - loss: 0.310 - ETA: 1:10 - loss: 0.311 - ETA: 1:09 - loss: 0.312 - ETA: 1:08 - loss: 0.313 - ETA: 1:06 - loss: 0.314 - ETA: 1:05 - loss: 0.316 - ETA: 1:04 - loss: 0.317 - ETA: 1:03 - loss: 0.318 - ETA: 1:01 - loss: 0.319 - ETA: 1:00 - loss: 0.320 - ETA: 59s - loss: 0.321 - ETA: 57s - loss: 0.32 - ETA: 56s - loss: 0.32 - ETA: 55s - loss: 0.32 - ETA: 54s - loss: 0.32 - ETA: 53s - loss: 0.32 - ETA: 51s - loss: 0.32 - ETA: 50s - loss: 0.32 - ETA: 49s - loss: 0.32 - ETA: 48s - loss: 0.32 - ETA: 47s - loss: 0.32 - ETA: 46s - loss: 0.33 - ETA: 44s - loss: 0.33 - ETA: 43s - loss: 0.33 - ETA: 42s - loss: 0.33 - ETA: 41s - loss: 0.33 - ETA: 40s - loss: 0.33 - ETA: 39s - loss: 0.33 - ETA: 38s - loss: 0.33 - ETA: 37s - loss: 0.33 - ETA: 35s - loss: 0.33 - ETA: 34s - loss: 0.33 - ETA: 33s - loss: 0.33 - ETA: 32s - loss: 0.33 - ETA: 31s - loss: 0.33 - ETA: 29s - loss: 0.33 - ETA: 28s - loss: 0.33 - ETA: 27s - loss: 0.33 - ETA: 26s - loss: 0.33 - ETA: 25s - loss: 0.34 - ETA: 24s - loss: 0.34 - ETA: 22s - loss: 0.34 - ETA: 21s - loss: 0.34 - ETA: 20s - loss: 0.34 - ETA: 19s - loss: 0.34 - ETA: 18s - loss: 0.34 - ETA: 17s - loss: 0.34 - ETA: 15s - loss: 0.34 - ETA: 14s - loss: 0.34 - ETA: 13s - loss: 0.34 - ETA: 12s - loss: 0.34 - ETA: 10s - loss: 0.34 - ETA: 9s - loss: 0.3444 - ETA: 8s - loss: 0.344 - ETA: 7s - loss: 0.344 - ETA: 6s - loss: 0.345 - ETA: 4s - loss: 0.345 - ETA: 3s - loss: 0.345 - ETA: 2s - loss: 0.345 - ETA: 1s - loss: 0.345 - ETA: 0s - loss: 0.346 - 137s 1s/step - loss: 0.3463 - val_loss: 0.2118\n",
      "\n",
      "Epoch 00014: val_loss did not improve from 0.17939\n",
      "Epoch 15/30\n",
      "111/111 [==============================] - ETA: 1:58 - loss: 0.056 - ETA: 2:07 - loss: 0.112 - ETA: 2:00 - loss: 0.119 - ETA: 1:57 - loss: 0.126 - ETA: 1:53 - loss: 0.137 - ETA: 1:50 - loss: 0.154 - ETA: 1:47 - loss: 0.171 - ETA: 1:45 - loss: 0.184 - ETA: 1:43 - loss: 0.192 - ETA: 1:42 - loss: 0.198 - ETA: 1:40 - loss: 0.204 - ETA: 1:38 - loss: 0.210 - ETA: 1:37 - loss: 0.215 - ETA: 1:36 - loss: 0.218 - ETA: 1:34 - loss: 0.223 - ETA: 1:33 - loss: 0.227 - ETA: 1:32 - loss: 0.231 - ETA: 1:31 - loss: 0.234 - ETA: 1:30 - loss: 0.237 - ETA: 1:30 - loss: 0.240 - ETA: 1:29 - loss: 0.241 - ETA: 1:28 - loss: 0.243 - ETA: 1:27 - loss: 0.244 - ETA: 1:26 - loss: 0.245 - ETA: 1:25 - loss: 0.246 - ETA: 1:25 - loss: 0.247 - ETA: 1:24 - loss: 0.248 - ETA: 1:24 - loss: 0.248 - ETA: 1:23 - loss: 0.249 - ETA: 1:23 - loss: 0.249 - ETA: 1:22 - loss: 0.249 - ETA: 1:21 - loss: 0.249 - ETA: 1:21 - loss: 0.249 - ETA: 1:20 - loss: 0.248 - ETA: 1:20 - loss: 0.248 - ETA: 1:19 - loss: 0.248 - ETA: 1:19 - loss: 0.248 - ETA: 1:18 - loss: 0.248 - ETA: 1:18 - loss: 0.248 - ETA: 1:17 - loss: 0.247 - ETA: 1:17 - loss: 0.248 - ETA: 1:16 - loss: 0.249 - ETA: 1:15 - loss: 0.250 - ETA: 1:14 - loss: 0.250 - ETA: 1:13 - loss: 0.251 - ETA: 1:12 - loss: 0.252 - ETA: 1:11 - loss: 0.253 - ETA: 1:11 - loss: 0.254 - ETA: 1:10 - loss: 0.254 - ETA: 1:09 - loss: 0.255 - ETA: 1:08 - loss: 0.256 - ETA: 1:07 - loss: 0.256 - ETA: 1:06 - loss: 0.257 - ETA: 1:05 - loss: 0.257 - ETA: 1:04 - loss: 0.257 - ETA: 1:03 - loss: 0.258 - ETA: 1:02 - loss: 0.258 - ETA: 1:01 - loss: 0.258 - ETA: 1:00 - loss: 0.259 - ETA: 59s - loss: 0.259 - ETA: 57s - loss: 0.25 - ETA: 56s - loss: 0.26 - ETA: 55s - loss: 0.26 - ETA: 54s - loss: 0.26 - ETA: 53s - loss: 0.26 - ETA: 52s - loss: 0.26 - ETA: 50s - loss: 0.26 - ETA: 49s - loss: 0.26 - ETA: 48s - loss: 0.26 - ETA: 47s - loss: 0.26 - ETA: 45s - loss: 0.26 - ETA: 44s - loss: 0.26 - ETA: 43s - loss: 0.26 - ETA: 42s - loss: 0.26 - ETA: 40s - loss: 0.26 - ETA: 39s - loss: 0.26 - ETA: 38s - loss: 0.26 - ETA: 37s - loss: 0.26 - ETA: 35s - loss: 0.26 - ETA: 34s - loss: 0.26 - ETA: 33s - loss: 0.26 - ETA: 32s - loss: 0.26 - ETA: 31s - loss: 0.26 - ETA: 30s - loss: 0.26 - ETA: 28s - loss: 0.26 - ETA: 27s - loss: 0.26 - ETA: 26s - loss: 0.26 - ETA: 25s - loss: 0.26 - ETA: 24s - loss: 0.27 - ETA: 23s - loss: 0.27 - ETA: 22s - loss: 0.27 - ETA: 21s - loss: 0.27 - ETA: 19s - loss: 0.27 - ETA: 18s - loss: 0.27 - ETA: 17s - loss: 0.27 - ETA: 16s - loss: 0.27 - ETA: 15s - loss: 0.27 - ETA: 14s - loss: 0.27 - ETA: 13s - loss: 0.27 - ETA: 12s - loss: 0.27 - ETA: 11s - loss: 0.27 - ETA: 9s - loss: 0.2751 - ETA: 8s - loss: 0.275 - ETA: 7s - loss: 0.275 - ETA: 6s - loss: 0.276 - ETA: 5s - loss: 0.276 - ETA: 4s - loss: 0.276 - ETA: 3s - loss: 0.276 - ETA: 2s - loss: 0.277 - ETA: 1s - loss: 0.277 - ETA: 0s - loss: 0.277 - 127s 1s/step - loss: 0.2781 - val_loss: 0.2084\n",
      "\n",
      "Epoch 00015: val_loss did not improve from 0.17939\n",
      "Epoch 16/30\n",
      "111/111 [==============================] - ETA: 2:31 - loss: 0.898 - ETA: 2:20 - loss: 0.747 - ETA: 2:16 - loss: 0.645 - ETA: 2:13 - loss: 0.604 - ETA: 2:12 - loss: 0.563 - ETA: 2:11 - loss: 0.527 - ETA: 2:11 - loss: 0.502 - ETA: 2:09 - loss: 0.480 - ETA: 2:08 - loss: 0.461 - ETA: 2:07 - loss: 0.443 - ETA: 2:05 - loss: 0.429 - ETA: 2:04 - loss: 0.418 - ETA: 2:02 - loss: 0.409 - ETA: 2:01 - loss: 0.403 - ETA: 2:00 - loss: 0.398 - ETA: 1:58 - loss: 0.394 - ETA: 1:57 - loss: 0.390 - ETA: 1:56 - loss: 0.385 - ETA: 1:55 - loss: 0.382 - ETA: 1:54 - loss: 0.379 - ETA: 1:52 - loss: 0.377 - ETA: 1:51 - loss: 0.374 - ETA: 1:50 - loss: 0.371 - ETA: 1:49 - loss: 0.369 - ETA: 1:47 - loss: 0.367 - ETA: 1:46 - loss: 0.365 - ETA: 1:45 - loss: 0.363 - ETA: 1:44 - loss: 0.362 - ETA: 1:42 - loss: 0.360 - ETA: 1:41 - loss: 0.359 - ETA: 1:39 - loss: 0.357 - ETA: 1:37 - loss: 0.355 - ETA: 1:36 - loss: 0.354 - ETA: 1:34 - loss: 0.352 - ETA: 1:33 - loss: 0.351 - ETA: 1:31 - loss: 0.350 - ETA: 1:29 - loss: 0.348 - ETA: 1:28 - loss: 0.347 - ETA: 1:26 - loss: 0.346 - ETA: 1:25 - loss: 0.345 - ETA: 1:23 - loss: 0.344 - ETA: 1:21 - loss: 0.344 - ETA: 1:20 - loss: 0.343 - ETA: 1:18 - loss: 0.343 - ETA: 1:17 - loss: 0.343 - ETA: 1:15 - loss: 0.342 - ETA: 1:14 - loss: 0.342 - ETA: 1:12 - loss: 0.342 - ETA: 1:11 - loss: 0.341 - ETA: 1:10 - loss: 0.341 - ETA: 1:08 - loss: 0.341 - ETA: 1:07 - loss: 0.340 - ETA: 1:06 - loss: 0.340 - ETA: 1:04 - loss: 0.340 - ETA: 1:03 - loss: 0.340 - ETA: 1:02 - loss: 0.340 - ETA: 1:00 - loss: 0.340 - ETA: 59s - loss: 0.339 - ETA: 58s - loss: 0.33 - ETA: 57s - loss: 0.33 - ETA: 55s - loss: 0.33 - ETA: 54s - loss: 0.33 - ETA: 53s - loss: 0.33 - ETA: 52s - loss: 0.33 - ETA: 51s - loss: 0.33 - ETA: 49s - loss: 0.33 - ETA: 48s - loss: 0.33 - ETA: 47s - loss: 0.33 - ETA: 46s - loss: 0.33 - ETA: 45s - loss: 0.33 - ETA: 44s - loss: 0.33 - ETA: 43s - loss: 0.33 - ETA: 42s - loss: 0.33 - ETA: 41s - loss: 0.33 - ETA: 40s - loss: 0.33 - ETA: 39s - loss: 0.33 - ETA: 38s - loss: 0.33 - ETA: 37s - loss: 0.33 - ETA: 35s - loss: 0.33 - ETA: 35s - loss: 0.33 - ETA: 34s - loss: 0.33 - ETA: 33s - loss: 0.33 - ETA: 32s - loss: 0.33 - ETA: 31s - loss: 0.33 - ETA: 30s - loss: 0.33 - ETA: 29s - loss: 0.33 - ETA: 28s - loss: 0.33 - ETA: 26s - loss: 0.33 - ETA: 25s - loss: 0.33 - ETA: 24s - loss: 0.33 - ETA: 23s - loss: 0.33 - ETA: 22s - loss: 0.33 - ETA: 21s - loss: 0.33 - ETA: 20s - loss: 0.33 - ETA: 18s - loss: 0.33 - ETA: 17s - loss: 0.33 - ETA: 16s - loss: 0.33 - ETA: 15s - loss: 0.33 - ETA: 14s - loss: 0.32 - ETA: 13s - loss: 0.32 - ETA: 11s - loss: 0.32 - ETA: 10s - loss: 0.32 - ETA: 9s - loss: 0.3289 - ETA: 8s - loss: 0.328 - ETA: 7s - loss: 0.328 - ETA: 5s - loss: 0.328 - ETA: 4s - loss: 0.327 - ETA: 3s - loss: 0.327 - ETA: 2s - loss: 0.327 - ETA: 1s - loss: 0.327 - ETA: 0s - loss: 0.327 - 135s 1s/step - loss: 0.3269 - val_loss: 0.2062\n",
      "\n",
      "Epoch 00016: val_loss did not improve from 0.17939\n",
      "Epoch 17/30\n",
      "111/111 [==============================] - ETA: 2:15 - loss: 0.325 - ETA: 2:08 - loss: 0.361 - ETA: 2:00 - loss: 0.359 - ETA: 1:57 - loss: 0.345 - ETA: 1:54 - loss: 0.352 - ETA: 1:52 - loss: 0.355 - ETA: 1:50 - loss: 0.354 - ETA: 1:50 - loss: 0.353 - ETA: 1:48 - loss: 0.350 - ETA: 1:46 - loss: 0.348 - ETA: 1:45 - loss: 0.345 - ETA: 1:43 - loss: 0.343 - ETA: 1:42 - loss: 0.342 - ETA: 1:40 - loss: 0.340 - ETA: 1:39 - loss: 0.338 - ETA: 1:38 - loss: 0.335 - ETA: 1:36 - loss: 0.332 - ETA: 1:35 - loss: 0.329 - ETA: 1:33 - loss: 0.327 - ETA: 1:32 - loss: 0.326 - ETA: 1:31 - loss: 0.325 - ETA: 1:29 - loss: 0.324 - ETA: 1:28 - loss: 0.324 - ETA: 1:27 - loss: 0.323 - ETA: 1:26 - loss: 0.323 - ETA: 1:25 - loss: 0.323 - ETA: 1:24 - loss: 0.323 - ETA: 1:23 - loss: 0.323 - ETA: 1:22 - loss: 0.324 - ETA: 1:21 - loss: 0.325 - ETA: 1:20 - loss: 0.326 - ETA: 1:19 - loss: 0.326 - ETA: 1:18 - loss: 0.326 - ETA: 1:18 - loss: 0.326 - ETA: 1:17 - loss: 0.326 - ETA: 1:16 - loss: 0.326 - ETA: 1:16 - loss: 0.326 - ETA: 1:15 - loss: 0.325 - ETA: 1:14 - loss: 0.325 - ETA: 1:14 - loss: 0.324 - ETA: 1:13 - loss: 0.324 - ETA: 1:12 - loss: 0.324 - ETA: 1:12 - loss: 0.323 - ETA: 1:11 - loss: 0.323 - ETA: 1:10 - loss: 0.322 - ETA: 1:09 - loss: 0.322 - ETA: 1:09 - loss: 0.321 - ETA: 1:08 - loss: 0.321 - ETA: 1:07 - loss: 0.321 - ETA: 1:06 - loss: 0.320 - ETA: 1:05 - loss: 0.320 - ETA: 1:04 - loss: 0.320 - ETA: 1:03 - loss: 0.320 - ETA: 1:02 - loss: 0.320 - ETA: 1:01 - loss: 0.320 - ETA: 1:01 - loss: 0.320 - ETA: 1:00 - loss: 0.319 - ETA: 59s - loss: 0.319 - ETA: 58s - loss: 0.31 - ETA: 57s - loss: 0.32 - ETA: 56s - loss: 0.32 - ETA: 55s - loss: 0.32 - ETA: 54s - loss: 0.32 - ETA: 53s - loss: 0.32 - ETA: 52s - loss: 0.32 - ETA: 51s - loss: 0.32 - ETA: 50s - loss: 0.32 - ETA: 49s - loss: 0.32 - ETA: 48s - loss: 0.32 - ETA: 47s - loss: 0.32 - ETA: 46s - loss: 0.32 - ETA: 44s - loss: 0.32 - ETA: 43s - loss: 0.32 - ETA: 42s - loss: 0.32 - ETA: 41s - loss: 0.32 - ETA: 40s - loss: 0.32 - ETA: 39s - loss: 0.32 - ETA: 38s - loss: 0.32 - ETA: 37s - loss: 0.32 - ETA: 35s - loss: 0.32 - ETA: 34s - loss: 0.32 - ETA: 33s - loss: 0.32 - ETA: 32s - loss: 0.32 - ETA: 31s - loss: 0.32 - ETA: 30s - loss: 0.32 - ETA: 28s - loss: 0.32 - ETA: 27s - loss: 0.32 - ETA: 26s - loss: 0.32 - ETA: 25s - loss: 0.32 - ETA: 24s - loss: 0.32 - ETA: 22s - loss: 0.32 - ETA: 21s - loss: 0.32 - ETA: 20s - loss: 0.32 - ETA: 19s - loss: 0.32 - ETA: 18s - loss: 0.32 - ETA: 17s - loss: 0.32 - ETA: 15s - loss: 0.32 - ETA: 14s - loss: 0.32 - ETA: 13s - loss: 0.32 - ETA: 12s - loss: 0.32 - ETA: 11s - loss: 0.32 - ETA: 10s - loss: 0.32 - ETA: 8s - loss: 0.3259 - ETA: 7s - loss: 0.326 - ETA: 6s - loss: 0.326 - ETA: 5s - loss: 0.326 - ETA: 4s - loss: 0.326 - ETA: 3s - loss: 0.326 - ETA: 2s - loss: 0.326 - ETA: 1s - loss: 0.326 - ETA: 0s - loss: 0.326 - 126s 1s/step - loss: 0.3262 - val_loss: 0.1921\n",
      "\n",
      "Epoch 00017: val_loss did not improve from 0.17939\n",
      "Epoch 18/30\n",
      "111/111 [==============================] - ETA: 2:19 - loss: 0.265 - ETA: 2:19 - loss: 0.222 - ETA: 2:23 - loss: 0.206 - ETA: 2:21 - loss: 0.199 - ETA: 2:21 - loss: 0.195 - ETA: 2:18 - loss: 0.197 - ETA: 2:16 - loss: 0.199 - ETA: 2:15 - loss: 0.199 - ETA: 2:13 - loss: 0.198 - ETA: 2:12 - loss: 0.196 - ETA: 2:10 - loss: 0.195 - ETA: 2:09 - loss: 0.193 - ETA: 2:07 - loss: 0.191 - ETA: 2:06 - loss: 0.192 - ETA: 2:04 - loss: 0.193 - ETA: 2:04 - loss: 0.193 - ETA: 2:02 - loss: 0.194 - ETA: 2:02 - loss: 0.196 - ETA: 2:02 - loss: 0.197 - ETA: 2:01 - loss: 0.199 - ETA: 2:01 - loss: 0.201 - ETA: 2:00 - loss: 0.203 - ETA: 1:58 - loss: 0.205 - ETA: 1:57 - loss: 0.207 - ETA: 1:56 - loss: 0.209 - ETA: 1:56 - loss: 0.211 - ETA: 1:54 - loss: 0.212 - ETA: 1:52 - loss: 0.214 - ETA: 1:51 - loss: 0.215 - ETA: 1:49 - loss: 0.216 - ETA: 1:47 - loss: 0.217 - ETA: 1:46 - loss: 0.218 - ETA: 1:44 - loss: 0.219 - ETA: 1:43 - loss: 0.220 - ETA: 1:41 - loss: 0.221 - ETA: 1:40 - loss: 0.221 - ETA: 1:38 - loss: 0.222 - ETA: 1:37 - loss: 0.223 - ETA: 1:35 - loss: 0.225 - ETA: 1:34 - loss: 0.227 - ETA: 1:32 - loss: 0.228 - ETA: 1:30 - loss: 0.229 - ETA: 1:28 - loss: 0.231 - ETA: 1:27 - loss: 0.232 - ETA: 1:25 - loss: 0.233 - ETA: 1:24 - loss: 0.234 - ETA: 1:22 - loss: 0.235 - ETA: 1:20 - loss: 0.236 - ETA: 1:19 - loss: 0.237 - ETA: 1:17 - loss: 0.239 - ETA: 1:15 - loss: 0.240 - ETA: 1:14 - loss: 0.241 - ETA: 1:12 - loss: 0.242 - ETA: 1:10 - loss: 0.243 - ETA: 1:09 - loss: 0.245 - ETA: 1:07 - loss: 0.246 - ETA: 1:06 - loss: 0.247 - ETA: 1:04 - loss: 0.248 - ETA: 1:03 - loss: 0.250 - ETA: 1:01 - loss: 0.251 - ETA: 1:00 - loss: 0.252 - ETA: 59s - loss: 0.253 - ETA: 57s - loss: 0.25 - ETA: 56s - loss: 0.25 - ETA: 55s - loss: 0.25 - ETA: 53s - loss: 0.25 - ETA: 52s - loss: 0.25 - ETA: 51s - loss: 0.25 - ETA: 49s - loss: 0.25 - ETA: 48s - loss: 0.25 - ETA: 47s - loss: 0.26 - ETA: 45s - loss: 0.26 - ETA: 44s - loss: 0.26 - ETA: 43s - loss: 0.26 - ETA: 42s - loss: 0.26 - ETA: 40s - loss: 0.26 - ETA: 39s - loss: 0.26 - ETA: 38s - loss: 0.26 - ETA: 37s - loss: 0.26 - ETA: 36s - loss: 0.26 - ETA: 35s - loss: 0.26 - ETA: 33s - loss: 0.26 - ETA: 32s - loss: 0.26 - ETA: 31s - loss: 0.26 - ETA: 30s - loss: 0.26 - ETA: 29s - loss: 0.26 - ETA: 28s - loss: 0.26 - ETA: 27s - loss: 0.26 - ETA: 25s - loss: 0.26 - ETA: 24s - loss: 0.26 - ETA: 23s - loss: 0.26 - ETA: 22s - loss: 0.26 - ETA: 21s - loss: 0.26 - ETA: 20s - loss: 0.27 - ETA: 18s - loss: 0.27 - ETA: 17s - loss: 0.27 - ETA: 16s - loss: 0.27 - ETA: 15s - loss: 0.27 - ETA: 14s - loss: 0.27 - ETA: 13s - loss: 0.27 - ETA: 11s - loss: 0.27 - ETA: 10s - loss: 0.27 - ETA: 9s - loss: 0.2743 - ETA: 8s - loss: 0.274 - ETA: 7s - loss: 0.275 - ETA: 5s - loss: 0.275 - ETA: 4s - loss: 0.275 - ETA: 3s - loss: 0.276 - ETA: 2s - loss: 0.276 - ETA: 1s - loss: 0.276 - ETA: 0s - loss: 0.277 - 135s 1s/step - loss: 0.2775 - val_loss: 0.1972\n",
      "\n",
      "Epoch 00018: val_loss did not improve from 0.17939\n",
      "Epoch 19/30\n",
      "111/111 [==============================] - ETA: 2:21 - loss: 0.373 - ETA: 2:25 - loss: 0.315 - ETA: 2:21 - loss: 0.299 - ETA: 2:16 - loss: 0.287 - ETA: 2:17 - loss: 0.276 - ETA: 2:14 - loss: 0.271 - ETA: 2:12 - loss: 0.270 - ETA: 2:11 - loss: 0.271 - ETA: 2:10 - loss: 0.272 - ETA: 2:07 - loss: 0.272 - ETA: 2:06 - loss: 0.270 - ETA: 2:05 - loss: 0.269 - ETA: 2:04 - loss: 0.266 - ETA: 2:03 - loss: 0.268 - ETA: 2:01 - loss: 0.270 - ETA: 2:00 - loss: 0.272 - ETA: 1:59 - loss: 0.275 - ETA: 1:57 - loss: 0.279 - ETA: 1:55 - loss: 0.282 - ETA: 1:53 - loss: 0.284 - ETA: 1:51 - loss: 0.286 - ETA: 1:49 - loss: 0.288 - ETA: 1:47 - loss: 0.290 - ETA: 1:45 - loss: 0.291 - ETA: 1:43 - loss: 0.294 - ETA: 1:41 - loss: 0.297 - ETA: 1:39 - loss: 0.299 - ETA: 1:37 - loss: 0.301 - ETA: 1:35 - loss: 0.303 - ETA: 1:34 - loss: 0.304 - ETA: 1:32 - loss: 0.305 - ETA: 1:31 - loss: 0.306 - ETA: 1:29 - loss: 0.307 - ETA: 1:27 - loss: 0.308 - ETA: 1:26 - loss: 0.309 - ETA: 1:24 - loss: 0.310 - ETA: 1:23 - loss: 0.310 - ETA: 1:21 - loss: 0.311 - ETA: 1:20 - loss: 0.312 - ETA: 1:19 - loss: 0.312 - ETA: 1:17 - loss: 0.313 - ETA: 1:16 - loss: 0.313 - ETA: 1:14 - loss: 0.313 - ETA: 1:13 - loss: 0.313 - ETA: 1:12 - loss: 0.314 - ETA: 1:11 - loss: 0.314 - ETA: 1:09 - loss: 0.315 - ETA: 1:08 - loss: 0.315 - ETA: 1:07 - loss: 0.316 - ETA: 1:06 - loss: 0.316 - ETA: 1:05 - loss: 0.317 - ETA: 1:04 - loss: 0.317 - ETA: 1:02 - loss: 0.317 - ETA: 1:01 - loss: 0.317 - ETA: 1:00 - loss: 0.318 - ETA: 59s - loss: 0.318 - ETA: 58s - loss: 0.31 - ETA: 58s - loss: 0.31 - ETA: 57s - loss: 0.31 - ETA: 56s - loss: 0.31 - ETA: 55s - loss: 0.31 - ETA: 54s - loss: 0.31 - ETA: 53s - loss: 0.31 - ETA: 52s - loss: 0.31 - ETA: 51s - loss: 0.31 - ETA: 50s - loss: 0.31 - ETA: 49s - loss: 0.31 - ETA: 48s - loss: 0.31 - ETA: 47s - loss: 0.31 - ETA: 46s - loss: 0.31 - ETA: 45s - loss: 0.31 - ETA: 44s - loss: 0.31 - ETA: 43s - loss: 0.31 - ETA: 42s - loss: 0.31 - ETA: 41s - loss: 0.31 - ETA: 40s - loss: 0.31 - ETA: 39s - loss: 0.31 - ETA: 38s - loss: 0.31 - ETA: 37s - loss: 0.31 - ETA: 36s - loss: 0.31 - ETA: 35s - loss: 0.31 - ETA: 34s - loss: 0.31 - ETA: 33s - loss: 0.31 - ETA: 32s - loss: 0.31 - ETA: 31s - loss: 0.31 - ETA: 30s - loss: 0.31 - ETA: 29s - loss: 0.31 - ETA: 27s - loss: 0.31 - ETA: 26s - loss: 0.31 - ETA: 25s - loss: 0.31 - ETA: 24s - loss: 0.31 - ETA: 23s - loss: 0.31 - ETA: 21s - loss: 0.31 - ETA: 20s - loss: 0.31 - ETA: 19s - loss: 0.31 - ETA: 18s - loss: 0.31 - ETA: 17s - loss: 0.31 - ETA: 15s - loss: 0.31 - ETA: 14s - loss: 0.31 - ETA: 13s - loss: 0.31 - ETA: 12s - loss: 0.31 - ETA: 10s - loss: 0.31 - ETA: 9s - loss: 0.3192 - ETA: 8s - loss: 0.319 - ETA: 7s - loss: 0.319 - ETA: 6s - loss: 0.319 - ETA: 4s - loss: 0.319 - ETA: 3s - loss: 0.319 - ETA: 2s - loss: 0.319 - ETA: 1s - loss: 0.319 - ETA: 0s - loss: 0.319 - 136s 1s/step - loss: 0.3199 - val_loss: 0.1949\n",
      "\n",
      "Epoch 00019: val_loss did not improve from 0.17939\n",
      "Epoch 20/30\n",
      "111/111 [==============================] - ETA: 1:49 - loss: 0.106 - ETA: 1:46 - loss: 0.190 - ETA: 1:45 - loss: 0.235 - ETA: 1:47 - loss: 0.247 - ETA: 1:46 - loss: 0.250 - ETA: 1:45 - loss: 0.248 - ETA: 1:44 - loss: 0.252 - ETA: 1:44 - loss: 0.258 - ETA: 1:44 - loss: 0.262 - ETA: 1:43 - loss: 0.264 - ETA: 1:41 - loss: 0.266 - ETA: 1:40 - loss: 0.267 - ETA: 1:42 - loss: 0.268 - ETA: 1:41 - loss: 0.267 - ETA: 1:40 - loss: 0.275 - ETA: 1:39 - loss: 0.280 - ETA: 1:38 - loss: 0.285 - ETA: 1:37 - loss: 0.288 - ETA: 1:36 - loss: 0.291 - ETA: 1:36 - loss: 0.293 - ETA: 1:35 - loss: 0.294 - ETA: 1:34 - loss: 0.295 - ETA: 1:33 - loss: 0.296 - ETA: 1:32 - loss: 0.296 - ETA: 1:31 - loss: 0.295 - ETA: 1:31 - loss: 0.295 - ETA: 1:31 - loss: 0.294 - ETA: 1:30 - loss: 0.294 - ETA: 1:29 - loss: 0.293 - ETA: 1:29 - loss: 0.293 - ETA: 1:29 - loss: 0.293 - ETA: 1:29 - loss: 0.292 - ETA: 1:28 - loss: 0.292 - ETA: 1:27 - loss: 0.291 - ETA: 1:26 - loss: 0.291 - ETA: 1:26 - loss: 0.290 - ETA: 1:25 - loss: 0.290 - ETA: 1:24 - loss: 0.289 - ETA: 1:23 - loss: 0.288 - ETA: 1:22 - loss: 0.288 - ETA: 1:21 - loss: 0.288 - ETA: 1:20 - loss: 0.287 - ETA: 1:20 - loss: 0.287 - ETA: 1:19 - loss: 0.287 - ETA: 1:18 - loss: 0.286 - ETA: 1:17 - loss: 0.286 - ETA: 1:16 - loss: 0.286 - ETA: 1:15 - loss: 0.285 - ETA: 1:14 - loss: 0.285 - ETA: 1:13 - loss: 0.285 - ETA: 1:11 - loss: 0.285 - ETA: 1:10 - loss: 0.285 - ETA: 1:09 - loss: 0.285 - ETA: 1:08 - loss: 0.285 - ETA: 1:07 - loss: 0.285 - ETA: 1:06 - loss: 0.286 - ETA: 1:05 - loss: 0.286 - ETA: 1:04 - loss: 0.286 - ETA: 1:03 - loss: 0.286 - ETA: 1:02 - loss: 0.287 - ETA: 1:00 - loss: 0.287 - ETA: 59s - loss: 0.287 - ETA: 58s - loss: 0.28 - ETA: 57s - loss: 0.28 - ETA: 56s - loss: 0.28 - ETA: 54s - loss: 0.28 - ETA: 53s - loss: 0.28 - ETA: 52s - loss: 0.28 - ETA: 51s - loss: 0.28 - ETA: 50s - loss: 0.28 - ETA: 48s - loss: 0.29 - ETA: 47s - loss: 0.29 - ETA: 46s - loss: 0.29 - ETA: 45s - loss: 0.29 - ETA: 43s - loss: 0.29 - ETA: 42s - loss: 0.29 - ETA: 41s - loss: 0.29 - ETA: 40s - loss: 0.29 - ETA: 38s - loss: 0.29 - ETA: 37s - loss: 0.29 - ETA: 36s - loss: 0.29 - ETA: 35s - loss: 0.29 - ETA: 33s - loss: 0.29 - ETA: 32s - loss: 0.29 - ETA: 31s - loss: 0.29 - ETA: 29s - loss: 0.29 - ETA: 28s - loss: 0.29 - ETA: 27s - loss: 0.29 - ETA: 26s - loss: 0.29 - ETA: 24s - loss: 0.29 - ETA: 23s - loss: 0.29 - ETA: 22s - loss: 0.29 - ETA: 21s - loss: 0.29 - ETA: 20s - loss: 0.29 - ETA: 18s - loss: 0.29 - ETA: 17s - loss: 0.29 - ETA: 16s - loss: 0.29 - ETA: 15s - loss: 0.29 - ETA: 13s - loss: 0.29 - ETA: 12s - loss: 0.29 - ETA: 11s - loss: 0.29 - ETA: 10s - loss: 0.29 - ETA: 9s - loss: 0.2981 - ETA: 8s - loss: 0.298 - ETA: 6s - loss: 0.298 - ETA: 5s - loss: 0.298 - ETA: 4s - loss: 0.298 - ETA: 3s - loss: 0.298 - ETA: 2s - loss: 0.298 - ETA: 1s - loss: 0.298 - ETA: 0s - loss: 0.298 - 130s 1s/step - loss: 0.2989 - val_loss: 0.1943\n",
      "\n",
      "Epoch 00020: val_loss did not improve from 0.17939\n",
      "Epoch 21/30\n",
      "111/111 [==============================] - ETA: 2:21 - loss: 0.174 - ETA: 2:15 - loss: 0.193 - ETA: 2:14 - loss: 0.212 - ETA: 2:14 - loss: 0.231 - ETA: 2:13 - loss: 0.266 - ETA: 2:10 - loss: 0.281 - ETA: 2:09 - loss: 0.286 - ETA: 2:08 - loss: 0.287 - ETA: 2:07 - loss: 0.287 - ETA: 2:06 - loss: 0.286 - ETA: 2:05 - loss: 0.289 - ETA: 2:04 - loss: 0.290 - ETA: 2:02 - loss: 0.290 - ETA: 2:01 - loss: 0.290 - ETA: 2:00 - loss: 0.290 - ETA: 1:58 - loss: 0.290 - ETA: 1:57 - loss: 0.291 - ETA: 1:56 - loss: 0.291 - ETA: 1:54 - loss: 0.291 - ETA: 1:53 - loss: 0.291 - ETA: 1:52 - loss: 0.291 - ETA: 1:50 - loss: 0.291 - ETA: 1:49 - loss: 0.291 - ETA: 1:48 - loss: 0.291 - ETA: 1:47 - loss: 0.291 - ETA: 1:45 - loss: 0.291 - ETA: 1:44 - loss: 0.291 - ETA: 1:43 - loss: 0.290 - ETA: 1:42 - loss: 0.290 - ETA: 1:41 - loss: 0.290 - ETA: 1:40 - loss: 0.290 - ETA: 1:39 - loss: 0.290 - ETA: 1:38 - loss: 0.291 - ETA: 1:38 - loss: 0.292 - ETA: 1:36 - loss: 0.293 - ETA: 1:35 - loss: 0.293 - ETA: 1:34 - loss: 0.294 - ETA: 1:32 - loss: 0.294 - ETA: 1:31 - loss: 0.295 - ETA: 1:30 - loss: 0.295 - ETA: 1:28 - loss: 0.295 - ETA: 1:27 - loss: 0.295 - ETA: 1:25 - loss: 0.295 - ETA: 1:24 - loss: 0.295 - ETA: 1:23 - loss: 0.295 - ETA: 1:21 - loss: 0.295 - ETA: 1:20 - loss: 0.296 - ETA: 1:19 - loss: 0.296 - ETA: 1:17 - loss: 0.295 - ETA: 1:16 - loss: 0.295 - ETA: 1:15 - loss: 0.295 - ETA: 1:13 - loss: 0.295 - ETA: 1:12 - loss: 0.295 - ETA: 1:10 - loss: 0.295 - ETA: 1:09 - loss: 0.295 - ETA: 1:08 - loss: 0.296 - ETA: 1:06 - loss: 0.296 - ETA: 1:05 - loss: 0.296 - ETA: 1:03 - loss: 0.296 - ETA: 1:02 - loss: 0.296 - ETA: 1:00 - loss: 0.296 - ETA: 59s - loss: 0.296 - ETA: 57s - loss: 0.29 - ETA: 56s - loss: 0.29 - ETA: 55s - loss: 0.29 - ETA: 53s - loss: 0.29 - ETA: 52s - loss: 0.29 - ETA: 51s - loss: 0.29 - ETA: 49s - loss: 0.29 - ETA: 48s - loss: 0.29 - ETA: 47s - loss: 0.29 - ETA: 45s - loss: 0.29 - ETA: 44s - loss: 0.29 - ETA: 43s - loss: 0.29 - ETA: 42s - loss: 0.29 - ETA: 40s - loss: 0.29 - ETA: 39s - loss: 0.29 - ETA: 38s - loss: 0.29 - ETA: 37s - loss: 0.29 - ETA: 35s - loss: 0.29 - ETA: 34s - loss: 0.29 - ETA: 33s - loss: 0.29 - ETA: 32s - loss: 0.29 - ETA: 31s - loss: 0.29 - ETA: 29s - loss: 0.29 - ETA: 28s - loss: 0.29 - ETA: 27s - loss: 0.29 - ETA: 26s - loss: 0.29 - ETA: 25s - loss: 0.29 - ETA: 24s - loss: 0.29 - ETA: 23s - loss: 0.29 - ETA: 21s - loss: 0.29 - ETA: 20s - loss: 0.29 - ETA: 19s - loss: 0.29 - ETA: 18s - loss: 0.29 - ETA: 17s - loss: 0.29 - ETA: 16s - loss: 0.29 - ETA: 15s - loss: 0.29 - ETA: 13s - loss: 0.29 - ETA: 12s - loss: 0.29 - ETA: 11s - loss: 0.29 - ETA: 10s - loss: 0.29 - ETA: 9s - loss: 0.2958 - ETA: 8s - loss: 0.295 - ETA: 7s - loss: 0.295 - ETA: 5s - loss: 0.295 - ETA: 4s - loss: 0.295 - ETA: 3s - loss: 0.295 - ETA: 2s - loss: 0.295 - ETA: 1s - loss: 0.295 - ETA: 0s - loss: 0.295 - 132s 1s/step - loss: 0.2953 - val_loss: 0.2029\n",
      "\n",
      "Epoch 00021: val_loss did not improve from 0.17939\n",
      "Epoch 22/30\n",
      "111/111 [==============================] - ETA: 2:26 - loss: 0.210 - ETA: 2:14 - loss: 0.187 - ETA: 2:14 - loss: 0.224 - ETA: 2:14 - loss: 0.253 - ETA: 2:20 - loss: 0.263 - ETA: 2:22 - loss: 0.270 - ETA: 2:19 - loss: 0.271 - ETA: 2:17 - loss: 0.271 - ETA: 2:15 - loss: 0.270 - ETA: 2:12 - loss: 0.269 - ETA: 2:10 - loss: 0.271 - ETA: 2:08 - loss: 0.274 - ETA: 2:06 - loss: 0.276 - ETA: 2:05 - loss: 0.278 - ETA: 2:03 - loss: 0.280 - ETA: 2:02 - loss: 0.282 - ETA: 2:00 - loss: 0.283 - ETA: 1:59 - loss: 0.284 - ETA: 1:57 - loss: 0.285 - ETA: 1:55 - loss: 0.286 - ETA: 1:54 - loss: 0.286 - ETA: 1:52 - loss: 0.286 - ETA: 1:50 - loss: 0.285 - ETA: 1:48 - loss: 0.285 - ETA: 1:46 - loss: 0.285 - ETA: 1:45 - loss: 0.285 - ETA: 1:43 - loss: 0.286 - ETA: 1:41 - loss: 0.287 - ETA: 1:39 - loss: 0.287 - ETA: 1:37 - loss: 0.288 - ETA: 1:36 - loss: 0.291 - ETA: 1:34 - loss: 0.292 - ETA: 1:32 - loss: 0.294 - ETA: 1:31 - loss: 0.296 - ETA: 1:29 - loss: 0.297 - ETA: 1:27 - loss: 0.298 - ETA: 1:26 - loss: 0.299 - ETA: 1:24 - loss: 0.300 - ETA: 1:22 - loss: 0.300 - ETA: 1:21 - loss: 0.301 - ETA: 1:19 - loss: 0.302 - ETA: 1:18 - loss: 0.302 - ETA: 1:16 - loss: 0.303 - ETA: 1:15 - loss: 0.304 - ETA: 1:13 - loss: 0.305 - ETA: 1:12 - loss: 0.305 - ETA: 1:10 - loss: 0.306 - ETA: 1:09 - loss: 0.307 - ETA: 1:08 - loss: 0.307 - ETA: 1:06 - loss: 0.308 - ETA: 1:05 - loss: 0.308 - ETA: 1:04 - loss: 0.309 - ETA: 1:03 - loss: 0.309 - ETA: 1:01 - loss: 0.309 - ETA: 1:00 - loss: 0.310 - ETA: 59s - loss: 0.310 - ETA: 58s - loss: 0.31 - ETA: 57s - loss: 0.31 - ETA: 56s - loss: 0.31 - ETA: 54s - loss: 0.31 - ETA: 53s - loss: 0.31 - ETA: 52s - loss: 0.31 - ETA: 51s - loss: 0.31 - ETA: 50s - loss: 0.31 - ETA: 49s - loss: 0.31 - ETA: 48s - loss: 0.31 - ETA: 47s - loss: 0.31 - ETA: 46s - loss: 0.31 - ETA: 45s - loss: 0.31 - ETA: 44s - loss: 0.31 - ETA: 43s - loss: 0.31 - ETA: 42s - loss: 0.31 - ETA: 41s - loss: 0.31 - ETA: 40s - loss: 0.31 - ETA: 39s - loss: 0.31 - ETA: 38s - loss: 0.31 - ETA: 37s - loss: 0.31 - ETA: 36s - loss: 0.31 - ETA: 35s - loss: 0.31 - ETA: 34s - loss: 0.31 - ETA: 33s - loss: 0.31 - ETA: 32s - loss: 0.31 - ETA: 31s - loss: 0.31 - ETA: 30s - loss: 0.31 - ETA: 29s - loss: 0.31 - ETA: 28s - loss: 0.31 - ETA: 27s - loss: 0.31 - ETA: 26s - loss: 0.31 - ETA: 25s - loss: 0.31 - ETA: 24s - loss: 0.31 - ETA: 22s - loss: 0.31 - ETA: 21s - loss: 0.31 - ETA: 20s - loss: 0.31 - ETA: 19s - loss: 0.31 - ETA: 18s - loss: 0.31 - ETA: 17s - loss: 0.32 - ETA: 16s - loss: 0.32 - ETA: 15s - loss: 0.32 - ETA: 14s - loss: 0.32 - ETA: 12s - loss: 0.32 - ETA: 11s - loss: 0.32 - ETA: 10s - loss: 0.32 - ETA: 9s - loss: 0.3233 - ETA: 8s - loss: 0.323 - ETA: 7s - loss: 0.324 - ETA: 5s - loss: 0.324 - ETA: 4s - loss: 0.324 - ETA: 3s - loss: 0.325 - ETA: 2s - loss: 0.325 - ETA: 1s - loss: 0.325 - ETA: 0s - loss: 0.325 - 134s 1s/step - loss: 0.3261 - val_loss: 0.1961\n",
      "\n",
      "Epoch 00022: val_loss did not improve from 0.17939\n",
      "Epoch 00022: early stopping\n",
      "4/4 [==============================] - ETA: 2s - loss: 0.154 - ETA: 1s - loss: 0.154 - ETA: 0s - loss: 0.172 - ETA: 0s - loss: 0.185 - 3s 625ms/step - loss: 0.1856\n",
      "Val Score:  0.18556812405586243\n",
      "====================================================================================\n",
      "\n",
      "\n",
      "Computation time :  103.761 min\n"
     ]
    }
   ],
   "source": [
    "from time import time\n",
    "from processing.models import fit_and_evaluate\n",
    "t0 = time()\n",
    "n_folds = 2\n",
    "epochs = 30\n",
    "batch_size = 8\n",
    "\n",
    "\n",
    "#save the model history in a list after fitting so that we can plot later\n",
    "model_history = [] \n",
    "\n",
    "for i in range(n_folds):\n",
    "    print(\"Training on Fold: \",i+1)\n",
    "    model = None\n",
    "    model = create_hybrid(trainAttrX.shape[1], shape = (240,240,4))\n",
    "    model.compile(loss=\"mean_squared_error\", optimizer=opt)\n",
    "    t_x, val_x, t_y, val_y = custom_shuffle_split(trainAttrX,train_dataset,trainY,test_size = 0.1)    \n",
    "    model_history.append(fit_and_evaluate(t_x, val_x, t_y, val_y, epochs, batch_size,model,es,cp))\n",
    "    print(\"=======\"*12, end=\"\\n\\n\\n\")\n",
    "\n",
    "print(\"Computation time : \", round((time() - t0)/60,3), \"min\")"
   ]
  },
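  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`fit_and_evaluate` comes from the local `processing.models` module. A minimal sketch of what it presumably does, reconstructed from the logs above (fit one fold with the early-stopping `es` and checkpoint `cp` callbacks defined earlier, then print the held-out score); the actual implementation may differ:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def fit_and_evaluate(t_x, val_x, t_y, val_y, epochs, batch_size, model, es, cp):\n",
    "    # hypothetical sketch: fit with the callbacks, then score the held-out split\n",
    "    results = model.fit(t_x, t_y, epochs=epochs, batch_size=batch_size,\n",
    "                        callbacks=[es, cp], validation_split=0.1, verbose=1)\n",
    "    print(\"Val Score: \", model.evaluate(val_x, val_y))\n",
    "    return results"
   ]
  },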
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXUAAAEICAYAAACgQWTXAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAxt0lEQVR4nO3deXxU9dn//9eVyb6QkIUACTEsSQDZhIiCKMGtoK1alVrUVrt81d5201btftt6+9O23t7VttobW2+7ifuCW93KpqIYFJBFFiGQECAbhCxknev3x5lAjNkz4TAz1/PxmEcm55w55zoZeM+Zz/mczxFVxRhjTHAIc7sAY4wx/mOhbowxQcRC3RhjgoiFujHGBBELdWOMCSIW6sYYE0Qs1I05wYjItSLyltt1mMBkoW4GnYgUici5btfRHyJSICJeEant8Jjldm3GdCbc7QKMCQClqprpdhHG9IYdqRvXiEiUiPxOREp9j9+JSJRvXqqIvCgih0SkSkRWiUiYb95tIrJXRGpEZKuInNPJuk8Tkf0i4mk37YsissH3fKaIFIrIYRE5ICL39nMflovIXSKyxreu50Ukud38i0Rkk28/lovIhHbzRonIMyJSLiKVIvKHDuu+R0QOisguEVnQbvq1IrLTt/+7ROSq/tRugpOFunHTT4HTgWnAVGAm8DPfvB8AJUAakA78BFARyQO+DZyqqgnA54CijitW1feAOuDsdpOvBB71Pb8PuE9VhwBjgScGsB9fBb4OjABagPsBRCQXWAJ837cfLwMviEik78PmRWA3kA1kAI+1W+dpwFYgFfgN8BdxxPnWv8C3/7OBdQOo3QQZC3XjpquAX6lqmaqWA78EvuKb14wTkieparOqrlJnoKJWIAqYKCIRqlqkqp90sf4lwCIAEUkALvBNa1v/OBFJVdVaVX23mzpH+o602z/i2s3/u6puVNU64OfAl3yhfQXwkqq+rqrNwD1ADE4QzwRGAreoap2qNqhq+5Oju1X1IVVtBf7q+1uk++Z5gUkiEqOq+1R1Uze1mxBjoW7cNBLnSLXNbt80gN8CO4DXfE0NPwJQ1R04R763A2Ui8piIjKRzjwKX+pp0LgU+UNW27X0DyAU+FpH3ReTz3dRZqqpJHR517eYXd9iHCJwj7E/tn6p6fctmAKNwgruli23ub/e6et/TeN92rwBuAPaJyEsiMr6b2k2IsVA3bioFTmr3e5ZvGqpao6o/UNUxwEXAzW1t56r6qKrO8b1WgV93tnJV3YwTqgv4dNMLqrpdVRcBw3yvf6rD0XdfjOqwD81ARcf9ExHxLbsXJ9yzRKTPnRVU9VVVPQ/n6P1j4KF+1m2CkIW6OV4iRCS63SMcpynkZyKSJiKpwC+AfwCIyOdFZJwvCKtxml28IpInImf7jr4bgCM4zRFdeRT4HnAW8GTbRBG5WkTSfEfPh3yTu1tPd64WkYkiEgv8CnjK12zyBHChiJwjIhE45wkagXeANcA+4G4RifP9Tc7oaUMiki4iF/s+gBqB2gHUbYKQhbo5Xl7GCeC2x+3AfwGFwAbgI+AD3zSAHOANnNBaDTygqstw2tPvxjkS3o9zpP3jbra7BJgL/FtVK9pNnw9sEpFanJOmX1bVI12sY2Qn/dQvazf/78Ajvnqige8CqOpW4Grg9756vwB8QVWbfKH/BWAcsAfnpPAV3exHmzDgZpxvAVW+fftWL15nQoTYTTKM6T8RWQ78Q1X/7HYtxoAdqRtjTFCxUDfGmCBizS/GGBNE7EjdGGOCiGsDeqWmpmp2drZbmzfGmIC0du3aClVN62q+a6GenZ1NYWGhW5s3xpiAJCK7u5tvzS/GGBNELNSNMSaIWKgbY0wQsTsfGRMimpubKSkpoaGhwe1STC9ER0eTmZlJREREn15noW5MiCgpKSEhIYHs7GyccdLMiUpVqayspKSkhNGjR/fptdb8YkyIaGhoICUlxQI9AIgIKSkp/fpWZaFuTAixQA8c/X2vAi7Ut+6v4a5XtlDT0Ox2KcYYc8LpMdRF5GERKRORjV3MTxSRF0Rkve+u6V/zf5nHFFfV878rdrK9rHYwN2OM8bPKykqmTZvGtGnTGD58OBkZGUd/b2pq6va1hYWFfPe73+1xG7Nnz/ZLrcuXLycxMfFofeeee263y2dnZ1NRUfGZ6bfffjv33HPPZ6avXLmS6dOnEx4ezlNPPeWXmtv05kTpI8AfgL91Mf9GYLOqfkFE0oCtIvJPVe3+Xeqn3PQEALbtr2F61tDB2IQxZhCkpKSwbt06wAm7+Ph4fvjDHx6d39LSQnh455GUn59Pfn5+j9t45513/FIrwJlnnsmLL77ot/W1l5WVxSOPPNJp4A9Uj0fqqroS5w4rXS4CJPhuOxbvW7arm+kOWObQGGIiPGw7YEfqxgS6a6+9lhtuuIHTTjuNW2+9lTVr1jBr1ixOOeUUZs+ezdatWwHnyPnzn3fuDX777bfz9a9/nYKCAsaMGcP9999/dH3x8fFHly8oKODyyy9n/PjxXHXVVbSNSPvyyy8zfvx4ZsyYwXe/+92j6+2NJUuWMHnyZCZNmsRtt93W6TJ33nknubm5zJkz52j9HWVnZzNlyhTCwvzfAu6PLo1/AJbi3F4rAbjCd9/HQREWJuSkx7PtQM1gbcKYoPfLFzaxufSwX9c5ceQQ/vMLJ/f5dSUlJbzzzjt4PB4OHz7MqlWrCA8P54033uAnP/kJTz/99Gde8/HHH7Ns2TJqamrIy8vjW9/61mf6c3/44Yds2rSJkSNHcsYZZ/D222+Tn5/P9ddfz8qVKxk9ejSLFi3qsq5Vq1Yxbdo0ABYuXMjXvvY1brvtNtauXcvQoUM5//zzee6557jkkkuOvmbt2rU89thjrFu3jpaWFqZPn86MGTP6/DcZCH+E+ueAdcDZwFjgdRFZpaqf+RcjItcB14Hz9aO/coYlsGp7eb9fb4w5cSxcuBCPxwNAdXU111xzDdu3b0dEaG7uvEPEhRdeSFRUFFFRUQwbNowDBw6QmZn5qWVmzpx5dNq0adMoKioiPj6eMWPGHO37vWjRIhYvXtzpNjo2vzz//PMUFBSQluYMkHjVVVexcuXKT4X6qlWr+OIXv0hsbCwAF110UT/+IgPjj1D/GnC3Ot9tdojILmA8zt3SP0VVFwOLAfLz8/t9d4684fE8/UEJh+qbSIqN7O9qjAlZ/TmiHixxcXFHn//85z9n3rx5PPvssxQVFVFQUNDpa6Kioo4+93g8tLR8tsW3N8sEI3806OwBzgEQkXQgD9jph/V2KaftZKm1qxsTVKqrq8nIyADgkUce8fv68/Ly2LlzJ0VFRQA8/vjjvX7tzJkzWbFiBRUVFbS2trJkyRLmzp37qWXOOussnnvuOY4cOUJNTQ0vvPCCP8vvld50aVwCrAbyRKRERL4hIjeIyA2+Re4AZovIR8CbwG2q+tm+PX50tAeMtasbE1RuvfVWfvzjH3PKKacMypF1TEwMDzzwAPPnz2fGjBkkJCSQmJjYq9eOGDGCu+++m3n
z5jF16lRmzJjBxRdf/Kllpk+fzhVXXMHUqVNZsGABp556aqfrev/998nMzOTJJ5/k+uuv5+ST/ffNybV7lObn52t/b5Khqky+/TUum57BLy+e5OfKjAlOW7ZsYcKECW6X4bra2lri4+NRVW688UZycnK46aab3C6rU529ZyKyVlW77N8ZcFeUgnP5bE56PFvtSN0Y00cPPfQQ06ZN4+STT6a6uprrr7/e7ZL8KmBHacwdlsAbWw64XYYxJsDcdNNNJ+yRuT8E5JE6QO7wBCrrmqiobXS7FGOMOWEEbqinO1eO2clSY4w5JoBD3ekBs926NRpjzFEBG+rDEqJIjImwI3VjjGknYENdRMi1MWCMCRg29O4x9957LxMnTmTKlCmcc8457N692y91QwD3fgGnCebFDftQVbujizEnOBt695hTTjmFwsJCYmNjefDBB7n11lv7dHVrdwL2SB2cUK8+0kxZjfWAMSYQherQu/PmzTs66Nfpp59OSUlJr2voSUAfqee06wGTPiTa5WqMCSCv/Aj2f+TfdQ6fDAvu7vPLQn3o3b/85S8sWLCg93+wHgR0qOe1G9jrzJw0l6sxxvRHKA+9+49//IPCwkJWrFjR7XJ9EdChnhIfRUpcJNv228lSY/qkH0fUgyVUh9594403uPPOO1mxYsWnah2ogG5TB6cJZluZhboxwSBUht798MMPuf7661m6dCnDhg3r9/50JuBDPS89ge0HanFrtEljjP+EytC7t9xyC7W1tSxcuJBp06b59Q5JATn0bnv/eHc3P3tuI2//6GwykmL8UJkxwcmG3nXY0LsnOLthhjGmL2zo3RNc28Be2w/UMC/Pv21TxpjgE/JD74rIwyJSJiIbu1mmQETWicgmEfFf35xeSIqNZFhCFFv328BexvTEzj0Fjv6+V71pfnkEmN/VTBFJAh4ALlLVk4GF/apkAHLTE9huPWCM6VZ0dDSVlZUW7AFAVamsrCQ6uu8XVfbY/KKqK0Uku5tFrgSeUdU9vuXL+lzFAOWmJ7BkzR68XiUszMaAMaYzmZmZlJSUUF5e7nYppheio6M/c0FVb/ijTT0XiBCR5UACcJ+q/q2zBUXkOuA6gKysLD9s2ldAejxHmlspOXiErJRYv63XmGASERFx9EpKE7z80fslHJgBXAh8Dvi5iOR2tqCqLlbVfFXNb7vU1h9yrAeMMcYA/gn1EuBVVa1T1QpgJTDVD+vttbYeMFst1I0xIc4fof48MEdEwkUkFjgN2OKH9fZaQnQEIxOj2W6hbowJcT22qYvIEqAASBWREuA/gQgAVf2Tqm4RkX8BGwAv8GdV7bL742DJSU9gm92v1BgT4nrT+6XrAYePLfNb4Ld+qaif8oYnsHpnJa1exWM9YIwxISrghwlokzMsnqYWL7sr69wuxRhjXBM0oZ43/NgNM4wxJlQFTaiPG3bs1nbGGBOqgibUYyPDGZUcY6FujAlpQRPqcOyGGcYYE6qCKtRz0hPYWVFLc6vX7VKMMcYVQRXquenxNLcqRRXWA8YYE5qCLNStB4wxJrQFVaiPTYsnTGwMGGNM6AqqUI+O8HBSSpyNAWOMCVlBFergtKvbkboxJlQFYagnsLuynsaWVrdLMcaY4y7oQj0nPYFWr7Kz3HrAGGNCT9CFep7dBckYE8KCLtRHp8YRHiYW6saYkBR0oR4ZHkZ2apz1VTfGhKSgC3VwmmDsSN0YE4p6DHUReVhEykSk21vUicipItIiIpf7r7z+yUmPZ09VPUearAeMMSa09OZI/RFgfncLiIgH+DXwmh9qGrDc9ARU4ZNya4IxxoSWHkNdVVcCVT0s9h3gaaDMH0UNVNsYMFv3WxOMMSa0DLhNXUQygC8CD/Zi2etEpFBECsvLywe66S5lp8QS6QljW5mFujEmtPjjROnvgNtUtcdBzFV1sarmq2p+WlqaHzbduXBPGGPS4uyGGcaYkBPuh3XkA4+JCEAqcIGItKjqc35Yd7/lpiewdvdBN0swxpjjbsBH6qo6WlWzVTUbeAr4D7cDHZyBvfYeOkJtY4vbpRhjzHHTmy6NS4DVQJ6IlIjIN0TkBhG5YfDL67+2k6U2DK8xJpT02Pyiqot6uzJVvXZA1fjRsVCv5ZSsoS5XY4wxx0dQXlEKMCo5lqjwMLuy1BgTUoI21D1hwvgRQ1hXfMjtUowx5rgJ2lAHmD02hXXFh+xkqTEmZAR1qM8Zl0qLV1mzq9LtUowx5rgI6lCfcdJQosLDeGu7hboxJjQEdahHR3g4NTuZt3dUuF2KMcYcF0Ed6gBnjEtl64Eaymoa3C7FGGMGXdCH+pxxqQC8s8OaYIwxwS/oQ33iyCEkxUbwljXBGGNCQNCHuidMmD02hbd3VKCqbpdjjDGDKuhDHZx29X3VDeysqHO7FGOMGVQhEept7erWC8YYE+xCItSzkmPJHBrDW9st1I0xwS0kQl1EODMnldU7K2lp7fEGTcYYE7BCItTBaVevaWjho73VbpdijDGDJmRCffZYa1c3xgS/kAn15LhITh45xPqrG2OCWm9uZ/ewiJSJyMYu5l8lIhtE5CMReUdEpvq/TP+YMy6VD3Yfor7JhuI1xgSn3hypPwLM72b+LmCuqk4G7gAW+6GuQXHGuFSaWr28X3TQ7VKMMWZQ9BjqqroSqOpm/juq2paS7wKZfqrN707NTibSE2bt6saYoOXvNvVvAK90NVNErhORQhEpLC8v9/OmexYT6WHGSUOtv7oxJmj5LdRFZB5OqN/W1TKqulhV81U1Py0tzV+b7pM5Oals3neYytpGV7ZvjDGDyS+hLiJTgD8DF6vqCT3G7RltQ/F+ckKXaYwx/TLgUBeRLOAZ4Cuqum3gJQ2uyRmJJESHW7u6MSYohfe0gIgsAQqAVBEpAf4TiABQ1T8BvwBSgAdEBKBFVfMHq+CBahuKd9V2ZyheX83GGBMUegx1VV3Uw/xvAt/0W0XHwZxxqby66QB7quo5KSXO7XKMMcZvQuaK0vba2tXt6lJjTLAJyVAfnRrHyMRoa1c3xgSdkAx1EeGMcam880klrV67xZ0xJniEZKiD01/9UH0zm0sPu12KMcb4TciGettQvNaubowJJiEb6mkJUYwfnmDt6saYoBKyoQ5OL5g1RVU0NLe6XYoxxvhFSIf6nHGpNLV4WbvbhuI1xgSHkA71maOTCQ8Ta1c3xgSNkA71uKhwpmcNtXZ1Y0zQCOlQB6dd/aO91Ryqb3K7FGOMGbCQD/U5OSmowmobitcYEwRCPtSnZCYRHxVu7erGmKAQ8qEe4Qnj9DHJ1q5ujAkKIR/q4HRtLKqsp7iq3u1SjDFmQCzUgTNznfulrth2/G+GbYwx/tRjqIvIwyJSJiIbu5gvInK/iOwQkQ0iMt3/ZQ6uMalxjEqOYflWC3VjTGDrzZH6I8D8buYvAHJ8j+uABwde1vElIszNTeOdTypobLEhA4wxgavHUFfVlUBVN4tcDPxNHe8CSSIywl8FHi8FucOob2plbZENGWCMCVz+aFPPAIrb/V7imxZQZo1NIdITxnJrVzfGBL
DjeqJURK4TkUIRKSwvP7HCMy4qnFNHD2X51jK3SzHGmH7zR6jvBUa1+z3TN+0zVHWxquaran5aWpofNu1fBbnD2HagltJDR9wuxRhj+sUfob4U+KqvF8zpQLWq7vPDeo+7gjzr2miMCWy96dK4BFgN5IlIiYh8Q0RuEJEbfIu8DOwEdgAPAf8xaNUOsnHD4hmZGG1NMMaYgBXe0wKquqiH+Qrc6LeKXCQizM0bxgvrS2lq8RIZbtdmGWMCi6VWBwV5adQ2tvDBHuvaaIwJPBbqHcwem0J4mNjVpcaYgGSh3kFCdAT52da10RgTmCzUO1GQN4yP99dw4HCD26UYY0yfWKh3Ym7bqI3WBGOMCTAW6p0YPzyB4UOiWb7NmmCMMYHFQr0TbaM2rtpeQUur1+1yjDGm1yzUuzA3L42ahhY+LD7kdinGGNNrFupdOGNcKp4wsV4wxpiAYqHehcSYCGZkDbVxYIwxAcVCvRtz89LYuPcwZTXWtdEYExgs1LvR1rVx5bYKlysxxpjesVDvxskjh5CWEGXt6saYgGGh3g0R4awcp2tjq1fdLscYY3pkod6Dgrw0qo80s866NhpjAoCFeg/OzEklTGCFNcEYYwKAhXoPkmIjmTYqybo2GmMCQq9CXUTmi8hWEdkhIj/qZH6WiCwTkQ9FZIOIXOD/Ut1TkDeMDXurqaxtdLsUY4zpVm/uUeoB/ggsACYCi0RkYofFfgY8oaqnAF8GHvB3oW4qyEtDFVZut6N1Y8yJrTdH6jOBHaq6U1WbgMeAizsso8AQ3/NEoNR/Jbpv0shEUuIibSheY8wJrzehngEUt/u9xDetvduBq0WkBHgZ+E5nKxKR60SkUEQKy8sDJyDDwoSzctNYub0Cr3VtNMacwPx1onQR8IiqZgIXAH8Xkc+sW1UXq2q+quanpaX5adPHR0FeGlV1TWzYW+12KcYY06XehPpeYFS73zN909r7BvAEgKquBqKBVH8UeKI4MycNEbsbkjHmxNabUH8fyBGR0SISiXMidGmHZfYA5wCIyAScUA+q9EuOi2RKZpLdDckYc0LrMdRVtQX4NvAqsAWnl8smEfmViFzkW+wHwP8TkfXAEuBaVQ26xueC3DTWFR/iYF2T26UYY0ynwnuzkKq+jHMCtP20X7R7vhk4w7+lnXgK8tK4783trNxezsXTOp4rNsYY99kVpX0wJTOJobERvLRhn9ulGGNMpwIv1Ms+hldug5bj3wTiCRO+Miub1zYfoLCo6rhv3xhjehJ4oV5dDO/9Cba/5srmb5g7hvQhUdzx4mbrs26MOeEEXqiPmQfx6bB+iSubj40M55bPjWd9STXPr+/Ys9MYY9wVeKHuCYfJC2Hbq1DvThPIpadkMDkjkV+/spX6phZXajDGmM4EXqgDTF0E3mbY+LQrmw8LE37++YnsP9zA4pU7XanBGGM6E5ihPnwSpE92rQkGYOboZC6YPJz/XbGTfdVHXKvDGGPaC8xQB5j6Zdi7Fsq3uVbCjxdMoNWr/PZfW12rwRhj2gvcUJ+8EMQDGx5zrYRRybF8fc5onvlwL+vtHqbGmBNA4IZ6QjqMOwfWPw5er2tl3DhvLKnxkdzx4maCcGQEY0yACdxQB6cJ5nAJFK1yrYSE6AhuPi+Pwt0Heekju9LUGOOuwA71vAsgagisd68JBuCKU0cxfngCd7/yMQ3Nra7WYowJbYEd6hExcPIlsPl5aKpzrQyPr4tjycEjPPz2LtfqMMaYwA51cPqsN9fBlhddLeOMcamcO2EYDyz7hLKaBldrMcaErsAP9VGnQ9JJrvZZb/OTCybQ0NzKva+5183SGBPaAj/Uw8Kco/Wdy+FwqauljEmL56uzsnm8sJjNpYddrcUYE5oCP9QBpl4BKGx43O1K+N45OSTGRFgXR2OMK3oV6iIyX0S2isgOEflRF8t8SUQ2i8gmEXnUv2X2IHmM0wyz/jFwOUgTYyO46dxcVu+s5PXNB1ytxRgTenoMdRHxAH8EFgATgUUiMrHDMjnAj4EzVPVk4Pv+L7UHU78M5R/DvnXHfdMdXXlaFmPT4vj/Xt5CcVW92+UYY0JIb47UZwI7VHWnqjYBjwEXd1jm/wF/VNWDAKpa5t8ye+HkS8AT5XqfdYAITxi/ungSpdUNzLtnOT959iNKD9mgX8aYwdebUM8Aitv9XuKb1l4ukCsib4vIuyIyv7MVich1IlIoIoXl5eX9q7grMUMhbwF89CS0Nvt33f1wxrhUVtxSwKKZWTxZWEzBb5fzi+c3sr/aujsaYwaPv06UhgM5QAGwCHhIRJI6LqSqi1U1X1Xz09LS/LTpdqZdCfWVsP11/6+7H0YkxnDHJZNYfss8LpuRyaPv7eGs3y7jly9ssr7sxphB0ZtQ3wuMavd7pm9aeyXAUlVtVtVdwDackD++xp4NcWknRJ/19jKSYrjr0sks+2EBl0wbyd9W7+as3yzjv17cTEVto9vlGWOCSG9C/X0gR0RGi0gk8GVgaYdlnsM5SkdEUnGaY47/LYE8Eb5b3f3LtVvddWdUciy/uXwqb948lwsmj+Dht3dx5q+XcdcrW6i0cDfG+EGPoa6qLcC3gVeBLcATqrpJRH4lIhf5FnsVqBSRzcAy4BZVrRysors19cvQ2gSbnnVl872RnRrHvV+axus3z+X8k9NZvHInc369jDte3MyBw9YsY4zpP3HrApn8/HwtLCz0/4pV4cHZEBkP3zwx2tZ7sqOshgeWfcLz60vxiLAwP5Mb5o5lVHKs26UZY04wIrJWVfO7mh8cV5S2J+IcrZesgcpP3K6mV8YNS+DeK6ax7AcFXDYjkycLSyi4Zzk3P7GOHWW1bpdnjAkgwRfqAJO/BBJ2wp0w7UlWSix3XTqZlbfO45pZ2bz80T7O+58V3PjPD9hUWu12ecaYABB8zS9t/n4pVGyH7613Bv0KQBW1jTz81i7+vno3NY0tnD1+GN85exynZA11uzRjjEtCr/mlzdRFUL0H9rzjdiX9lhofxa3zx/PWj87mB+fl8uGeg3zxgXdYsmaP26UZY05QwRvq4y90Tpb+68dQus7tagYkMSaC75yTw1u3nU1BXho/fuYjnni/uOcXGmNCTvCGemQsXPKAM8b64gJY+h2o9fPQBMdZXFQ4f7p6BmflpnHbMxt4stCC3RjzacEb6gATL4bvrIVZN8K6R+H30+Gd30NLk9uV9Vt0hIfFX5nBnHGp3Pr0Bp5eW+J2ScaYE0hwhzpATBJ87k74j3dh1Gnw2s/gwVmw7TW3K+u36AgPD301nzPGpvLDp9bz7IcW7MYYR/CHepvUHLj6KbjySef3RxfCPxc6PWQCUFuwzxqTwg+eWM/z6zoOx2OMCUWhE+ptcs+Hb62G8++EPe/CA6fDqz+FhsDrBx4T6eHP1+Qzc3QyNz2+jhfWu3uPVmOM+0Iv1AHCI2H2t5329mlXwuo/wn3T4OVbnKD3et2usNdiI8N5+NpTyc9O5vuPr+OlDfvcLskY46LgvfioL0rXwVv3wrZXoaUBhmQ6d1KadBmMPMUZeuAEV9fYwrX/t
4YP9hziD4tOYcHkEW6XZIwZBD1dfGSh3l5jDWx9BTY+DTveBG8zDB3thPukyyB9Ys/rcFFtYwvXPLyG9cWH+MOV05k/aTgAqkr1kWaKq46wp6qe4oP1FFfVU3zwCMVV9VQfaWZSRiL5Jw0lP3so00YlERsZ7vLeGGM6Y6HeX0cOwpYXnYDftQLUC2kTYNKlzknXpjrfo9b3s77d8zporocwD0y/xulaGeY5LmXXNDRzzcNr2FBSTUFeGnsPNVBSVU9NY8unlhsaG8Go5FhGDY0lLsrD+uJqtpXVoAqeMGHSyCHMOCmZU7OHMiN7KMMSoo9L/caY7lmo+0NtGWx+HjY+0/mwA+Jxrl6NjIXION8jHmr2QdVOSM2FM38Aky4Hz+AfAR9uaOamx9axu6qerORYRg2NcQLcF+KjkmNIiI74zOuq65v5YM9BCndXUVh0kHXFh2hscc4vZCXHkp89lIkjhpCdEkd2ahyjkmOICj8+H1bGGIeFur/V7HeO4iPjIMIX4OFRnbe7e1thy1JYeQ8c2AhDs2HOTTD1Sudk7QmuqcXLptJqCoucoF+7+yAVtccu3BKBkYkxZKfGclJKHKNT4jgpJZbs1DiykmOJjvBP4B+qb2LzvsMcPtLCOROGEeEJzfP7xoCF+onB63VusbfyN1D6oXMi9ozvwfSvQESM29X1mqpyqL6Zoso651FRz+7KOooq6ymqrONQffPRZdsCf0xaHKNTjz3GpMaTMTQGT9hnPwRbvUpRZR1b9h32PWrYsu8w+6qP3Q1qTFocP7twAvPyhiEBcALbGH/zS6iLyHzgPsAD/FlV7+5iucuAp4BTVbXbxA6pUG+jCp+8CSt+C8XvQnw6zP4O5H/dOeIPcIfqm9jtC/hdFXUUVTg/d5bXfapNP9ITxkkpsUeD/nBDM5v31bBtfw1HmlsBp11/XFo840ckMGHEECaMGMKRphZ+/a+t7Kqo48ycVH7++Ynkpie4tbvGuGLAoS4iHmAbcB5QgnMj6kWqurnDcgnAS0Ak8G0L9W6oQtFbsPK3zknY2BQY/3lIGw9pec5jSEZAdKXsDVWlsq6JXRV17CqvY2dFHTvLa9lVUcfuynpiozxMGD7EF95OiOekx3faXt/U4uXv7+7mvje2UdfUypUzs7jpvFyS4/zfnKWqbNlXw7827uONLWXER4Uza2wKs8emMC0ryc4nGFf4I9RnAber6ud8v/8YQFXv6rDc74DXgVuAH1qo91LxGnjrd7BnNRypOjY9Mt45wdoW8qm+n0Ozj1tPmuPB61VE6HNTSlVdE797Yxv/fG8PsZEevndODl+dlU1k+MDa271eZX3JIf61cT//2rSf3ZX1hAnkZyfT2NzKR3ur8SpER4RxanayL+RTmTRyCOHW1m+OA3+E+uXAfFX9pu/3rwCnqeq32y0zHfipql4mIsvpItRF5DrgOoCsrKwZu3fv7scuBbG6Cij/GMq3Oo+KrVC+DWraXf4fnQgX/DdMWehenSeQ7QdquOOlLazcVs7o1Dh+esEEzpnQt/b2Vq/yflGVE+Qb97P/cAMRHmH22FTmTxrOeRPTSY2PAqD6SDPv7azknU8qWf1JJVsP1ACQEBXOaWOSmTU2lVljUsgbntDpeQNjBmrQQ11EwoB/A9eqalF3od6eHan3QUO1M/BY+Vb44G9Oe/wpV8OC3wRFW7w/LNtaxn+9uJlPyus4Y1wK501Ix6vgVfU9nPDWDs8PHG7kjS0HqKxrIio8jLNy01gwaTjnTEgnMeaz3T47Kq9p5N2jIV9BUWU9ADERHiZnJDJ1VCJTRyUxNTOJzKExdnLXDNigN7+ISCLwCdB22/vhQBVwUXfBbqHeT60tsOJup5tkai4s/D9IP3lg66sudpp1Ajxwmlu9PPreHv7njW2f6onTnYSocObmpbFg0ggK8tKIixrYdQR7Dx1hza5K1hdXs77kEJtKD9Pk6+ufHBfJ1ExfyPuCfjDOBZjg5o9QD8c5UXoOsBfnROmVqrqpi+WXY0fqg2/ncnjmOucofv5dMONrfQvl5gZY/yi8fT8c3AVJWTDxEmfMm5HTAzfgq3bS+u+7aBqaQ9Np30XCPXhECBNBxOlVEyZCWD/a8fujqcXLtgM1rCs+xPriQ6wvOcT2slra/ttlDo1hSmYikzOSmJKZyKSMxF59QzjRNLa0sm1/LTUNzZw+JoUwa3oaNP7q0ngB8DucLo0Pq+qdIvIroFBVl3ZYdjkW6sdHbRk8ez188m8nkC+632lz705jDRQ+DKsfgNr9ToBPuhR2rXTW420JzIBvrHG+vbz7gPN7axNkzYZLF0PSKHdr66C2sYWNe6tZX3yIDXur+aikmj1V9Ufnj06NY3JGoi/sEzk5I5H4AX6DGJAtLzjfBpPHANDQ3MqWfYfZWHqYjSXVbCytZtuBGppbnSzJTY/npnNz+dzJwy3cB4FdfBTsvF545z548w5IzITL/w8yZ3x2ubpKeO9BWLPYObofPRfOvNn52Rba9VWw9WXY9BzsXHYs4E/+ohPy/h6xUhVaGru+Irc3vF5YvwTe/CXUHnCu1j3nF05X0Zd+4PQUuuj3zvg7J7CDdU1sLK1mQ0k1G0oO8VFJNaW+i65EIDsljoykGIYnRjMiMfrozxGJMYxIjCYxJmLA3zxavcqR5lbqm1o40tRKfWMzie/9NyPX38/2kRfxv0N/yMa91Wwvq6XV6+RGUmwEkzOcbxiTRibS3Orl9//ezifldUwYMYSbz8vl3D6euG5TXFXP0vWl7Kqo47LpmZw+JtnOSWChHjqK18BT33B6ypx7O5x+I4SFQXWJc1/WtX+FliNOf/g5N3ce/O0dDfhnnaYebwsknQTDJoKEOesWj++5p91z33RwBjVrqodm34BnzW2DnvmeN9c7A6UNyYTxF8KEL0DWrN6Pj1O8Bl65DUo/gIx858Rx+/2q2glPfxP2rnUGVpt/V0CdWC6vaWTjXifotx5wrqzdX93AgcMNeDv8t42OCGNEYgzDh0QTFRFGq9c5QdzqbfdQp8tm27zmVq8T3s2t1De1Hm37B4innv+JeJDzPGt5omUuP2v5OglxcUzKSDwW4hlDyEj67MnfVq+ydP1e7ntjO0WV9UzJTOSm83IpyE3rMZSr6pp4aUMpz60rZe3ug04tUeHUNrYwKWMI35wzhgunjAjpoSIs1EPJkYOw9DvO1+Wc8yFuGGx4zJk3+Usw5/tOX/e+qq+Cj19y1luzzznC1lZnbBttdYLZ2+EnChG+Ac4iYp3BziLijg161vY8PAr2fuA0/bQ0QEwyjL8AJlzkfIuI6GR0yMOl8MbtsOFxSBgB5/4SJi90PlA6am2GZXc61wKk5sBlf4ERU/r+N3BLawvsLXQ+oMYUwJCRtLR6qahtorT6CPurG3xhf+Ro6De1egkTwRPme/ieh4UJnnbnFSI8YcREeoiJ8BAb6SEm0vk5rLmUsz/8HvF1ReyY/hMOTryWrNQ4hg+J7tORckurl2c+3Mv9b26n5OARTslK4ubzcpkzLvVT66lvauH1zQd47sO9rNpeQYtX
yU2P5+JpGVw0dSRpCVE8++Fe/rxqJ5+U1zF8SDTXnpHNoplZAXn+YaAs1EONKrz/Z+cWfRIG07/q3OUpKcvtyrrXWAs73oCPX3RuVtJ42LkAK+d85wg+5zwIC4fVf4BV9zofHrO/4wyQFhXf8/p3rnDOP9RXOt9kTvtW5x8CA1FfBfvWOx98aXnOt5r+jO1Tc8D5W+x43fmwa3+rxcyZzt9j4kVOjyV/2/EmPPU159/Owr/CmLkDXmVTi5en1pbwh39vp7S6gZnZyXz/3BwaWlp5fl0pr206wJHmVkYmRvOFaSO5ZFoG44cnfOYDxOtVVmwr56FVO3nnk0riIj186dRRfP2M0YxKju1VLapKVV0TlXVNHGlqpbHFS0Nz9z9T4iLJSoklOyWOzKEx/f6W0LbtfdUNJMZE9LrmjizUQ1VtudMsEpvsdiV919IIu1Y5I1xufRnqysET5ZwEritzjuLPv6PvoVZXCUu/7axz3LlwyYMQP6zv9ak6wb1vPezb4Pzcv8HpGtqehDndTodPbveYAnGpn16u7Wh8+2uw/XVnXQDxw506c86F5LGw/VXn29K+9c784VOccJ9wMaTl9n0/Ou7T6j/C6z937hvw5X9C8uiBrbODxpZWHn+/mD/8ewdlNY0AJMZEcMHkEVwybSSnZif3+sTqptJq/vLWLl5YX0qrV5k/aTjfmDOGUUNj2Nf+28vhhnbfZhrYf7jhU81MfeUJE0YmRZOd4oxEmp0SdzTwh8ZFUHa40bd955vTvkNHjtXTbts3zB3LjxaM71cNFuomsHlbofg954YllTtg1o0DO3ps+ybz2s8gKgEW/NoZZ8fb4jxaW4499zY72/e2OL1pqnYeC/H6Ct8KBVLGwYipTrPOiKnO+so/dpbd/5HzOFxyrIaEkU7Ap0+Eg0XHjsbFA6NmOt9Kxp3nLNNZc8fBIifcNy+FkjXOtNQ8X8Bf1PXrutJ8BF74vtNUN+Ei58OuN99++qmhuZUX1peSFBvJ3Ny0AQ3tsL+6gb+uLuKf7+7mcEPLZ+ZHesIY3u7E8vDEaEYMiSY1IYqYCA/RER6iwsM+8zMqwkN0RBgRYWFU1jUdHY10j+/n7ipnhNLuroeI8AjpQ6IZmeg7wZ3kbHtEUgzjhydwUkr/zu9YqBvTmQOb4elvQNnmnpdtExYBw8bD8KnHQjx9Uu8CsL7KF/Dtgr58q3PUPu4852h8zDyISerbfhwudT7wtiyF3W875zNiUyBjxrHHyOkQl9L16x+7yjnZPO+ncOYP/d8sdRzUNbbw0oZ9NLZ6GTHkWIgnx0UOao+Z6vpmdlc5A9MdrG9iWEI0I5Oc7afGRQ1Kl04LdWO60tzg3MlKFTwRTpt9+8fRaR7nZ3y6c2LXX1qanG34K3TqKpx77Ba/65x8LtsC+P5/J5306aAfMdW5ccvjVzu3X7x0sdMDyZzwLNSNCVWNNU5T0d61Tsjv/QCq9zjzxON8mCSOgkVLYNgEd2s1vdZTqNst440JVlEJkD3HebSpLfMF/FrnCP2sHwbmyXTTJQt1Y0JJ/DDIm+88TFAKvDMixhhjumShbowxQcRC3RhjgoiFujHGBBELdWOMCSIW6sYYE0Qs1I0xJohYqBtjTBBxbZgAESkHdvfz5alARY9LBZZg26dg2x8Ivn0Ktv2B4NunzvbnJFVN6+oFroX6QIhIYXdjHwSiYNunYNsfCL59Crb9geDbp/7sjzW/GGNMELFQN8aYIBKoob7Y7QIGQbDtU7DtDwTfPgXb/kDw7VOf9ycg29SNMcZ0LlCP1I0xxnTCQt0YY4JIwIW6iMwXka0iskNEfuR2Pf4gIkUi8pGIrBORgLvHn4g8LCJlIrKx3bRkEXldRLb7fg51s8a+6mKfbheRvb73aZ2IXOBmjX0hIqNEZJmIbBaRTSLyPd/0gHyfutmfQH6PokVkjYis9+3TL33TR4vIe77Me1xEIrtdTyC1qYuIB9gGnAeUAO8Di1S1D7eEP/GISBGQr6oBedGEiJwF1AJ/U9VJvmm/AapU9W7fh+9QVb3NzTr7oot9uh2oVdV73KytP0RkBDBCVT8QkQRgLXAJcC0B+D51sz9fInDfIwHiVLVWRCKAt4DvATcDz6jqYyLyJ2C9qj7Y1XoC7Uh9JrBDVXeqahPwGHCxyzWFPFVdCVR1mHwx8Fff87/i/IcLGF3sU8BS1X2q+oHveQ2wBcggQN+nbvYnYKmj1vdrhO+hwNnAU77pPb5HgRbqGUBxu99LCPA30keB10RkrYhc53YxfpKuqvt8z/cD6W4W40ffFpENvuaZgGiq6EhEsoFTgPcIgvepw/5AAL9HIuIRkXVAGfA68AlwSFVbfIv0mHmBFurBao6qTgcWADf6vvoHDXXa+AKnna9rDwJjgWnAPuC/Xa2mH0QkHnga+L6qHm4/LxDfp072J6DfI1VtVdVpQCZOy8T4vq4j0EJ9LzCq3e+ZvmkBTVX3+n6WAc/ivJmB7oCv3bOt/bPM5XoGTFUP+P7TeYGHCLD3yddO+zTwT1V9xjc5YN+nzvYn0N+jNqp6CFgGzAKSRCTcN6vHzAu0UH8fyPGdDY4EvgwsdbmmARGRON+JHkQkDjgf2Nj9qwLCUuAa3/NrgOddrMUv2sLP54sE0PvkOwn3F2CLqt7bblZAvk9d7U+Av0dpIpLkex6D0yFkC064X+5brMf3KKB6vwD4uij9DvAAD6vqne5WNDAiMgbn6BwgHHg00PZJRJYABTjDhB4A/hN4DngCyMIZYvlLqhowJx672KcCnK/1ChQB17drjz6hicgcYBXwEeD1Tf4JTjt0wL1P3ezPIgL3PZqCcyLUg3PA/YSq/sqXEY8BycCHwNWq2tjlegIt1I0xxnQt0JpfjDHGdMNC3RhjgoiFujHGBBELdWOMCSIW6sYYE0Qs1I0xJohYqBtjTBD5/wGePIwERUvGYAAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "import matplotlib.pyplot as plt \n",
    "\n",
    "plt.title('Loss vs Epochs')\n",
    "plt.plot(model_history[0].history['loss'], label='Training Fold 1')\n",
    "plt.plot(model_history[1].history['loss'], label='Training Fold 2')\n",
    "\n",
    "plt.legend()\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "from keras.models import load_model\n",
    "\n",
    "model = None\n",
    "model = load_model('clean_notebooks/cnn_injection_superposition.h5')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model: \"model_3\"\n",
      "__________________________________________________________________________________________________\n",
      "Layer (type)                    Output Shape         Param #     Connected to                     \n",
      "==================================================================================================\n",
      "input_2 (InputLayer)            [(None, 240, 240, 4) 0                                            \n",
      "__________________________________________________________________________________________________\n",
      "conv2d_3 (Conv2D)               (None, 240, 240, 32) 1184        input_2[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "activation_5 (Activation)       (None, 240, 240, 32) 0           conv2d_3[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "batch_normalization_4 (BatchNor (None, 240, 240, 32) 128         activation_5[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "max_pooling2d_3 (MaxPooling2D)  (None, 120, 120, 32) 0           batch_normalization_4[0][0]      \n",
      "__________________________________________________________________________________________________\n",
      "conv2d_4 (Conv2D)               (None, 120, 120, 64) 18496       max_pooling2d_3[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "activation_6 (Activation)       (None, 120, 120, 64) 0           conv2d_4[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "batch_normalization_5 (BatchNor (None, 120, 120, 64) 256         activation_6[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "max_pooling2d_4 (MaxPooling2D)  (None, 60, 60, 64)   0           batch_normalization_5[0][0]      \n",
      "__________________________________________________________________________________________________\n",
      "conv2d_5 (Conv2D)               (None, 60, 60, 128)  73856       max_pooling2d_4[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "activation_7 (Activation)       (None, 60, 60, 128)  0           conv2d_5[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "batch_normalization_6 (BatchNor (None, 60, 60, 128)  512         activation_7[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "max_pooling2d_5 (MaxPooling2D)  (None, 30, 30, 128)  0           batch_normalization_6[0][0]      \n",
      "__________________________________________________________________________________________________\n",
      "flatten_1 (Flatten)             (None, 115200)       0           max_pooling2d_5[0][0]            \n",
      "__________________________________________________________________________________________________\n",
      "dense_8 (Dense)                 (None, 16)           1843216     flatten_1[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "activation_8 (Activation)       (None, 16)           0           dense_8[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "dense_6_input (InputLayer)      [(None, 6)]          0                                            \n",
      "__________________________________________________________________________________________________\n",
      "batch_normalization_7 (BatchNor (None, 16)           64          activation_8[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "dense_6 (Dense)                 (None, 8)            56          dense_6_input[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_4 (Dropout)             (None, 16)           0           batch_normalization_7[0][0]      \n",
      "__________________________________________________________________________________________________\n",
      "dropout_3 (Dropout)             (None, 8)            0           dense_6[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "dense_9 (Dense)                 (None, 4)            68          dropout_4[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "dense_7 (Dense)                 (None, 4)            36          dropout_3[0][0]                  \n",
      "__________________________________________________________________________________________________\n",
      "activation_9 (Activation)       (None, 4)            0           dense_9[0][0]                    \n",
      "__________________________________________________________________________________________________\n",
      "concatenate_1 (Concatenate)     (None, 8)            0           dense_7[0][0]                    \n",
      "                                                                 activation_9[0][0]               \n",
      "__________________________________________________________________________________________________\n",
      "dense_10 (Dense)                (None, 4)            36          concatenate_1[0][0]              \n",
      "__________________________________________________________________________________________________\n",
      "dropout_5 (Dropout)             (None, 4)            0           dense_10[0][0]                   \n",
      "__________________________________________________________________________________________________\n",
      "dense_11 (Dense)                (None, 1)            5           dropout_5[0][0]                  \n",
      "==================================================================================================\n",
      "Total params: 1,937,913\n",
      "Trainable params: 1,937,433\n",
      "Non-trainable params: 480\n",
      "__________________________________________________________________________________________________\n"
     ]
    }
   ],
   "source": [
    "model.summary()"
   ]
  },
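  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The summary above fully determines the two-branch layout: an MLP on the tabular attributes (6 -> 8 -> 4), a three-block CNN on the stacked 4-channel images ending in a 4-unit embedding, and a small regression head on their concatenation. A hypothetical reconstruction of `create_hybrid` consistent with the printed parameter counts (the activation choices and dropout rates are assumptions; the real function lives in the project's modules):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from keras.layers import (Input, Conv2D, Activation, BatchNormalization,\n",
    "                          MaxPooling2D, Flatten, Dense, Dropout, concatenate)\n",
    "from keras.models import Model\n",
    "\n",
    "def create_hybrid(attr_dim, shape=(240, 240, 4)):\n",
    "    # hypothetical reconstruction of the architecture printed by model.summary()\n",
    "    # MLP branch on the tabular attributes\n",
    "    attr_in = Input(shape=(attr_dim,))\n",
    "    a = Dense(8, activation='relu')(attr_in)\n",
    "    a = Dropout(0.2)(a)\n",
    "    a = Dense(4, activation='relu')(a)\n",
    "    # CNN branch on the stacked 4-channel images\n",
    "    img_in = Input(shape=shape)\n",
    "    x = img_in\n",
    "    for filters in (32, 64, 128):\n",
    "        x = Conv2D(filters, (3, 3), padding='same')(x)\n",
    "        x = Activation('relu')(x)\n",
    "        x = BatchNormalization()(x)\n",
    "        x = MaxPooling2D((2, 2))(x)\n",
    "    x = Flatten()(x)\n",
    "    x = Dense(16)(x)\n",
    "    x = Activation('relu')(x)\n",
    "    x = BatchNormalization()(x)\n",
    "    x = Dropout(0.2)(x)\n",
    "    x = Dense(4)(x)\n",
    "    x = Activation('relu')(x)\n",
    "    # merge the two branches and regress a single value\n",
    "    merged = concatenate([a, x])\n",
    "    out = Dense(4, activation='relu')(merged)\n",
    "    out = Dropout(0.2)(out)\n",
    "    out = Dense(1, activation='linear')(out)\n",
    "    return Model(inputs=[attr_in, img_in], outputs=out)"
   ]
  },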
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## F - Postprocessing : Evaluation"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Training set"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:predicting ...\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "avg. FVC: 2690.479018721756, std FVC 832.7709592986739\n",
      "mean difference : 147.58%, std: 939.70%\n"
     ]
    }
   ],
   "source": [
    "from postprocessing.evaluate import evaluate_hybrid\n",
    "preds = evaluate_hybrid(model, df, trainAttrX, train_dataset, trainY, sc)"
   ]
  },
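  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`evaluate_hybrid` comes from the local `postprocessing.evaluate` module. Judging by its output, it predicts with the hybrid model, maps the predictions back to the FVC scale with the fitted scaler `sc`, and prints error statistics. A rough sketch under those assumptions (`logging` is already configured at the top of the notebook):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def evaluate_hybrid(model, df, attrX, images, y, sc):\n",
    "    # hypothetical sketch; df is passed through unused here\n",
    "    logging.info('predicting ...')\n",
    "    preds = model.predict([attrX, images])\n",
    "    fvc_pred = sc.inverse_transform(preds).reshape(-1)\n",
    "    fvc_true = sc.inverse_transform(y).reshape(-1)\n",
    "    print(f'avg. FVC: {fvc_true.mean()}, std FVC {fvc_true.std()}')\n",
    "    diff = fvc_pred - fvc_true\n",
    "    print(f'mean difference : {diff.mean():.2f}%, std: {diff.std():.2f}%')\n",
    "    return preds"
   ]
  },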
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "35/35 [==============================] - ETA: 3:11 - loss: 0.142 - ETA: 33s - loss: 0.111 - ETA: 30s - loss: 0.11 - ETA: 27s - loss: 0.12 - ETA: 25s - loss: 0.12 - ETA: 24s - loss: 0.11 - ETA: 23s - loss: 0.14 - ETA: 22s - loss: 0.13 - ETA: 22s - loss: 0.12 - ETA: 20s - loss: 0.12 - ETA: 19s - loss: 0.11 - ETA: 19s - loss: 0.11 - ETA: 18s - loss: 0.11 - ETA: 17s - loss: 0.11 - ETA: 16s - loss: 0.10 - ETA: 15s - loss: 0.11 - ETA: 14s - loss: 0.11 - ETA: 13s - loss: 0.14 - ETA: 13s - loss: 0.18 - ETA: 12s - loss: 0.17 - ETA: 11s - loss: 0.17 - ETA: 10s - loss: 0.17 - ETA: 9s - loss: 0.1771 - ETA: 9s - loss: 0.172 - ETA: 8s - loss: 0.167 - ETA: 7s - loss: 0.167 - ETA: 6s - loss: 0.163 - ETA: 5s - loss: 0.161 - ETA: 4s - loss: 0.163 - ETA: 4s - loss: 0.161 - ETA: 3s - loss: 0.159 - ETA: 2s - loss: 0.157 - ETA: 1s - loss: 0.154 - ETA: 0s - loss: 0.152 - ETA: 0s - loss: 0.152 - 33s 798ms/step - loss: 0.1522\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "0.15223227441310883"
      ]
     },
     "execution_count": 15,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model.evaluate([trainAttrX, train_dataset], trainY)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Test set"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO:predicting ...\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "avg. FVC: 2690.479018721756, std FVC 832.7709592986739\n",
      "mean difference : 75.20%, std: 202.41%\n"
     ]
    }
   ],
   "source": [
    "preds = evaluate_hybrid(model, df, testAttrX, test_dataset, testY, sc)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "9/9 [==============================] - ETA: 6s - loss: 0.102 - ETA: 5s - loss: 0.101 - ETA: 4s - loss: 0.122 - ETA: 3s - loss: 0.170 - ETA: 3s - loss: 0.149 - ETA: 2s - loss: 0.131 - ETA: 1s - loss: 0.125 - ETA: 0s - loss: 0.114 - ETA: 0s - loss: 0.114 - 7s 815ms/step - loss: 0.1143\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "0.11426162719726562"
      ]
     },
     "execution_count": 17,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model.evaluate([testAttrX, test_dataset], testY)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## G - Competition score"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from postprocessing.dropout_predictions import create_dropout_predict_function\n",
    "import tensorflow as tf\n",
    "import matplotlib.pyplot as plt\n",
    "from postprocessing.evaluate import compute_score"
   ]
  },
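  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`create_dropout_predict_function` comes from the local `postprocessing.dropout_predictions` module. The idea is Monte Carlo dropout: rebuild the trained model with a chosen dropout rate, keep dropout active at prediction time, and run several stochastic forward passes so that the spread of the predictions estimates the per-sample uncertainty. A sketch of the usual pattern, assuming (from the `predict_with_dropout([input_data, 1])` calls below) a `K.function` whose last input is the Keras learning phase:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from keras import backend as K\n",
    "from keras.models import Model\n",
    "\n",
    "def create_dropout_predict_function(model, dropout):\n",
    "    # hypothetical sketch: force the requested rate on every Dropout layer\n",
    "    conf = model.get_config()\n",
    "    for layer in conf['layers']:\n",
    "        if layer['class_name'] == 'Dropout':\n",
    "            layer['config']['rate'] = dropout\n",
    "    model_dropout = Model.from_config(conf)\n",
    "    model_dropout.set_weights(model.get_weights())\n",
    "    # inputs + learning_phase flag; calling it with 1 keeps dropout turned on\n",
    "    return K.function(model_dropout.inputs + [K.learning_phase()], model_dropout.outputs)"
   ]
  },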
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Train set"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[<tf.Tensor 'dense_6_input_1:0' shape=(None, 6) dtype=float32>, <tf.Tensor 'input_2_1:0' shape=(None, 240, 240, 4) dtype=float32>]\n"
     ]
    }
   ],
   "source": [
    "tf.compat.v1.disable_eager_execution()\n",
    "\n",
    "dropout = 0.2\n",
    "num_iter = 20\n",
    " \n",
    "input_data=[trainAttrX, train_dataset]\n",
    "input_data[1] = np.asarray(input_data[1]).reshape(train_dataset.shape[0],240,240,4)\n",
    "num_samples = input_data[0].shape[0]\n",
    "\n",
    "predict_with_dropout = create_dropout_predict_function(model, dropout)\n",
    "\n",
    "predictions = np.zeros((num_samples, num_iter))\n",
    "for i in range(num_iter):\n",
    "    predictions[:,i] = predict_with_dropout([input_data,1])[0].reshape(-1)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Ne fonctionne pas à cause de la taille des données ==> non supporté par la machine"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Test"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "**Dropout 0.3**"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[<tf.Tensor 'dense_6_input_2:0' shape=(None, 6) dtype=float32>, <tf.Tensor 'input_2_2:0' shape=(None, 240, 240, 4) dtype=float32>]\n"
     ]
    }
   ],
   "source": [
    "tf.compat.v1.disable_eager_execution()\n",
    "\n",
    "dropout = 0.3\n",
    "num_iter = 20\n",
    " \n",
    "input_data=[testAttrX, test_dataset]\n",
    "input_data[1] = np.asarray(input_data[1]).reshape(test_dataset.shape[0],240,240,4)\n",
    "num_samples = input_data[0].shape[0]\n",
    "\n",
    "predict_with_dropout = create_dropout_predict_function(model, dropout)\n",
    "\n",
    "predictions = np.zeros((num_samples, num_iter))\n",
    "for i in range(num_iter):\n",
    "    predictions[:,i] = predict_with_dropout([input_data,1])[0].reshape(-1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXkAAAD4CAYAAAAJmJb0AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAPeElEQVR4nO3df6jdd33H8edrabVOu7U1dyE0vbtVy6SMmcpdVqmMGleJrWiFMiyi+SNwHShUJtPoYFPYIIVpdDDEuHbNH84f8wctrZtmMSLCSJfYtE0aS2MXWUNs0tmq/adb6nt/nG/lGm+8557zPfcmnzwfcDjf7+f7/Z7zvh84r3zy/ZmqQpLUpt9Y6QIkSZNjyEtSwwx5SWqYIS9JDTPkJalhFyznl61evbpmZmaW8ysl6Zy3f//+p6pqapRtlzXkZ2Zm2Ldv33J+pSSd85L8cNRt3V0jSQ0z5CWpYYa8JDXMkJekhhnyktQwQ16SGjZ0yCdZleSBJPd281cm2ZvkSJIvJnnR5MqUJI1iKSP524DD8+ZvB7ZX1auAp4EtfRYmSRrfUCGfZB1wE/CP3XyAjcCXu1V2AjdPoD5J0hiGveL1k8AHgYu7+ZcDz1TVqW7+CeDyhTZMMgfMAUxPT49c6LlgZut9vX7e0W039fp5ks4/i47kk7wFOFFV+0f5gqraUVWzVTU7NTXSrRckSSMaZiR/HfDWJDcCFwG/BXwKuCTJBd1ofh1wbHJlSpJGsehIvqo+XFXrqmoGeAfwrap6J7AHuKVbbTNw98SqlCSNZJzz5D8E/HmSIwz20d/RT0mSpL4s6VbDVfVt4Nvd9OPAhv5LkiT1xSteJalhhrwkNcyQl6SGGfKS1DBDXpIaZshLUsMMeUlqmCEvSQ0z5CWpYUu64lXLy1sXSxqXI3lJapghL0kNM+QlqWGGvCQ1zJCXpIYZ8pLUsGEe5H1RkvuTPJjkUJKPde13JfmvJAe61/qJVytJWpJhzpN/DthYVc8muRD4bpJ/7Zb9RVV9eXLlSZLGsWjIV1UBz3azF3avmmRRkqR+DLVPPsmqJAeAE8CuqtrbLfrbJA8l2Z7kxZMqUpI0mqFCvqqer6r1wDpgQ5LfBz4MvBr4Q+Ay4EMLbZtkLsm+JPtOnjzZT9WSpKEs6eyaqnoG2ANsqqrjNfAc8E/AhjNss6OqZqtqdmpqauyCJUnDG+bsmqkkl3TTLwFuAL6fZG3XFuBm4ODkypQkjWKYs2vWAjuTrGLwj8KXqureJN9KMgUEOAD82eTKlCSNYpizax4CrlmgfeNEKpIk9cYrXiWpYYa8JDXMkJekhhnyktQwQ16SGmbIS1LDDHlJapghL0kNM+QlqWGGvCQ1zJCXpIYZ8pLUMENekhpmyEtSwwx5SWqYIS9JDTPkJalhwzzj9aIk9yd5MMmhJB/r2q9MsjfJkSRfTPKiyZcrSVqKYUbyzwEbq+o1wHpgU5JrgduB7VX1KuBpYMvEqpQkjWTRkK+BZ7vZC7tXARuBL3ftO4GbJ1GgJGl0Q+2TT7IqyQHgBLAL+AHwTFWd6lZ5Arj8DNvOJdmXZN/Jkyd7KFmSNKyhQr6qnq+q9cA6YAPw6mG/oKp2VNVsVc1OTU2NVqUkaSRLOrumqp4B9gCvAy5JckG3aB1wrN/SJEnjGubsmqkkl3TTLwFuAA4zCPtbutU2A3dPqEZJ0oguWHwV1gI7k6xi8I/Cl6rq3iSPAF9I8jfAA8AdE6xTkjSCRUO+qh4Crlmg/XEG++clSWepYUbyasTM1vt6/8yj227q/TMl9cfbGkhSwwx5SWqYIS9JDTPkJalhhrwkNcyQl6SGGfKS1DBDXpIaZshLUsMMeUlqmCEvSQ0z5CWpYYa8JDXMkJekhhnyktQwQ16SGjbMM16vSLInySNJDiW5rWv/aJJjSQ50rxsnX64kaSmGeTLUKeADVfW9JBcD+5Ps6pZtr6q/m1x5kqRxDPOM1+PA8W76Z0kOA5dPujBJ0viW9IzXJDMMHuq9F7gOeF+SdwP7GIz2n15gmzlgDmB6enrcenszieedStLZZugDr0leBnwFeH9V/RT4NPBKYD2Dkf7HF9quqnZU1WxVzU5NTY1fsSRpaEOFfJILGQT856rqqwBV9WRVPV9VPwc+C2yYXJmSpFEMc3ZNgDuAw1X1iXnta+et9nbgYP/lSZLGMcw++euAdwEPJznQtX0EuDXJeqCAo8B7JlCfJGkMw5xd810gCyz6ev/lSJL65BWvktQwQ16SGmbIS1LDDHlJapghL0kNW9JtDaTz3blwO4yj225a6RJ0FnEkL0kNM+QlqWGGvCQ1zJCXpIYZ8pLUMENekhpmyEtSwwx5SWqYIS9JDfOKV42l7ytAvVpT6pcjeUlq2DDPeL0iyZ4kjyQ5lOS2rv2yJLuSPNa9Xzr5ciVJSzHMSP4U8IGquhq4FnhvkquBrcDuqroK2N3NS5LOIouGfFUdr6rvddM/Aw4DlwNvA3Z2q+0Ebp5QjZKkES3pwGuSGeAaYC+wpqqOd4t+BKw5wzZzwBzA9PT0yIVKGo4HwzXf0Adek7wM+Arw/qr66fxlVVVALbRdVe2oqtmqmp2amhqrWEnS0gwV8kkuZBDwn6uqr3bNTyZZ2y1fC5yYTImSpFENc3ZNgDuAw1X1iXmL7gE2d9Obgbv7L0+SNI5h9slfB7wLeDjJga7tI8A24EtJtgA/BP50IhVKkka2aMhX1XeBnGHxG/stR5LUJ694laSGGfKS1DBDXpIaZshLUsMMeUlqmPeTV9P6vsRfOtc4kpekhhnyktQwQ16SGmbIS1LDDHlJapghL0kNM+QlqWGGvCQ1zJCXpIYZ8pLUMENekho2zDNe70xyIsnBeW0fTXIsyYHudeNky5QkjWKYkfxdwKYF2rdX1fru9fV+y5Ik9WHRkK+q7wA/XoZaJEk9G2ef/PuSPNTtzrn0TCslmUuyL8m+kydPjvF1kqSlGjXkPw28ElgPHAc+fqYVq2pHVc1W1ezU1NSIXydJGsVIIV9VT1bV81X1c+CzwIZ+y5Ik9WGkkE+ydt7s24GDZ1pXkrRyFn38X5LPA9cDq5M8Afw1cH2S9UABR4H3TK5ESdKoFg35qrp1geY7JlCLJKlnXvEqSQ0z5CWpYYa8JDXMkJekhi164FVaTjNb71vpEqSmOJKXpIYZ8pLUMENekhpmyEtSwwx5SWqYIS9JDTPkJalhhrwkNcyQl6SGGfKS1LBz5rYGXu4uSUvnSF6SGrZoyCe5M8mJJAfntV2WZFeSx7r3SydbpiRpFMOM5O8CNp3WthXYXVVXAbu7eUnSWWbRkK+q7wA/Pq35bcDObnoncHO/ZUmS+jDqgdc1VXW8m/4RsOZMKyaZA+YApqenR/w6SStlEic9HN12U++fqYWNfeC1qgqoX7N8R1XNVtXs1NTUuF8nSVqCUUP+ySRrAbr3E/2VJEnqy6ghfw+wuZveDNzdTzmSpD4Ncwrl54H/AH4vyRNJtgDbgBuSPAb8STcvSTrLLHrgtapuPcOiN/ZciyS
pZ17xKkkNM+QlqWGGvCQ1zJCXpIYZ8pLUMENekhpmyEtSwwx5SWqYIS9JDTPkJalhhrwkNcyQl6SGGfKS1DBDXpIaZshLUsMMeUlqmCEvSQ1b9MlQv06So8DPgOeBU1U120dRkqR+jBXynTdU1VM9fI4kqWfurpGkho07ki/gm0kK+ExV7Th9hSRzwBzA9PT0mF8nqQUzW+/r9fOObrup189rybgj+ddX1WuBNwPvTfLHp69QVTuqaraqZqempsb8OknSUowV8lV1rHs/AXwN2NBHUZKkfowc8klemuTiF6aBNwEH+ypMkjS+cfbJrwG+luSFz/nnqvq3XqqSJPVi5JCvqseB1/RYiySpZ55CKUkNM+QlqWGGvCQ1zJCXpIb1ce8aSVpRXkF7Zo7kJalhhrwkNcyQl6SGGfKS1DAPvErSafo+kAsrdzDXkbwkNcyQl6SGGfKS1DBDXpIaZshLUsMMeUlqmCEvSQ0bK+STbEryaJIjSbb2VZQkqR/jPMh7FfAPwJuBq4Fbk1zdV2GSpPGNM5LfABypqser6n+BLwBv66csSVIfxrmtweXAf8+bfwL4o9NXSjIHzHWzzyZ5dIzvnJTVwFMrXcQ5wH4ajv00nPOqn3L7yJuuBn531I0nfu+aqtoB7Jj094wjyb6qml3pOs529tNw7Kfh2E/D6fppZtTtx9ldcwy4Yt78uq5NknSWGCfk/xO4KsmVSV4EvAO4p5+yJEl9GHl3TVWdSvI+4BvAKuDOqjrUW2XL66zenXQWsZ+GYz8Nx34azlj9lKrqqxBJ0lnGK14lqWGGvCQ17LwI+SR3JjmR5OC8tsuS7EryWPd+adeeJH/f3arhoSSvXbnKl0+SK5LsSfJIkkNJbuva7ad5klyU5P4kD3b99LGu/coke7v++GJ3MgJJXtzNH+mWz6zoH7DMkqxK8kCSe7t5++k0SY4meTjJgST7urbefnfnRcgDdwGbTmvbCuyuqquA3d08DG7TcFX3mgM+vUw1rrRTwAeq6mrgWuC93W0q7Kdf9hywsapeA6wHNiW5Frgd2F5VrwKeBrZ0628Bnu7at3frnU9uAw7Pm7efFvaGqlo/77qB/n53VXVevIAZ4OC8+UeBtd30WuDRbvozwK0LrXc+vYC7gRvsp1/bR78JfI/Bld5PARd07a8DvtFNfwN4XTd9QbdeVrr2ZeqfdV1AbQTuBWI/LdhPR4HVp7X19rs7X0byC1lTVce76R8Ba7rphW7XcPlyFrbSuv8qXwPsxX76Fd0uiAPACWAX8APgmao61a0yvy9+0U/d8p8AL1/WglfOJ4EPAj/v5l+O/bSQAr6ZZH93Gxjo8Xc38dsanAuqqpJ4LimQ5GXAV4D3V9VPk/ximf00UFXPA+uTXAJ8DXj1ylZ09knyFuBEVe1Pcv0Kl3O2e31VHUvyO8CuJN+fv3Dc3935PJJ/MslagO79RNd+3t6uIcmFDAL+c1X11a7ZfjqDqnoG2MNgt8MlSV4YNM3vi1/0U7f8t4H/Wd5KV8R1wFuTHGVwh9qNwKewn35FVR3r3k8wGDRsoMff3fkc8vcAm7vpzQz2Qb/Q/u7uKPa1wE/m/bepWRkM2e8ADlfVJ+Ytsp/mSTLVjeBJ8hIGxy0OMwj7W7rVTu+nF/rvFuBb1e1MbVlVfbiq1tXgxlrvYPB3vxP76ZckeWmSi1+YBt4EHKTP391KH3RYpgMbnweOA//HYB/WFgb7+3YDjwH/DlzWrRsGD0P5AfAwMLvS9S9TH72ewb7Bh4AD3etG++lX+ukPgAe6fjoI/FXX/grgfuAI8C/Ai7v2i7r5I93yV6z037ACfXY9cK/9tGDfvAJ4sHsdAv6ya+/td+dtDSSpYefz7hpJap4hL0kNM+QlqWGGvCQ1zJCXpIYZ8pLUMENekhr2/w7ddrWgh7i8AAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "FVC_pred = sc.inverse_transform(predictions)\n",
    "plt.hist(np.std(FVC_pred, axis = 1), bins=15)\n",
    "plt.show()"
   ]
  },
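  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The histogram above shows, for each test sample, the standard deviation of its 20 stochastic predictions. The same matrix can also give approximate prediction intervals, for instance (illustrative only):"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# per-sample 95% interval across the Monte Carlo passes\n",
    "lower, upper = np.percentile(FVC_pred, [2.5, 97.5], axis=1)\n",
    "print('median interval width :', np.median(upper - lower))"
   ]
  },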
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXIAAAD4CAYAAADxeG0DAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAPeklEQVR4nO3db4xld13H8ffHtqiBYls72WxKl6mImPKAbZlUDH+ClD/9o7YoIfQBVqxZTGhCFR+sNNGa+KCABWM04JI2LKZQUGggFJWlqRCiFLdlabetZbdlid1sdxcLaU1MdcvXB/es3A5zZ+6de8/M/Hbfr+Tmnvs758757u/M+eyZc3733FQVkqR2/cR6FyBJmo5BLkmNM8glqXEGuSQ1ziCXpMadupYrO/vss2t+fn4tVylJzbvnnnu+V1Vzo+avaZDPz8+ze/futVylJDUvyXeXm++pFUlqnEEuSY0zyCWpcQa5JDXOIJekxhnkktQ4g1ySGmeQS1LjDHJJatyafrJTWsn89jvGWu7AjZf3XInUDo/IJalxBrkkNc4gl6TGGeSS1DiDXJIaZ5BLUuMMcklqnEEuSY0zyCWpcQa5JDXOIJekxhnkktQ4g1ySGmeQS1LjVgzyJD+V5BtJvpXkgSR/2rWfl+TuJPuTfCrJc/ovV5K02DhH5E8Dr6uqlwFbgUuSvAJ4H/Chqvp54PvANb1VKUkaacUgr4H/6l6e1j0KeB3w9137TuDKPgqUJC1vrHPkSU5Jsgc4AuwCHgF+UFXHukUeA87ppUJJ0rLGCvKqeqaqtgIvAC4CfnHcFSTZlmR3kt1Hjx5dXZWSpJEmGrVSVT8A7gJ+GTgjyfHv/HwBcHDEe3ZU1UJVLczNzU1TqyRpCeOMWplLckY3/dPAG4CHGAT6W7rFrgY+11ONkqRlnLryImwGdiY5hUHwf7qqvpDkQeC2JH8GfBO4ucc6JUkjrBjkVXUfcMES7Y8yOF8uSVpHfrJTkhpnkEtS4wxySWqcQS5JjTPIJalxBrkkNc4gl6TGjfOBIGlq89vvWO8SpBOWR+SS1DiDXJIaZ5BLUuMMcklqnEEuSY0zyCWpcQa5JDXOIJekxhnkktQ4g1ySGmeQS1LjDHJJapxBLkmNM8glqXEGuSQ1bsUgT3JukruSPJjkgSTv7tpvSHIwyZ7ucVn/5UqSFhvniyWOAe+pqnuTnA7ck2RXN+9DVfXn/ZUnSVrJikFeVYeAQ930U0keAs7puzBJ0ngmOkeeZB64ALi7a7o2yX1Jbkly5oj3bEuyO8nuo0ePTletJOnHjB3kSZ4HfAa4rqqeBD4MvAjYyuCI/aal3ldVO6pqoaoW5ubmpq9YkvQsYwV5ktMYhPitVfVZgKo6XFXPVNUPgY8CF/VXpiRplHFGrQS4GXioqj441L55aLE3A3tnX54kaSXjjFp5JfB24P4ke7q29wJXJdkKFHAAeGcP9UmSVjDOqJWvAVli1hdnX44kaVJ+slOSGmeQS1LjDHJJapxBLkmNM8glqXEGuSQ1bpxx5DrJzG+/Y+xlD9x4eY+VSBqHR+SS1DiDXJIaZ5BLUuMMcklqnEEuSY0zyCWpcQ4/VJPGHSLp8EidDDwil6TGGeSS1DiDXJIaZ5BLUuMMcklqnKNWNJVJbrAlqR8ekUtS4wxySWrcikGe5NwkdyV5MMkDSd7dtZ+VZFeSfd3zmf2XK0labJwj8mPAe6rqfOAVwLuSnA9sB+6sqhcDd3avJUlrbMUgr6pDVXVvN/0U8BBwDnAFsLNbbCdwZU81SpKWMdGolSTzwAXA3cCmqjrUzXoc2DTiPduAbQBbtmxZdaHSasz6nix+DZ42orEvdiZ5HvAZ4LqqenJ4XlUVUEu9r6p2VNVCVS3Mzc1NVawk6ceNFeRJTmMQ4rdW1We75sNJNnfzNwNH+ilRkrSccUatBLgZeKiqPjg06/PA1d301cDnZl+eJGkl45wjfyXwduD+JHu6tvcCNwKfTnIN8F3grb1UKEla1opBXlVfAzJi9sWzLUeSNCk/2SlJjTPIJalxBrkkNc4gl6TGGeSS1DiDXJIaZ5BLUuP8qjcJv7JObfOIXJIaZ5BLUuMMcklqnEEuSY0zyCWpcY5aOYk4MkM6MXlELkmNM8glqXEGuSQ1ziCXpMYZ5JLUOINckhpnkEtS4wxySWqcQS5JjVsxyJPckuRIkr1DbTckOZhkT/e4rN8yJUmjjHNE/jHgkiXaP1RVW7vHF2dbliRpXCsGeVV9FXhiDWqRJK3CNDfNujbJbwG7gfdU1feXWijJNmAbwJYtW6ZYnUbxZljSyW21Fzs/DLwI2AocAm4atWBV7aiqhapamJubW+XqJEmjrCrIq+pwVT1TVT8EPgpcNNuyJEnjWlWQJ9k89PLNwN5Ry0qS+rXiOfIknwReC5yd5DHgT4DXJtkKFHAAeGd/JUqSlrNikFfVVUs039xDLZKkVfCTnZLUOINckhpnkEtS4wxySWqcQS5JjTPIJalxBrkkNc4gl6TGGeSS1DiDXJIaZ5BLUuMMcklq3DTfECRpGeN+c9OBGy/vuRKd6Dwil6TGGeSS1DiDXJIaZ5BLUuMMcklqnEEuSY0zyCWpcQa5JDXOIJekxq0Y5EluSXIkyd6htrOS7Eqyr3s+s98yJUmjjHNE/jHgkkVt24E7q+rFwJ3da0nSOlgxyKvqq8ATi5qvAHZ20zuBK2dbliRpXKs9R76pqg51048Dm0YtmGRbkt1Jdh89enSVq5MkjTL1xc6qKqCWmb+jqhaqamFubm7a1UmSFlltkB9Oshmgez4yu5IkSZNYbZB/Hri6m74a+NxsypEkTWqc4YefBP4VeEmSx5JcA9wIvCHJPuD13WtJ0jpY8RuCquqqEbMunnEtkqRV8JOdktQ4g1ySGmeQS1LjDHJJapxBLkmNM8glqXEGuSQ1ziCXpMYZ5JLUOINckhpnkEtS4wxySWqcQS5JjVvx7ocnqvntd4y97IEbL++xEkmajkfkktQ4g1ySGmeQS1LjDHJJapxBLkmNO2lHrUgbxSQjqMbhKKuTj0fkktQ4g1ySGjfVqZUkB4CngGeAY1W1MIuiJEnjm8U58l+pqu/N4OdIklbBUyuS1Lhpj8gL+FKSAv6mqnYsXiDJNmAbwJYtW6Zc3fpYr1EFs16vpBPTtEfkr6qqC4FLgXclec3iBapqR1UtVNXC3NzclKuTJC02VZBX1cHu+QhwO3DRLIqSJI1v1UGe5LlJTj8+DbwR2DurwiRJ45nmHPkm4PYkx3/OJ6rqH2dSlSRpbKsO8qp6FHjZDGuRJK2Cww8lqXHeNGsdOKxQfRr398uba504PCKXpMYZ5JLUOINckhpnkEtS4wxySWqcQS5JjTPIJalxBrkkNc4gl6TGGeSS1DiDXJIa571WpJNUH/dkOdnu8zLJfZP6/Dd7RC5JjTPIJalxBrkkNc4gl6TGGeSS1DiDXJIa18zwQ78eTdK0TtThkR6RS1LjDHJJatxUQZ7kkiQPJ9mfZPusipIkjW/VQZ7kFOCvgUu
B84Grkpw/q8IkSeOZ5oj8ImB/VT1aVf8D3AZcMZuyJEnjSlWt7o3JW4BLqup3u9dvB36pqq5dtNw2YFv38iXAw6svF4Czge9N+TP6slFrs67JbNS6YOPWZl2TmbSuF1bV3KiZvQ8/rKodwI5Z/bwku6tqYVY/b5Y2am3WNZmNWhds3NqsazKzrmuaUysHgXOHXr+ga5MkraFpgvzfgBcnOS/Jc4C3AZ+fTVmSpHGt+tRKVR1Lci3wT8ApwC1V9cDMKhttZqdperBRa7OuyWzUumDj1mZdk5lpXau+2ClJ2hj8ZKckNc4gl6TGbeggT/KpJHu6x4Eke7r2+ST/PTTvI0PveXmS+7vbBvxlkvRQ1w1JDg6t/7KheX/UrfvhJG8aau/9dgZJPpDk35Pcl+T2JGd07evaXyNqXbfbOyQ5N8ldSR5M8kCSd3ftE2/XHmo70G2PPUl2d21nJdmVZF/3fGbXnm6b7e+2+YU91fSSoT7Zk+TJJNetV38luSXJkSR7h9om7qMkV3fL70tydU91rc0+WVVNPICbgD/upueBvSOW+wbwCiDAPwCX9lDLDcAfLtF+PvAt4CeB84BHGFwIPqWb/jngOd0y5/dQ1xuBU7vp9wHv2wj9tcQ616Q/lln/ZuDCbvp04Nvdtptou/ZU2wHg7EVt7we2d9Pbh7brZd02S7cN716jbfc48ML16i/gNcCFw7/Tk/YRcBbwaPd8Zjd9Zg91rck+uaGPyI/r/kd6K/DJFZbbDDy/qr5egx75OHBl/xX+vyuA26rq6ar6DrCfwa0M1uR2BlX1pao61r38OoOx/SOtY3+t6+0dqupQVd3bTT8FPAScs8xbRm3XtXIFsLOb3smPttEVwMdr4OvAGd027dPFwCNV9d1llum1v6rqq8ATS6xzkj56E7Crqp6oqu8Du4BLZl3XWu2TTQQ58GrgcFXtG2o7L8k3k3wlyau7tnOAx4aWeYzld9BpXNv9uXTL8T/junX9xxLrH9Xep99h8L/5cevdX8PWoz+WlGQeuAC4u2uaZLv2oYAvJbkng9tbAGyqqkPd9OPApnWo67i38ewDqvXur+Mm7aMTap9c9yBP8uUke5d4DB+hXcWzf3kOAVuq6gLgD4BPJHn+Gtb1YeBFwNaulptmue4p6jq+zPXAMeDWrqn3/mpRkucBnwGuq6onWcftOuRVVXUhg7uKvivJa4Zndkdp6zJmOIMP/v068Hdd00borx+znn00St/75Lp/1VtVvX65+UlOBX4DePnQe54Gnu6m70nyCPALDG4RMPyny6pvG7BSXUP1fRT4QvdyudsWzOR2BmP0128Dvwpc3P1Cr0l/TWjdb++Q5DQGIX5rVX0WoKoOD80fd7vOVFUd7J6PJLmdwSmJw0k2V9Wh7k/vI2tdV+dS4N7j/bQR+mvIpH10EHjtovZ/7qOwNdknZ3Hxoc8Hg/NWX1nUNkd38YTBBbODwFkjLhRc1kNNm4emf5/B+UCAl/LsizyPMrg4dGo3fR4/urj30p766kFgbiP11xJ1rkl/LLP+MDj3+BfTbNce6noucPrQ9L902/QDPPtC3vu76ct59oW8b/Tcb7cB79gI/cWii4WT9hGDi5zfYXCh88xu+qwe6lqTfXJNdpwpO+ZjwO8tavtN4AFgD3Av8GtD8xaAvQyulP8V3adXZ1zT3wL3A/cxuL/M8C/09d26H2boajODq+ff7uZd31Nf7Wdw3m9P9/jIRuivEbX23h/LrPtVDP70vm+ory5bzXadcV0/xyAAv9Vtr+u79p8F7gT2AV8e2uHD4MtdHunqXuixz54L/CfwM0Nt69JfDE6zHgL+l8E55GtW00cMzlnv7x7v6KmuNdkn/Yi+JDVu3S92SpKmY5BLUuMMcklqnEEuSY0zyCWpcQa5JDXOIJekxv0fRyCwfjw4IS8AAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "diff_fvc = (np.mean(FVC_pred,axis=1) - sc.inverse_transform(testY))\n",
    "diff = (np.mean(predictions,axis=1) - testY)\n",
    "plt.hist(diff_fvc, bins=30)\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "MSE :  95370.20543924515\n",
      "RMSE :  308.8206687371251\n"
     ]
    }
   ],
   "source": [
    "MSE = np.mean(diff_fvc*diff_fvc)\n",
    "RMSE = np.sqrt(MSE)\n",
    "print(\"MSE : \",MSE)\n",
    "print(\"RMSE : \", RMSE)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "MSE :  0.13710971929203394\n",
      "RMSE :  0.3702832959938025\n"
     ]
    }
   ],
   "source": [
    "MSE = np.mean(diff*diff)\n",
    "RMSE = np.sqrt(MSE)\n",
    "print(\"MSE : \",MSE)\n",
    "print(\"RMSE : \", RMSE)"
   ]
  },
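  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Sanity check: since `diff_fvc` is just `diff` mapped back through the linear scaler, the two RMSE values above should differ exactly by the scale factor of `sc`. The ratio 308.82 / 0.3703 is about 834, close to the FVC standard deviation of about 832.8 printed earlier, which is consistent with a standardized target:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# ratio of raw-scale to scaled-range error spreads; should match the scaler's scale factor\n",
    "print(np.std(diff_fvc) / np.std(diff))"
   ]
  },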
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "-13.174327277900492"
      ]
     },
     "execution_count": 23,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "compute_score(testY,np.mean(FVC_pred,axis=1),np.std(FVC_pred,axis=1))"
   ]
  },
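  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`compute_score` comes from the local `postprocessing.evaluate` module. Assuming it implements the usual FVC competition metric (a Laplace log likelihood, where higher, i.e. closer to 0, is better), it would look roughly like the sketch below. Note that `testY` is still on the scaled range in the call above while `FVC_pred` has been inverse-transformed; if the metric expects raw FVC on both sides, `sc.inverse_transform(testY)` would be the consistent first argument."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def laplace_log_likelihood(y_true, y_pred, sigma):\n",
    "    # hypothetical sketch of the competition metric\n",
    "    sigma_clipped = np.maximum(sigma, 70)              # floor on the predicted uncertainty\n",
    "    delta = np.minimum(np.abs(y_true - y_pred), 1000)  # cap on the absolute error\n",
    "    metric = -np.sqrt(2) * delta / sigma_clipped - np.log(np.sqrt(2) * sigma_clipped)\n",
    "    return np.mean(metric)"
   ]
  },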
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "**Dropout 0.4**"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[<tf.Tensor 'dense_6_input_4:0' shape=(None, 6) dtype=float32>, <tf.Tensor 'input_2_4:0' shape=(None, 240, 240, 4) dtype=float32>]\n"
     ]
    }
   ],
   "source": [
    "tf.compat.v1.disable_eager_execution()\n",
    "\n",
    "dropout = 0.4\n",
    "num_iter = 20\n",
    " \n",
    "input_data=[testAttrX, test_dataset]\n",
    "input_data[1] = np.asarray(input_data[1]).reshape(test_dataset.shape[0],240,240,4)\n",
    "num_samples = input_data[0].shape[0]\n",
    "\n",
    "predict_with_dropout = create_dropout_predict_function(model, dropout)\n",
    "\n",
    "predictions = np.zeros((num_samples, num_iter))\n",
    "for i in range(num_iter):\n",
    "    predictions[:,i] = predict_with_dropout([input_data,1])[0].reshape(-1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXUAAAD4CAYAAAATpHZ6AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAPD0lEQVR4nO3df6jdd33H8edrbbVOxbb2LoS22a2zWMpYU7nLWpShcZXOjllBxspwgRXiQKGCbIsONmUbRJh2G0hZXGvzh/PHqq6ldWoWCyKMdDeapkljadWIDWmTTqt2f3RLfe+P8427hJvc773ne+5NPuf5gMP5fj/f7znn/Tnted1vPuf7/ZxUFZKkNvzCWhcgSRqOoS5JDTHUJakhhrokNcRQl6SGnL+aL3bppZfW7Ozsar6kJJ3z9u7d+2xVzfTZd1VDfXZ2lvn5+dV8SUk65yX5ft99HX6RpIYY6pLUEENdkhpiqEtSQwx1SWqIoS5JDTHUJakhhrokNcRQl6SGrOoVpa2b3fbgoM93ePvNgz6fpPZ5pC5JDTHUJakhhrokNcRQl6SGGOqS1BBDXZIaYqhLUkMMdUlqiKEuSQ0x1CWpIYa6JDXEUJekhiwZ6kkuTPJwkkeSHEzy4a79niTfS7Kvu22ceLWSpDPqM0vjC8Dmqno+yQXAN5L8W7ftT6rq3smVJ0lajiVDvaoKeL5bvaC71SSLkiStTK8x9STnJdkHHAN2VdWebtPfJNmf5I4kLz3NY7cmmU8yf/z48WGqliQtqleoV9WLVbURuBzYlORXgQ8AVwO/DlwC/NlpHrujquaqam5mZmaYqiVJi1rW2S9V9RzwEHBTVR2tkReATwKbJlCfJGkZ+pz9MpPkom75ZcCNwLeTrO/aAtwCHJhcmZKkPvqc/bIe2JnkPEZ/BD5XVQ8k+VqSGSDAPuCPJ1emJKmPPme/7AeuW6R980QqkiStmFeUSlJDDHVJaoihLkkNMdQlqSGGuiQ1xFCXpIYY6pLUEENdkhpiqEtSQwx1SWqIoS5JDTHUJakhhrokNcRQl6SGGOqS1BBDXZIaYqhLUkMMdUlqSJ8fnr4wycNJHklyMMmHu/Yrk+xJ8mSSzyZ5yeTLlSSdSZ8j9ReAzVV1LbARuCnJ9cBHgDuq6rXAj4DbJlalJKmXJUO9Rp7vVi/obgVsBu7t2ncCt0yiQElSf+f32SnJecBe4LXAx4HvAM9V1Ylul6eAy07z2K3AVoANGzaMW+9gZrc9uNYlSNLgen1RWlUvVtVG4HJgE3B13xeoqh1VNVdVczMzMyurUpLUy7LOfqmq54CHgBuAi5KcPNK/HDgybGmSpOXqc/bLTJKLuuWXATcChxiF+zu73bYA902oRklST33G1NcDO7tx9V8APldVDyR5DPhMkr8GvgXcNcE6JUk9LBnqVbUfuG6R9u8yGl+XJJ0lvKJUkhpiqEtSQwx1SWqIoS5JDTHUJakhvaYJ0NoYeiqDw9tvHvT54NyoUZomHqlLUkMMdUlqiKEuSQ0x1CWpIYa6JDXEUJekhhjqktQQQ12SGmKoS1JDDHVJasg5M03A0JejS1KLPFKXpIb0+eHpK5I8lOSxJAeT3N61fyjJkST7utvbJl+uJOlM+gy/nADeX1XfTPJKYG+SXd22O6rqbydXniRpOfr88PRR4Gi3/NMkh4DLJl2YJGn5ljWmnmQWuA7Y0zW9N8n+JHcnuXjo4iRJy9P77JckrwA+D7yvqn6S5E7gr4Dq7j8K/NEij9sKbAXYsGHDEDVrhTyDSGpfryP1JBcwCvRPVdUXAKrqmap6sap+BnwC2LTYY6tqR1XNVdXczMzMUHVLkhbR5+yXAHcBh6rqYwva1y/Y7R3AgeHLkyQtR5/hlzcA7wIeTbKva/sgcGuSjYyGXw4D755AfZKkZehz9ss3gCyy6UvDlyNJGsc5M02ApsPQX+Ye3n7zoM8nne2cJkCSGmKoS1JDDHVJaoihLkkNMdQlqSGGuiQ1xFCXpIYY6pLUEENdkhpiqEtSQwx1SWqIoS5JDTHUJakhhrokNcRQl6SGOJ+6tMacQ15D8khdkhpiqEtSQ5YM9SRXJHkoyWNJDia5vWu/JMmuJE909xdPvlxJ0pn0OVI/Aby/qq4Brgfek+QaYBuwu6quAnZ365KkNbRkqFfV0ar6Zrf8U+AQcBnwdmBnt9tO4JYJ1ShJ6mlZY+pJZoHrgD3Auqo62m16Glh3msdsTTKfZP748ePj1CpJWkLvUE/yCuDzwPuq6icLt1VVAbXY46pqR1XNVdXczMzMWMVKks6sV6gnuYBRoH+qqr7QNT+TZH23fT1wbDIlSpL66nP2S4C7gENV9bEFm+4HtnTLW4D7hi9PkrQcfa4ofQPwLuDRJPu6tg8C24HPJbkN+D7wexOpUJLU25KhXlXfAHKazW8ZthxJ0ji8olSSGmKoS1JDDHVJaoihLkkNMdQlqSGGuiQ1xFCXpIYY6pLUEENdkhpiqEtSQwx1SWqIoS5JDTHUJakhhrokNcRQl6SGGOqS1JA+v3wknbNmtz046PMd3n7zoM8nDc0jdUlqSJ8fnr47ybEkBxa0fSjJkST7utvbJlumJKmPPkfq9wA3LdJ+R1Vt7G5fGrYsSdJKLBnqVfV14IerUIskaUzjjKm/N8n+bnjm4tPtlGRrkvkk88ePHx/j5SRJS1lpqN8J/AqwETgKfPR0O1bVjqqaq6q5mZmZFb6cJKmPFYV6VT1TVS9W1c+ATwCbhi1LkrQSKwr1JOsXrL4DOHC6fSVJq2fJi4+SfBp4E3BpkqeAvwTelGQjUMBh4N2TK1GS1NeSoV5Vty7SfNcEapEkjclpAiSd0dBTLYDTLUyS0wRIUkMMdUlqiKEuSQ0x1CWpIYa6JDXEUJekhhjqktQQQ12SGmKoS1JDvKJUaswkrgDVucMjdUlqiKEuSQ0x1CWpIYa6JDXEUJekhhjqktQQQ12SGrJkqCe5O8mxJAcWtF2SZFeSJ7r7iydbpiSpjz5H6vcAN53Stg3YXVVXAbu7dUnSGlsy1Kvq68APT2l+O7CzW94J3DJsWZKklVjpmPq6qjraLT8NrDvdjkm2JplPMn/8+PEVvpwkqY+xvyitqgLqDNt3VNVcVc3NzMyM+3KSpDNYaag/k2Q9QHd/bLiSJEkrtdJQvx/Y0i1vAe4bphxJ0jj6nNL4aeA/gNcleSrJbcB24MYkTwC/1a1LktbYkvOpV9Wtp9n0loFrkSSNyR/JkJbBH6DQ2c5pAiSpIYa6JDXEUJekhhjqktQQQ12SGmKoS1JDDHVJaoihLkkNMdQlqSGGuiQ1xFCXpIYY6pLUEENdkhpiqEtSQwx1SWqI86lLOucNPc/94e03D/p8q8kjdUlqyFhH6kkOAz8FXgROVNXcEEVJklZmiOGXN1fVswM8jyRpTA6/SFJDxg31Ar6aZG+SrUMUJElauXGHX95YVUeS/BKwK8m3q+rrC3fown4rwIYNG8Z8OUnSmYx1pF5VR7r7Y8AXgU2L7LOjquaqam5mZmacl5M
kLWHFoZ7k5UleeXIZeCtwYKjCJEnLN87wyzrgi0lOPs8/V9WXB6lKkrQiKw71qvoucO2AtUiSxuQpjZLUEENdkhpiqEtSQwx1SWqIoS5JDTHUJakhhrokNcRQl6SGGOqS1BBDXZIaYqhLUkOG+Dk7SVqW2W0PrnUJzfJIXZIaYqhLUkMMdUlqiKEuSQ3xi1JJOsUkvsg9vP3mwZ9zMR6pS1JDDHVJashYoZ7kpiSPJ3kyybahipIkrcyKQz3JecDHgd8GrgFuTXLNUIVJkpZvnCP1TcCTVfXdqvof4DPA24cpS5K0EuOc/XIZ8IMF608Bv3HqTkm2Alu71eeTPL7E814KPDtGXec6+z/d/Qffgyb7n4/03nWx/v9y3wdP/JTGqtoB7Oi7f5L5qpqbYElnNfs/3f0H3wP7P17/xxl+OQJcsWD98q5NkrRGxgn1/wSuSnJlkpcAvw/cP0xZkqSVWPHwS1WdSPJe4CvAecDdVXVwgJp6D9U0yv5r2t8D+z+GVNVQhUiS1phXlEpSQwx1SWrIqod6kruTHEtyYEHbJUl2JXmiu7+4a0+Sf+imIdif5PWrXe+QklyR5KEkjyU5mOT2rn0q+g+Q5MIkDyd5pHsPPty1X5lkT9fXz3ZfvpPkpd36k9322TXtwECSnJfkW0ke6Nanpv9JDid5NMm+JPNd2zR9Bi5Kcm+Sbyc5lOSGIfu/Fkfq9wA3ndK2DdhdVVcBu7t1GE1BcFV32wrcuUo1TsoJ4P1VdQ1wPfCebmqFaek/wAvA5qq6FtgI3JTkeuAjwB1V9VrgR8Bt3f63AT/q2u/o9mvB7cChBevT1v83V9XGBedjT9Nn4O+BL1fV1cC1jP4/GK7/VbXqN2AWOLBg/XFgfbe8Hni8W/5H4NbF9mvhBtwH3DjF/f9F4JuMrkR+Fji/a78B+Eq3/BXghm75/G6/rHXtY/b78u6Duxl4AMiU9f8wcOkpbVPxGQBeBXzv1P+GQ/b/bBlTX1dVR7vlp4F13fJiUxFctpqFTUr3z+jrgD1MWf+7oYd9wDFgF/Ad4LmqOtHtsrCfP38Puu0/Bl69qgUP7++APwV+1q2/munqfwFfTbK3m0YEpuczcCVwHPhkN/z2T0lezoD9P1tC/edq9Oeo6fMsk7wC+Dzwvqr6ycJt09D/qnqxqjYyOmLdBFy9thWtniS/Axyrqr1rXcsaemNVvZ7R0MJ7kvzmwo2NfwbOB14P3FlV1wH/zf8PtQDj9/9sCfVnkqwH6O6Pde3NTUWQ5AJGgf6pqvpC1zw1/V+oqp4DHmI03HBRkpMXwy3s58/fg277q4D/Wt1KB/UG4HeTHGY0s+lmRmOs09J/qupId38M+CKjP+zT8hl4CniqqvZ06/cyCvnB+n+2hPr9wJZueQujseaT7X/YfQN8PfDjBf9EOeckCXAXcKiqPrZg01T0HyDJTJKLuuWXMfpO4RCjcH9nt9up78HJ9+adwNe6I5lzUlV9oKour6pZRlNrfK2q/oAp6X+Slyd55cll4K3AAabkM1BVTwM/SPK6ruktwGMM2f81+KLg08BR4H8Z/dW6jdEY4W7gCeDfgUu6fcPohzi+AzwKzK31Fx1j9v2NjP5ZtR/Y193eNi397/r0a8C3uvfgAPAXXftrgIeBJ4F/AV7atV/YrT/ZbX/NWvdhwPfiTcAD09T/rp+PdLeDwJ937dP0GdgIzHefgX8FLh6y/04TIEkNOVuGXyRJAzDUJakhhrokNcRQl6SGGOqS1BBDXZIaYqhLUkP+D0RuH66IRzzvAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "FVC_pred = sc.inverse_transform(predictions)\n",
    "plt.hist(np.std(FVC_pred, axis = 1), bins=15)\n",
    "plt.show()"
   ]
  },
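  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Each column of `predictions` holds one stochastic forward pass, so after `sc.inverse_transform` the row-wise standard deviation plotted above is the per-patient spread of the sampled FVC predictions, i.e. the model's uncertainty estimate."
   ]
  },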
  {
   "cell_type": "code",
   "execution_count": 37,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXAAAAD4CAYAAAD1jb0+AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAPbElEQVR4nO3db4xldX3H8fengG2jWJYyIRtkO2itDT5wwQm18U+s+Ic/bcG2MfLAbC3J2kQSae2DrSQtTfoAtGjTtNGsgbg2KNoq0YhtXQnVmFbsLq6wQHEXXFM2y+5aNGDS0C5+++Ce0cs4s/femXvmzg/er+Tmnvs75875zu/M/cyZc37nTKoKSVJ7fmbWBUiSVscAl6RGGeCS1CgDXJIaZYBLUqNOXc+VnXXWWTU/P7+eq5Sk5u3du/d7VTW3tH1dA3x+fp49e/as5yolqXlJvrtcu4dQJKlRBrgkNcoAl6RGGeCS1CgDXJIaZYBLUqMMcElqlAEuSY0ywCWpUet6JaaefeZ33DHWcoduuLznSqTnHvfAJalRBrgkNcoAl6RGGeCS1CgDXJIaZYBLUqMMcElqlAEuSY0ywCWpUQa4JDXKAJekRhngktQoA1ySGmWAS1KjRgZ4kp9L8o0k30pyf5K/6NrPS3J3koNJPpXkef2XK0laNM4e+FPAG6rqFcBW4JIkrwJuBD5UVb8MfB+4urcqJUk/ZWSA18APu5endY8C3gD8Y9e+C7iyjwIlScsb6xh4klOS7AOOAbuBh4EfVNWJbpFHgXN6qVCStKyx/qVaVT0NbE1yBnA78KvjriDJdmA7wJYtW1ZRop5L/Bdt0vgmGoVSVT8A7gJ+HTgjyeIvgBcBh1d4z86qWqiqhbm5ubXUKkkaMs4olLluz5skPw+8CXiQQZD/XrfYNuBzPdUoSVrGOIdQNgO7kpzCIPA/XVVfSPIAcFuSvwS+CdzcY52SpCVGBnhV3QtcsEz7I8BFfRQlSRrNKzElqVEGuCQ1ygCXpEYZ4JLUKANckhplgEtSowxwSWqUAS5JjTLAJalRBrgkNcoAl6RGGeCS1CgDXJIaZYBLUqMMcElqlAEuSY0ywCWpUQa4JDXKAJekRhngktQoA1ySGmWAS1KjDHBJatTIAE9ybpK7kjyQ5P4k7+nar09yOMm+7nFZ/+VKkhadOsYyJ4D3VtU9SU4H9ibZ3c37UFX9VX/lSZJWMjLAq+oIcKSbfjLJg8A5fRcmSTq5iY6BJ5kHLgDu7pquSXJvkluSbFrhPduT7Emy5/jx42urVpL0Y2MHeJIXAJ8Brq2qJ4APAy8BtjLYQ79pufdV1c6qWqiqhbm5ubVXLEkCxgzwJKcxCO9bq+qzAFV1tKqerqofAR8FLuqvTEnSUuOMQglwM/BgVX1wqH3z0GJvBfZPvzxJ0krGGYXyauAdwH1J9nVt7wOuSrIVKOAQ8K4e6pMkrWCcUShfA7LMrC9OvxxJ0ri8ElOSGmWAS1KjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEYZ4JLUKANckhplgEtSowxwSWqUAS5JjTLAJalR49xOVtpw5nfcMdZyh264vOdKpNlxD1ySGmWAS1KjDHBJapQBLkmNMsAlqVGOQnkOceSG9OziHrgkNcoAl6RGjQzwJOcmuSvJA0nuT/Kerv3MJLuTHOieN/VfriRp0Th74CeA91bV+cCrgHcnOR/YAdxZVS8F7uxeS5LWycgAr6ojVXVPN/0k8CBwDnAFsKtbbBdwZU81SpKWMdEolCTzwAXA3cDZVXWkm/UYcPYK79kObAfYsmXLqgvV+hl3tMqsv+Y0OUJHLRr7JGaSFwCfAa6tqieG51VVAbXc+6pqZ1UtVNXC3NzcmoqVJP3EWAGe5DQG4X1rVX22az6aZHM3fzNwrJ8SJUnLGWcUSoCbgQer6oNDsz4PbOumtwGfm355kqSVjHMM/NXAO4D7kuzr2t4H3AB8OsnVwHeBt/VSoSRpWSMDvKq+BmSF2RdPtxxJ0ri8ElOSGmWAS1KjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEb5L9X0rLbRb6IlrYV74JLUKANckhplgEtSowxwSWqUAS5JjTLAJalRBrgkNcoAl6RGGeCS1CgDXJIaZYBLUqMMcElqlAEuSY0ywCWpUQa4JDVqZIAnuSXJsST7h9quT3I4yb7ucVm/ZUqSlhpnD/xjwCXLtH+oqrZ2jy9OtyxJ0igjA7yqvgo8vg61SJImsJZj4Nckubc7xLJppYWSbE+yJ8me48ePr2F1kqRhqw3wDwMvAbYCR4CbVlqwqnZW1UJVLczNza1ydZKkpVYV4FV1tKqerqofAR8FLppuWZKkUVYV4Ek2D718K7B/pWUlSf04ddQCST4JvB44K8mjwJ8Dr0+yFSjgEPCu/kqUJC1nZIBX1VXLNN/cQy2SpAmMDHDNzvyOO8Za7tANl/dciaSNyEvpJalRBrgkNcoAl6RGGeCS1CgDXJIaZYBLUqMMcElqlAEuSY0ywCWpUQa4JDXKAJekRnkvlGeBce+ZIunZxT1wSWqUAS5JjTLAJalRBrgkNcoAl6RGGeCS1CgDXJIaZYBLUqMMcElq1MgAT3JLkmNJ9g+1nZlkd5ID3fOmfsuUJC01zh74x4BLlrTtAO6sqpcCd3avJUnraGSAV9VXgceXNF8B7OqmdwFXTrcsSdIoq72Z1dlVdaSbfgw4e6UFk2wHtgNs2bJllat7dvHmU+2aZNsduuHyHiuRpnASs6oKqJPM31lVC1W1MDc3t9bVSZI6qw3wo0k2A3TPx6ZXkiRpHKsN8M8D27rpbcDnplOOJGlc4wwj/CTw78DLkjya5GrgBuBNSQ4Ab+xeS5LW0ciTmFV11QqzLp5yLZKkCXglpiQ1ygCXpEYZ4JLUKANckhplgEtSowxwSWqUAS5JjVrtzay0DG9SpWHj/jx40yutlnvgktQoA1ySGmWAS1KjDHBJapQBLkmNes6OQvFfY2mjcLSKVss9cElqlAEuSY0ywCWpUQa4JDXKAJekRhngktQoA1ySGmWAS1Kj1nQhT5JDwJPA08CJqlqYRlGSpNGmcSXmb1TV96bwdSRJE/AQiiQ1aq0BXsCXkuxNsn25BZJsT7InyZ7jx4+vcXWSpEVrDfDXVNWFwKXAu5O8bukCVbWzqhaqamFubm6Nq5MkLVpTgFfV4e75GHA7cNE0ipIkjbbqAE/y/CSnL04Dbwb2T6swSdLJrWUUytnA7UkWv84nquqfp1KVJGmkVQd4VT0CvGKKtUiSJuAwQklqlAEuSY0ywCWpUQa4JDXKAJekRhngktQoA1ySGjWN28mui/kdd4y13KEbLp/ZuqU+zfIzoI3JPXBJapQBLkmNMsAlqVEGuCQ1ygCXpEY1MwplXI4Y0XOdo1XWrpU+dA9ckhplgEtSowxwSWqUAS5JjTLAJalRBrgkNepZN4xQ0nhaGHI762F6o0zSh318L+6BS1KjDHBJatSaAjzJJUkeSnIwyY5pFSVJGm3VAZ7kFODvgEuB84Grkpw
/rcIkSSe3lj3wi4CDVfVIVf0vcBtwxXTKkiSNspZRKOcA/zX0+lHg15YulGQ7sL17+cMkD53ka54FfG8NNfXFusa3EWsC65rUhqgrN/5U04aoaxkj61rme5nELy3X2PswwqraCewcZ9kke6pqoeeSJmZd49uINYF1Tcq6JjOrutZyCOUwcO7Q6xd1bZKkdbCWAP8P4KVJzkvyPODtwOenU5YkaZRVH0KpqhNJrgH+BTgFuKWq7l9jPWMdapkB6xrfRqwJrGtS1jWZmdSVqprFeiVJa+SVmJLUKANckho1kwBP8qkk+7rHoST7uvb5JP8zNO8jQ+95ZZL7usv2/yZJeqjr+iSHh9Z/2dC8P+3W/VCStwy19347gSQfSPKfSe5NcnuSM7r2mfbXMnXO7NYKSc5NcleSB5Lcn+Q9XfvE27SH2g5122Jfkj1d25lJdic50D1v6trTba+D3fa+sKeaXjbUJ/uSPJHk2ln0V5JbkhxLsn+obeL+SbKtW/5Akm091bWxPotVNdMHcBPwZ930PLB/heW+AbwKCPBPwKU91HI98CfLtJ8PfAv4WeA84GEGJ25P6aZfDDyvW+b8Hup6M3BqN30jcONG6K8l61uXvjjJ+jcDF3bTpwPf7rbbRNu0p9oOAWctaXs/sKOb3jG0TS/rtle67Xf3OvTdKcBjDC4WWff+Al4HXDj8szxp/wBnAo90z5u66U091LWhPoszPYTS/SZ6G/DJEcttBl5YVV+vQY98HLiy/wp/7Argtqp6qqq+AxxkcCuBdbmdQFV9qapOdC+/zmDM/Ypm1F8zvbVCVR2pqnu66SeBBxlcLbySlbbperkC2NVN7+In2+cK4OM18HXgjG579uli4OGq+u5Jlumtv6rqq8Djy6xvkv55C7C7qh6vqu8Du4FLpl3XRvsszvoY+GuBo1V1YKjtvCTfTPKVJK/t2s5hcKn+okc5+YdzLa7p/jy6ZfHPNpa/bcA5J2nv0x8w+C2+aNb9tWgWfbGsJPPABcDdXdMk27QPBXwpyd4Mbi0BcHZVHemmHwPOnkFdi97OM3eiZt1fMHn/PCc/i70FeJIvJ9m/zGN4r+wqnvmDcwTYUlUXAH8MfCLJC9exrg8DLwG2drXcNM11r6GuxWWuA04At3ZNvfdXa5K8APgMcG1VPcEMt+mQ11TVhQzu3PnuJK8bntntmc1kPG8GF+H9NvAPXdNG6K9nmGX/rGSjfBZ7uxdKVb3xZPOTnAr8DvDKofc8BTzVTe9N8jDwKwwu0R/+U2XVl+2Pqmuovo8CX+henuy2AVO5ncAY/fX7wG8CF3c/0OvSXxOY+a0VkpzGILxvrarPAlTV0aH5427Tqaqqw93zsSS3Mzj0cDTJ5qo60v2ZfWy96+pcCtyz2E8bob86k/bPYeD1S9r/tY/CNtRncVoH0yd9MDg+9ZUlbXN0J0YYnAw7DJy5womAy3qoafPQ9B8xOOYH8HKeeQLnEQYnfk7tps/jJyfuXt5TXz0AzG2k/lpSy7r0xUnWHwbHF/96Ldu0h7qeD5w+NP1v3fb8AM88Sff+bvpynnmS7hs999ttwDtn3V8sOQk4af8wOHn5HQYnMDd102f2UNeG+iyuy4drhY75GPCHS9p+F7gf2AfcA/zW0LwFYD+Ds99/S3cV6ZRr+nvgPuBeBvd1Gf5hvq5b90MMnUVmcFb8292863rqq4MMju/t6x4f2Qj9tUydvffFSdb9GgZ/Zt871E+XrWabTrmuFzMIvm912+q6rv0XgTuBA8CXhz7sYfCPUh7u6l7osc+eD/w38AtDbeveXwwOox4B/o/BMeKrV9M/DI5JH+we7+yprg31WfRSeklq1KxHoUiSVskAl6RGGeCS1CgDXJIaZYBLUqMMcElqlAEuSY36f+6cj/VyWY44AAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "diff_fvc = (np.mean(FVC_pred,axis=1) - sc.inverse_transform(testY))\n",
    "diff = (np.mean(predictions,axis=1) - testY)\n",
    "plt.hist(diff_fvc, bins=30)\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "MSE :  102919.49844574412\n",
      "RMSE :  320.8106894193897\n"
     ]
    }
   ],
   "source": [
    "MSE = np.mean(diff_fvc*diff_fvc)\n",
    "RMSE = np.sqrt(MSE)\n",
    "print(\"MSE : \",MSE)\n",
    "print(\"RMSE : \", RMSE)"
   ]
  },
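  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The metrics printed above (FVC scale, from `diff_fvc`) and below (standardised scale, from `diff`) are $MSE = \\frac{1}{n}\\sum_{i=1}^{n}(\\hat{y}_i - y_i)^2$ and $RMSE = \\sqrt{MSE}$, with $\\hat{y}_i$ the mean of the stochastic predictions for patient $i$."
   ]
  },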
  {
   "cell_type": "code",
   "execution_count": 39,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "MSE :  0.14796301923206379\n",
      "RMSE :  0.3846596147661771\n"
     ]
    }
   ],
   "source": [
    "MSE = np.mean(diff*diff)\n",
    "RMSE = np.sqrt(MSE)\n",
    "print(\"MSE : \",MSE)\n",
    "print(\"RMSE : \", RMSE)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "-11.886677561205694"
      ]
     },
     "execution_count": 40,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "compute_score(testY,np.mean(FVC_pred,axis=1),np.std(FVC_pred,axis=1))"
   ]
  },
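  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`compute_score` is defined earlier in the notebook. Assuming it implements the OSIC Laplace log likelihood (the usual metric for this FVC task), a minimal sketch would be:\n",
    "\n",
    "```python\n",
    "import numpy as np\n",
    "\n",
    "def compute_score_sketch(y_true, y_pred, sigma):\n",
    "    # Assumption: OSIC metric with the standard clipping constants.\n",
    "    sigma_c = np.maximum(sigma, 70)                    # floor on the uncertainty\n",
    "    delta = np.minimum(np.abs(y_true - y_pred), 1000)  # capped absolute error\n",
    "    return np.mean(-np.sqrt(2) * delta / sigma_c - np.log(np.sqrt(2) * sigma_c))\n",
    "```\n",
    "\n",
    "Higher (less negative) values are better, which is how the scores are compared across dropout rates below."
   ]
  },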
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "**Dropout 0.5**"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[<tf.Tensor 'dense_6_input_3:0' shape=(None, 6) dtype=float32>, <tf.Tensor 'input_2_3:0' shape=(None, 240, 240, 4) dtype=float32>]\n"
     ]
    }
   ],
   "source": [
    "tf.compat.v1.disable_eager_execution()\n",
    "\n",
    "dropout = 0.5\n",
    "num_iter = 20\n",
    " \n",
    "input_data=[testAttrX, test_dataset]\n",
    "input_data[1] = np.asarray(input_data[1]).reshape(test_dataset.shape[0],240,240,4)\n",
    "num_samples = input_data[0].shape[0]\n",
    "\n",
    "predict_with_dropout = create_dropout_predict_function(model, dropout)\n",
    "\n",
    "predictions = np.zeros((num_samples, num_iter))\n",
    "for i in range(num_iter):\n",
    "    predictions[:,i] = predict_with_dropout([input_data,1])[0].reshape(-1)"
   ]
  },
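  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "`create_dropout_predict_function` is defined earlier in the notebook. A minimal sketch of the Monte-Carlo-dropout recipe it is assumed to follow: clone the model with the requested dropout rate, then evaluate the graph with the learning phase set to 1 so dropout stays active at prediction time.\n",
    "\n",
    "```python\n",
    "from tensorflow.keras import backend as K\n",
    "from tensorflow.keras.models import Model\n",
    "\n",
    "def create_dropout_predict_function_sketch(model, dropout):\n",
    "    # Rewrite every Dropout layer's rate in a copy of the architecture.\n",
    "    conf = model.get_config()\n",
    "    for layer in conf[\"layers\"]:\n",
    "        if layer[\"class_name\"] == \"Dropout\":\n",
    "            layer[\"config\"][\"rate\"] = dropout\n",
    "    model_dropout = Model.from_config(conf)\n",
    "    model_dropout.set_weights(model.get_weights())\n",
    "    # Trailing learning-phase input: pass 1 to keep dropout enabled.\n",
    "    return K.function(model_dropout.inputs + [K.learning_phase()],\n",
    "                      model_dropout.outputs)\n",
    "```"
   ]
  },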
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXAAAAD4CAYAAAD1jb0+AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAO1UlEQVR4nO3df6zddX3H8edrBcWpEZCzpgG6i5NIyDKKuesgmkXrMEyWiQlZRhZtMpLrEk0wIduqSzbNtgSTKduShawOtH84fwx0EHBiV0mMyVJ2qwVaKgG1RppCywSV/cFSfO+P8714cz235/Tec+7t5/T5SL453+/n+z3n+/70nr7u937O9/s9qSokSe35pfUuQJK0Mga4JDXKAJekRhngktQoA1ySGnXWWu7sggsuqJmZmbXcpSQ1b9++fc9WVW9p+5oG+MzMDPPz82u5S0lqXpIfDGp3CEWSGmWAS1KjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEYZ4JLUKANckhq1pldiTruZHfeP9fUO33rdWF9P0nTxCFySGmWAS1KjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEYZ4JLUKANckhplgEtSowxwSWqUAS5JjRoa4EnOSfJQkoeTHEzysa79M0m+n2R/N22ZeLWSpJeNcjfCF4FtVfVCkrOBbyb5j27dn1bVXZMrT5K0nKEBXlUFvNAtnt1NNcmiJEnDjTQGnmRDkv3AMWB3Ve3tVv1tkkeS3Jbklcs8dy7JfJL548ePj6dqSdJoAV5VL1XVFuAiYGuSXwc+DFwG/CZwPvDnyzx3Z1XNVtVsr9cbT9WSpFM7C6WqngceBK6tqqPV9yLwaWDrBOqTJC1jlLNQeknO7eZfBVwDfCfJpq4twPXAgcmVKUlaapSzUDYBu5JsoB/4X6yq+5J8PUkPCLAf+JPJlSlJWmqUs1AeAa4c0L5tIhVJkkbilZiS1CgDXJIaZYBLUqMMcElqlAEuSY0ywCWpUQa4JDXKAJekRhngktQoA1ySGmWAS1KjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEYZ4JLUKANckho1ypcan5PkoSQPJzmY5GNd+yVJ9iZ5MskXkrxi8uVKkhaMcgT+IrCtqq4AtgDXJrkK+DhwW1W9EXgOuGliVUqSfsHQAK++F7rFs7upgG3AXV37LuD6SRQoSRpspDHwJBuS7AeOAbuB7wLPV9WJbpOngAuXee5ckvkk88ePHx9DyZIkGDHAq+qlqtoCXARsBS4bdQdVtbOqZqtqttfrraxKSdIvOKWzUKrqeeBB4Grg3CRndasuAo6MtzRJ0smMchZKL8m53fyrgGuAQ/SD/IZus+3APROqUZI0wFnDN2ETsCvJBvqB/8Wqui/JY8Dnk/wN8G3gjgnWKUlaYmiAV9UjwJUD2r9HfzxckrQOvBJTkhplgEtSowxwSWqUAS5JjTLAJalRBrgkNcoAl6RGGeCS1CgDXJIaZYBLUqMMcElq1Cg3s9I6mdlx/1hf7/Ct14319SStL4/AJalRBrgkNcoAl6RGGeCS1Kgz9kPMcX9AKElrzSNwSWrUKF9qfHGSB5M8luRgkpu79o8mOZJkfze9a/LlSpIWjDKEcgK4paq+leS1wL4ku7t1t1XV302uPEnSckb5UuOjwNFu/qdJDgEXTrowSdLJndIYeJIZ+t9Qv7dr+mCSR5LcmeS8cRcnSVreyGehJHkNcDfwoar6SZLbgb8Gqnv8BPDHA543B8wBbN68eRw1a4UmceaNl+dL62ekI/AkZ9MP789W1ZcAquqZqnqpqn4GfArYOui5VbWzqmararbX642rbkk6441yFkqAO4BDVfXJRe2bFm32HuDA+MuTJC1nlCGUtwDvBR5Nsr9r+whwY5It9IdQDgPvn0B9kqRljHIWyjeBDFj1lfGXI0kalVdiSlKjDHBJapQBLkmNMsAlqVEGuCQ16oy9H7i0El7NqtOJR+CS1CgDXJIaZYBLUqMMcElqlAEuSY3yLBRNtUmcNXK6G3efPUvm9OURuCQ1ygCXpEYZ4JLUKANckhplgEtSowxwSWqUAS5JjRrlW+kvTvJgkseSHExyc9d+fpLdSZ7oHs+bfLmSpAWjHIGfAG6pqsuBq4APJLkc2AHsqapLgT3dsiRpjQwN8Ko6WlXf6uZ/ChwCLgTeDezqNtsFXD+hGiVJA5zSpfRJZoArgb3Axqo62q16Gti4zHPmgDmAzZs3r7hQnZ68bFtaPyN/iJnkNcDdwIeq6ieL11VVATXoeVW1s6pmq2q21+utqlhJ0s+NFOBJzqYf3p+tqi91zc8k2dSt3wQcm0yJkqRBRjkLJcAdwKGq+uSiVfcC27v57cA94y9PkrScUcbA3wK8F3g0yf6u7SPArcAXk9wE/AD4g4lUKEkaaGiAV9U3gSyz+h3jLWd5Z+J9nc9E/pyl0XklpiQ1ygCXpEYZ4JLUKANckhplgEtSowxwSWqUAS5JjTLAJalRBrgkNcoAl6RGGeCS1CgDXJIaZYBLUqMMcElqlAEuSY0ywCWpUQa4JDXKAJekRo3ypcZ3JjmW5MCito8mOZJkfze9a7JlSpKWGuUI/DPAtQPab6uqLd30lfGWJUkaZmiAV9U3gB+tQS2SpFMw9FvpT+KDSd4HzAO3VNVzgzZKMgfMAWzevHkVu5Om08yO+9e7BDVqpR9i3g78GrAFOAp8YrkNq2pnVc1W1Wyv11vh7iRJS60owKvqmap6qap+BnwK2DresiRJw6wowJNsWrT4HuDActtKkiZj6Bh4ks8BbwMuSPIU8FfA25JsAQo4DLx/ciVKkgYZGuBVdeOA5jsmUIsk6RR4JaYkNcoAl6RGGeCS1CgDXJIaZYBLUqMMcElqlAEuSY0ywCWpUQa4JDXKAJekRq3mfuCSzgCTuF/54VuvG/trnok8ApekRhngktQoA1ySGmWAS1KjDHBJapQBLkmNMsAlqVFDAzzJnUmOJTmwqO38JLuTPNE9njfZMiVJS41yBP4Z4NolbTuAPVV1KbCnW5YkraGhAV5V3wB+tKT53cCubn4XcP14y5IkDbPSMfCNVXW0m38a2LjchknmkswnmT9+/PgKdydJWmrVH2JWVQF1kvU7q2q2qmZ7vd5qdydJ6qw0wJ9Jsgmgezw2vpIkSaNYaYDfC2zv5rcD94ynHEnSqEY5jfBzwH8Bb0ryVJKbgFuBa5I8AfxOtyxJWkND7wdeVTcus+odY65FknQKvBJTkhplgEtSowxwSWqUAS5JjTLAJalRBrgkNcoAl6RGGeCS1CgDXJIaZYBLUqMMcElqlAEuSY0ywCWpUQa4JDXKAJekRhngktSooV/oIEnjNrPj/rG+3uFbrxvr67XCI3BJatSqjsCTHAZ+CrwEnKiq2XEUJUkabhxDKG+vqmfH8DqSpFPgEIokNWq1AV7A15LsSzI3joIkSaNZ7RDKW6vqSJJfAXYn+U5VfWPxBl2wzwFs3rx5lbuTJC1Y1RF4VR3pHo8BXwa2DthmZ1XNVtVsr9dbze4kSYusOMC
TvDrJaxfmgXcCB8ZVmCTp5FYzhLIR+HKShdf516r66liqkiQNteIAr6rvAVeMsRZJ0inwUnpJzTtTL833PHBJapQBLkmNMsAlqVEGuCQ1ygCXpEYZ4JLUKANckhplgEtSowxwSWqUAS5JjfJSeklaYtyX5sNkLs/3CFySGmWAS1KjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEatKsCTXJvk8SRPJtkxrqIkScOtOMCTbAD+Cfhd4HLgxiSXj6swSdLJreYIfCvwZFV9r6r+D/g88O7xlCVJGmY1l9JfCPxw0fJTwG8t3SjJHDDXLb6Q5PFV7PN0cQHw7HoXsUbs63Syr2ssH1/V0391UOPE74VSVTuBnZPez1pKMl9Vs+tdx1qwr9PJvk6H1QyhHAEuXrR8UdcmSVoDqwnw/wYuTXJJklcAfwjcO56yJEnDrHgIpapOJPkg8ACwAbizqg6OrbLT21QNCQ1hX6eTfZ0Cqar1rkGStAJeiSlJjTLAJalRBvgASS5O8mCSx5IcTHJz135+kt1Jnugez+vak+Qfu1sKPJLkzevbg9ElOSfJQ0ke7vr6sa79kiR7uz59ofugmiSv7Jaf7NbPrGsHTlGSDUm+neS+bnkq+wmQ5HCSR5PsTzLftU3je/jcJHcl+U6SQ0munsZ+DmKAD3YCuKWqLgeuAj7Q3SZgB7Cnqi4F9nTL0L+dwKXdNAfcvvYlr9iLwLaqugLYAlyb5Crg48BtVfVG4Dngpm77m4Dnuvbbuu1acjNwaNHytPZzwdurasui86Cn8T38D8BXq+oy4Ar6P99p7OcvqiqnIRNwD3AN8DiwqWvbBDzezf8zcOOi7V/erqUJ+GXgW/SvqH0WOKtrvxp4oJt/ALi6mz+r2y7rXfuI/buI/n/mbcB9QKaxn4v6exi4YEnbVL2HgdcB31/6s5m2fi43eQQ+RPen85XAXmBjVR3tVj0NbOzmB91W4MK1qnG1umGF/cAxYDfwXeD5qjrRbbK4Py/3tVv/Y+D1a1rwyv098GfAz7rl1zOd/VxQwNeS7OtuaQHT9x6+BDgOfLobGvuXJK9m+vo5kAF+EkleA9wNfKiqfrJ4XfV/fU/FOZhV9VJVbaF/hLoVuGx9Kxq/JL8HHKuqfetdyxp6a1W9mf6wwQeS/PbilVPyHj4LeDNwe1VdCfwvPx8uAaamnwMZ4MtIcjb98P5sVX2pa34myaZu/Sb6R6wwJbcVqKrngQfpDyWcm2ThQq/F/Xm5r9361wH/s7aVrshbgN9Pcpj+nTO30R87nbZ+vqyqjnSPx4Av0//lPG3v4aeAp6pqb7d8F/1An7Z+DmSAD5AkwB3Aoar65KJV9wLbu/nt9MfGF9rf133CfRXw40V/vp3WkvSSnNvNv4r+WP8h+kF+Q7fZ0r4u/BvcAHy9O8I5rVXVh6vqoqqaoX/bh69X1R8xZf1ckOTVSV67MA+8EzjAlL2Hq+pp4IdJ3tQ1vQN4jCnr57LWexD+dJyAt9L/k+sRYH83vYv+GOge4AngP4Hzu+1D/8stvgs8Csyudx9Ooa+/AXy76+sB4C+79jcADwFPAv8GvLJrP6dbfrJb/4b17sMK+vw24L5p7mfXr4e76SDwF137NL6HtwDz3Xv434HzprGfgyYvpZekRjmEIkmNMsAlqVEGuCQ1ygCXpEYZ4JLUKANckhplgEtSo/4fNXQUubWX2akAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "FVC_pred = sc.inverse_transform(predictions)\n",
    "plt.hist(np.std(FVC_pred, axis = 1), bins=15)\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXAAAAD4CAYAAAD1jb0+AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAANRUlEQVR4nO3dX4il9X3H8fcnVtOSCK7sdLuo07GJtOxNVhmsxRBs/hijFyqUEC/i0grjhUIC9mKbXsTWXqyhRigE6YqSLSRaaSIu0SbZikUCrclsuomrVjR2pS6ru6JpDIWUNd9ezDN1Ojmz5/+Mv533Cw7nOb/nmXm+58fOh9/+zu88T6oKSVJ73rPRBUiSRmOAS1KjDHBJapQBLkmNMsAlqVG/tp4n27p1a83Nza3nKSWpeQcPHny9qmZWt69rgM/NzbG4uLiep5Sk5iV5uVe7UyiS1CgDXJIaZYBLUqMMcElqlAEuSY0ywCWpUX0DPMmvJ/l+kh8leSbJX3TtFyZ5KsmLSf4+yVnTL1eStGyQEfgvgI9W1YeAncBVSS4D7gTurqoPAm8CN02tSknSr+gb4LXk593LM7tHAR8F/qFr3wdcN40CJUm9DfRNzCRnAAeBDwJfAX4C/LSqTnaHvAKct8bPLgALALOzs+PWKwEwt/vRgY47sueaKVcibZyBPsSsqreraidwPnAp8HuDnqCq9lbVfFXNz8z8ylf5JUkjGmoVSlX9FHgC+APgnCTLI/jzgaOTLU2SdCqDrEKZSXJOt/0bwCeA51gK8j/qDtsFPDKlGiVJPQwyB74d2NfNg78HeKiqvpXkWeDBJH8F/Btw3xTrlCSt0jfAq+rHwMU92l9iaT5ckrQB/CamJDXKAJekRhngktQoA1ySGmWAS1KjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEYZ4JLUKANckhplgEtSowxwSWqUAS5JjTLAJalRBrgkNcoAl6RGGeCS1CgDXJIaZYBLUqP63pVeOpW53Y9O9Pcd2XPNRH+fdDpzBC5JjTLAJalRBrgkNapvgCe5IMkTSZ5N8kySz3Xttyc5muRQ97h6+uVKkpYN8iHmSeC2qvphkrOBg0kOdPvurqq/nl55kqS19A3wqjoGHOu230ryHHDetAuTJJ3aUMsIk8wBFwNPAZcDtya5EVhkaZT+Zo+fWQAWAGZnZ8etV+tg0ksDJU3HwB9iJnk/8A3g81X1M+Ae4APATpZG6Hf1+rmq2ltV81U1PzMzM37FkiRgwABPciZL4f21qvomQFW9VlVvV9UvgXuBS6dXpiRptUFWoQS4D3iuqr68on37isOuBw5PvjxJ0loGmQO/HPgs8HSSQ13bF4AbkuwECjgC3DyF+iRJaxhkFcr3gPTY9djky5EkDcpvYkpSowxwSWqUAS5JjTLAJalRBrgkNcoAl6RGGeCS1CgDXJIaZYBLUqMMcElqlAEuSY0ywCWpUQa4JDXKAJekRhngktSooW5qLE2bN1SWBucIXJIaZYBLUqMMcElqlAEuSY0ywCWpUQa4JDXKZYQ6rQ26LPHInmumXIk0eY7AJalRBrgkNcoAl6RG9Q3wJBckeSLJs0meSfK5rv3cJAeSvNA9b5l+uZKkZYOMwE8Ct1XVDuAy4JYkO4DdwONVdRHwePdakrRO+gZ4VR2rqh92228BzwHnAdcC+7rD9gHXTalGSVIPQ82BJ5kDLgaeArZV1bFu16vAtjV+ZiHJYpLFEydOjFOrJGmFgQM8yfuBbwCfr6qfrdxXVQVUr5+rqr1VNV9V8zMzM2MVK0l6x0ABnuRMlsL7a1X1za75tSTbu/3bgePTKVGS1Msgq1AC3Ac8V1VfXrFrP7Cr294FPDL58iRJaxnkq/SXA58Fnk5yqGv7ArAHeCjJTcDLwKenUqEkqae+AV5V3wOyxu6PTbYcSdKg/CamJDXKAJekRhngktQoA1ySGmWAS1KjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEYZ4JLUKANckhplgEtSowxwSWqUAS5JjTLAJalRBrgkNcoAl6RGGeCS1CgDXJIaZYBLUqP63pVe0jvmdj868LFH9lwzxUokR+CS1CwDXJIaZYBLUqP6BniS+5McT3J4RdvtSY4mOdQ9rp5umZKk1QYZgX8VuKpH+91VtbN7PDbZsiRJ/fQN8Kp6EnhjHWqRJA1hnGWEtya5EVgEbquqN3sdlGQBWACYnZ0d43TS9AyzPFB6txj1Q8x7gA8AO4FjwF1rHVhVe6tqvqrmZ2ZmRjydJGm1kQK8ql6rqrer6pfAvcClky1LktTPSAGeZPuKl9cDh9c6VpI0HX3nwJM8AFwBbE3yCvBF4IokO4ECjgA3T69ESVIvfQO8qm7o0XzfFGqRJA3Bb2JKUqO8GuFpYNAlcF4dTzq9OAKXpEYZ4JLUKANckhplgEtSowxwSWqUAS5JjTLAJalRBrgkNcoAl6RGGeCS1CgDXJIaZYBLUqMMcElqlAEuSY0ywCWpUQa4JDXKAJekRhngktQoA1ySGmWAS1KjvKnxJjLozY+1vrwptUblCFySGmWAS1KjDHBJalTfAE9yf5LjSQ6vaDs3yYEkL3TPW6ZbpiRptUFG4F8FrlrVtht4vKouAh7vXkuS1lHfAK+qJ4E3VjVfC+zrtvcB1022LElSP6POgW+rqmPd9qvAtrUOTLKQZDHJ4okTJ0Y8nSRptbE/xKyqAuoU+/dW1XxVzc/MzIx7OklSZ9QAfy3JdoDu+fjkSpIkDWLUAN8P7Oq2dwGPTKYcSdKgBllG+ADwL8DvJnklyU3AHuATSV4APt69liSto77XQqmqG9bY9bEJ1yJJGoLfxJSkRnk1wncxrx4o6VQcgUtSowxwSWqUAS5JjTLAJalRBrgkNcoAl6RGuYxwAN50VqNwGaimzRG4JDXKAJekRhngktQoA1ySGmWAS1KjXIWyAVydIGkSHIFLUqMMcElqlAEuSY0ywCWpUQa4JDXKAJekRrmMcIJcHihpPTkCl6RGGeCS1CgDXJIaNdYceJIjwFvA28DJqpqfRFGSpP4m8SHmH1bV6xP4PZKkITiFIkmNGncEXsB3kxTwt1W1d/UBSRaABYDZ2dkxTyepH+/hunmMOwL/cFVdAnwKuCXJR1YfUFV7q2q+quZnZmbGPJ0kadlYAV5VR7vn48DDwKWTKEqS1N/IAZ7kfUnOXt4GrgQOT6owSdKpjTMHvg14OMny7/l6VX17IlVJkvoaOcCr6iXgQxOsRZI0BJcRSlKjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEYZ4JLUKANckhqVqlq3k83Pz9fi4uJIP+sNg6WN4VULN16Sg71umOMIXJIaZYBLUqMMcElqlAEuSY0ywCWpUQa4JDVq3JsaS9Jpp5UbQzsCl6RGGeCS1CgDXJIaZYBLUqMMcElqlAEuSY1yGaGkUxrmSqAbtaxuo5b9bXTfOAKXpEYZ4JLUKANckho1VoAnuSrJ80leTLJ7UkVJkvobOcCTnAF8BfgUsAO4IcmOSRUmSTq1cUb
glwIvVtVLVfU/wIPAtZMpS5LUzzjLCM8D/nPF61eA3199UJIFYKF7+fMkz49xznerrcDrG11EA+yn/pruo9y5LqcZuY/Wqb5pnPu3ezVOfR14Ve0F9k77PBspyWKvO0br/7Of+rOP+rOP3jHOFMpR4IIVr8/v2iRJ62CcAP8BcFGSC5OcBXwG2D+ZsiRJ/Yw8hVJVJ5PcCnwHOAO4v6qemVhlbTmtp4gmyH7qzz7qzz7qpKo2ugZJ0gj8JqYkNcoAl6RGGeBDSnJ7kqNJDnWPq1fs+7PusgLPJ/nkivZNfcmBzf7+V0pyJMnT3b+dxa7t3CQHkrzQPW/p2pPkb7p++3GSSza2+ulJcn+S40kOr2gbul+S7OqOfyHJro14L+uqqnwM8QBuB/60R/sO4EfAe4ELgZ+w9OHuGd327wBndcfs2Oj3sY79tanff4/+OAJsXdX2JWB3t70buLPbvhr4RyDAZcBTG13/FPvlI8AlwOFR+wU4F3ipe97SbW/Z6Pc2zYcj8Mm5Fniwqn5RVf8BvMjS5QY2+yUHNvv7H8S1wL5uex9w3Yr2v6sl/wqck2T7BtQ3dVX1JPDGquZh++WTwIGqeqOq3gQOAFdNvfgNZICP5tbuv273L/+3jt6XFjjvFO2bxWZ//6sV8N0kB7vLTABsq6pj3farwLZue7P33bD9sun6y1uq9ZDkn4Df6rHrz4F7gDtY+kO8A7gL+JP1q06N+3BVHU3ym8CBJP++cmdVVRLX9q5iv/RmgPdQVR8f5Lgk9wLf6l6e6tICm/mSA15yYYWqOto9H0/yMEtTTK8l2V5Vx7qpgOPd4Zu974btl6PAFava/3kd6twwTqEMadUc5PXA8qfm+4HPJHlvkguBi4Dv4yUHNvv7/z9J3pfk7OVt4EqW/v3sB5ZXTOwCHum29wM3dqsuLgP+a8WUwmYwbL98B7gyyZZuavPKru205Qh8eF9KspOlKZQjwM0AVfVMkoeAZ4GTwC1V9TbAZr7kQHnJhZW2AQ8ngaW/va9X1beT/AB4KMlNwMvAp7vjH2NpxcWLwH8Df7z+Ja+PJA+wNHremuQV4IvAHobol6p6I8kdLA0aAP6yqlZ/MHpa8av0ktQop1AkqVEGuCQ1ygCXpEYZ4JLUKANckhplgEtSowxwSWrU/wKncBCHbuD/wgAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "diff_fvc = (np.mean(FVC_pred,axis=1) - sc.inverse_transform(testY))\n",
    "diff = (np.mean(predictions,axis=1) - testY)\n",
    "plt.hist(diff_fvc, bins=30)\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "MSE :  116767.65566158788\n",
      "RMSE :  341.7128263053465\n"
     ]
    }
   ],
   "source": [
    "MSE = np.mean(diff_fvc*diff_fvc)\n",
    "RMSE = np.sqrt(MSE)\n",
    "print(\"MSE : \",MSE)\n",
    "print(\"RMSE : \", RMSE)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "MSE :  0.16787193040438847\n",
      "RMSE :  0.40972177194333775\n"
     ]
    }
   ],
   "source": [
    "MSE = np.mean(diff*diff)\n",
    "RMSE = np.sqrt(MSE)\n",
    "print(\"MSE : \",MSE)\n",
    "print(\"RMSE : \", RMSE)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "-10.91977971722263"
      ]
     },
     "execution_count": 34,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "compute_score(testY,np.mean(FVC_pred,axis=1),np.std(FVC_pred,axis=1))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "**Dropout 0.6**"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[<tf.Tensor 'dense_6_input_1:0' shape=(None, 6) dtype=float32>, <tf.Tensor 'input_2_1:0' shape=(None, 240, 240, 4) dtype=float32>]\n"
     ]
    }
   ],
   "source": [
    "tf.compat.v1.disable_eager_execution()\n",
    "\n",
    "dropout = 0.6\n",
    "num_iter = 20\n",
    " \n",
    "input_data=[testAttrX, test_dataset]\n",
    "input_data[1] = np.asarray(input_data[1]).reshape(test_dataset.shape[0],240,240,4)\n",
    "num_samples = input_data[0].shape[0]\n",
    "\n",
    "predict_with_dropout = create_dropout_predict_function(model, dropout)\n",
    "\n",
    "predictions = np.zeros((num_samples, num_iter))\n",
    "for i in range(num_iter):\n",
    "    predictions[:,i] = predict_with_dropout([input_data,1])[0].reshape(-1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAD4CAYAAAAXUaZHAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAN9UlEQVR4nO3df6zd9V3H8ed7Lb+ZlNJrUyl4WSAsxChggxDmYmAoWxcgBg1k0ZpgmuhMwJnMTqMJiX8UY/bDZHE267Qxk8EYCilRhsDij5jiLT9bOqSwbisBelEYzj/Usrd/fD+Fm67lnnPPOfd+++7zkdzc769zvi8O377u53zP+Z4TmYkkqZb3LHUASdL4We6SVJDlLkkFWe6SVJDlLkkFLV/Mna1atSqnp6cXc5eSdMzbuXPna5k5NcxtFrXcp6enmZmZWcxdStIxLyK+PextPC0jSQVZ7pJUkOUuSQVZ7pJUkOUuSQVZ7pJUkOUuSQVZ7pJUkOUuSQUt6hWq1U1vemCs97dv8/qx3p+k44cjd0kqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqaOByj4hlEfFERGxv8+dFxI6I2BsRd0XEiZOLKUkaxjAj91uBPXPm7wA+k5nnA68Dt4wzmCRp4QYq94hYC6wHvtjmA7gKuKdtsg24YQL5JEkLMOjI/bPAJ4EftPmzgDcy82Cb3w+cfaQbRsTGiJiJiJnZ2dlRskqSBjRvuUfER4EDmblzITvIzC2ZuS4z101NTS3kLiRJQ1o+wDZXAtdFxEeAk4EfAT4HrIiI5W30vhZ4aXIxJUnDmHfknpmfysy1mTkN3AQ8kpkfAx4FbmybbQDum1hKSdJQRnmf++8Cn4iIvXTn4LeOJ5IkaVSDnJZ5W2Z+A/hGm34RuGz8kSRJo/IKVUkqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqyHKXpIIsd0kqaN5yj4iTI+KxiHgqInZHxO1t+XkRsSMi9kbEXRFx4uTjSpIGMcjI/X+AqzLzp4CLgWsj4nLgDuAzmXk+8Dpwy8RSSpKGMm+5Z+f7bfaE9pPAVcA9bfk24IZJBJQkDW/5IBtFxDJgJ3A+8HngBeCNzDzYNtkPnH2U224ENgKce+65o+ZVj0xvemDs97lv8/qx36d0PBroBdXMfCszLwbWApcB7x90B5m5JTPXZea6qamphaWUJA1lqHfLZOYbwKPAFcCKiDg08l8LvDTeaJKkhRrk3TJTEbGiTZ8CXAPsoSv5G9tmG4D7JpRRkjSkQc65rwG2tfPu7wHuzsztEfEs8JWI+CPgCWDrBHNKkoYwb7ln5tPAJUdY/iLd+XdJUs94haokFWS5S1JBlrskFTTQRUxaGuO+SMgLhKTjhyN3SSrIcpekgix3SSrIcpekgnxBVaX5orSOV47cJakgy12SCrLcJakgz7kfRybxzUmS+smRuyQVZLlLUkGWuyQVZLlLUkGWuyQVZLlLUkGWuyQVZLlLUkFexKRe8UIraTwcuUtSQZa7JBVkuUtSQZa7JBV03L6g6gt3kipz5C5JBVnuklSQ5S5JBVnuklSQ5S5JBVnuklSQ5S5JBVnuklTQcXsRk7QQk7j4bd/m9WO/T8mRuyQVNG+5R8Q5EfFoRDwbEbsj4ta2fGVEPBQRz7ffZ04+riRpEIOM3A8Cv5OZFwGXAx+PiIuATcDDmXkB8HCblyT1wLzlnpkvZ+bjbfq/gD3A2cD1wLa22TbghglllCQNaahz7hExDVwC7ABWZ+bLbdUrwOqj3GZjRMxExMzs7OwoWSVJAxq43CPidOBrwG2Z+ebcdZmZQB7pdpm5JTPXZea6qampkcJKkgYzULlHxAl0xf7lzLy3LX41Ita09WuAA5OJKEka1iDvlglgK7AnMz89Z9X9wIY2vQG4b/zxJEkLMchFTFcCvwI8ExFPtmW/B2wG7o6IW4BvA788kYSSpKHNW+6Z+c9AHGX11eONI0kaB69QlaSCLHdJKshyl6SCLHdJKshyl6SCLHdJKshyl6SCLHdJKshyl6SCLHdJKshyl6SCLHdJKshyl6SCLHdJKshyl6SCLHdJKshyl6SCBvmavV6Y3vTAUkeQJmLcx/a+zevHen86Njlyl6SCLHdJKshyl6SCLHdJKshyl6SCLHdJKshyl6SCLHdJKshyl6SCLHdJKshyl6SCLHdJKshyl6SCLHdJKshyl6SCLHdJKshyl6SCjplvYpI0GL/ZSeDIXZJKmrfcI+JLEXEgInbNWbYyIh6KiOfb7zMnG1OSNIxBRu5/CVx72LJNwMOZeQHwcJuXJPXEvOWemf8I/Odhi68HtrXpbcAN440lSRrFQs+5r87Ml9v0K8Dqo20YERsjYiYiZmZnZxe4O0nSMEZ+QTUzE8h3Wb8lM9dl5rqpqalRdydJGsBCy/3ViFgD0H4fGF8kSdKoFlru9wMb2vQG4L7xxJEkjcMgb4W8E/hX4MKI2B8RtwCbgWsi4nngQ21ektQT816hmpk3H2XV1WPOIkkaE69QlaSCLHdJKshyl6SCLHdJKshyl6SCLHdJKshyl6SCLHdJKsiv2ZP0rsb9tX3gV/ctBkfuklSQ5S5JBVnuklSQ59wlLbpxn8f3HP4Pc+QuSQVZ7pJUkOUuSQVZ7pJUkOUuSQVZ7pJUkOUuSQVZ7pJUkOUuSQVZ7pJUkOUuSQVZ7pJUkOUuSQVZ7pJUkOUuSQVZ7pJUkOUuSQVZ7pJUkOUuSQVZ7pJUkOUuSQVZ7pJU0PKlDiBJo5re9MBSR3hX+zavX/R9OnKXpIJGKveIuDYinouIvRGxaVyhJEmjWXC5R8Qy4PPAh4GLgJsj4qJxBZMkLdwoI/fLgL2Z+WJm/i/wFeD68cSSJI1ilBdUzwa+O2d+P/Azh28UERuBjW32+xHx3Aj7HNUq4LUl3P8wzDp+x0pOMOukLEnWuGNBN5ub9ceHvfHE3y2TmVuALZPezyAiYiYz1y11jkGYdfyOlZxg1kk5nrKOclrmJeCcOfNr2zJJ0hIbpdz/DbggIs6LiBOBm4D7xxNLkjSKBZ+WycyDEfFbwIPAMuBLmbl7bMkmoxenhwZk1vE7VnKCWSfluMkamTmuIJKknvAKVUkqyHKXpILKlHtEnBMRj0bEsxGxOyJubctXRsRDEfF8+31mWx4R8aftoxOejohLFzHryRHxWEQ81bLe3pafFxE7Wqa72gvVRMRJbX5vWz+9WFnnZF4WEU9ExPY+Z42IfRHxTEQ8GREzbVnvjoG2/xURcU9EfDMi9kTEFX3MGhEXtsf
z0M+bEXFbH7O2/f92+3e1KyLubP/eene8RsStLePuiLitLRvfY5qZJX6ANcClbfq9wL/TfSzCHwOb2vJNwB1t+iPA3wEBXA7sWMSsAZzepk8AdrQMdwM3teVfAH6jTf8m8IU2fRNw1xI8vp8A/hrY3uZ7mRXYB6w6bFnvjoG2/23Ar7fpE4EVfc06J/My4BW6i2p6l5Xu4spvAafMOU5/rW/HK/ATwC7gVLo3tvwDcP44H9NFPzgW8X/yfcA1wHPAmrZsDfBcm/5z4OY527+93SLnPBV4nO7q3teA5W35FcCDbfpB4Io2vbxtF4uYcS3wMHAVsL0dYH3Nuo8fLvfeHQPAGa2Eou9ZD8v388C/9DUr71w5v7Idf9uBX+jb8Qr8ErB1zvwfAJ8c52Na5rTMXO2p1SV0I+LVmflyW/UKsLpNH+njE85exIzLIuJJ4ADwEPAC8EZmHjxCnreztvXfA85arKzAZ+kOvB+0+bPob9YEvh4RO6P76Avo5zFwHjAL/EU73fXFiDitp1nnugm4s033LmtmvgT8CfAd4GW6428n/TtedwE/GxFnRcSpdCPzcxjjY1qu3CPidOBrwG2Z+ebcddn9yevFez8z863MvJhuVHwZ8P6lTXRkEfFR4EBm7lzqLAP6QGZeSvdppR+PiA/OXdmjY2A5cCnwZ5l5CfDfdE/D39ajrAC089TXAV89fF1fsrZz1NfT/fH8MeA04NolDXUEmbkHuAP4OvD3wJPAW4dtM9JjWqrcI+IEumL/cmbe2xa/GhFr2vo1dCNl6MnHJ2TmG8CjdE8VV0TEoQvL5uZ5O2tbfwbwH4sU8UrguojYR/fJn1cBn+tp1kMjNzLzAPA3dH84+3gM7Af2Z+aONn8PXdn3MeshHwYez8xX23wfs34I+FZmzmbm/wH30h3DvTteM3NrZv50Zn4QeJ3udcKxPaZlyj0iAtgK7MnMT89ZdT+woU1voDsXf2j5r7ZXoS8Hvjfn6dCks05FxIo2fQrdawN76Er+xqNkPfTfcCPwSPurPnGZ+anMXJuZ03RPyR/JzI/1MWtEnBYR7z00TXd+eBc9PAYy8xXguxFxYVt0NfBsH7POcTPvnJI5lKlvWb8DXB4Rp7ZOOPS49vF4/dH2+1zgF+nesDC+x3QxXuRYjB/gA3RPYZ6me4rzJN15rLPoXgx8nu4V6ZVt+6D7spEXgGeAdYuY9SeBJ1rWXcAftuXvAx4D9tI99T2pLT+5ze9t69+3RI/xz/HOu2V6l7Vleqr97AZ+vy3v3THQ9n8xMNOOg78Fzuxx1tPoRrRnzFnW16y3A99s/7b+Cjipp8frP9H94XkKuHrcj6kfPyBJBZU5LSNJeoflLkkFWe6SVJDlLkkFWe6SVJDlLkkFWe6SVND/A1NWWPyINK6bAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "FVC_pred = sc.inverse_transform(predictions)\n",
    "plt.hist(np.std(FVC_pred, axis = 1), bins=15)\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXUAAAD4CAYAAAATpHZ6AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAOq0lEQVR4nO3dXaxl5V3H8e9PoF4Uagc5mUyQ6aGkJZleONATbFJKMNSWFxWqpikXFWvN1AQS0HoxlkS5nLbSGqOhDoFADQU1QNrYqlBSSxqFOuAUBpAO0CEyGWaoNIELg0L/Xuw1dnM8L/vs93nm+0l29trPWvusP886+zfPWWvth1QVkqQ2/NSsC5AkjY+hLkkNMdQlqSGGuiQ1xFCXpIacOM2dnXbaabW4uDjNXUrSMe+RRx75YVUtDLLtVEN9cXGRPXv2THOXknTMS/L8oNt6+kWSGmKoS1JDDHVJaoihLkkNMdQlqSGGuiQ1xFCXpIYY6pLUEENdkhoy1W+UStO2uPPrA213YNdlE65Emg5H6pLUEENdkhpiqEtSQwx1SWqIoS5JDTHUJakhhrokNcRQl6SGGOqS1BBDXZIaYqhLUkMMdUlqiKEuSQ0x1CWpIU69q7niVLnSaBypS1JDDHVJaoihLkkNWTfUk5yR5FtJnkzyRJJru/YbkhxMsrd7XDr5ciVJaxnkQunrwKer6tEkpwCPJLm/W/fFqvqTyZUnSdqIdUO9qg4Bh7rlV5M8BZw+6cIkSRu3oXPqSRaBc4CHu6ZrkjyW5NYkm1Z5z44ke5Lseemll0arVpK0poFDPcnJwN3AdVX1CnATcBawnd5I/saV3ldVu6tqqaqWFhYWRq9YkrSqgUI9yUn0Av2OqroHoKoOV9UbVfVj4GbgvMmVKUkaxCB3vwS4BXiqqr7Q176lb7OPAPvGX54kaSMGufvl/cDHgceT7O3aPgNcmWQ7UMAB4FMTqE+StAGD3P3yHSArrPrG+MuRJI3Cb5RKUkMMdUlqiKEuSQ0x1CWpIYa6JDXEUJekhhjqktQQQ12SGmKoS1JDBpkmQFrV4s6vD7TdgV2XTbgSSeBIXZKaYqhLUkMMdUlqiKEuSQ0x1CWpIYa6JDXEUJekhhjqktQQQ12SGmKoS1JDDHVJaoihLkkNMdQlqSHO0ihtwKCzUoIzU2o2HKlLUkMMdUlqiKEuSQ0x1CWpIeuGepIzknwryZNJnkhybdd+apL7k+zvnjdNvlxJ0loGGam/Dny6qrYB7wOuTrIN2Ak8UFXvAh7oXkuSZmjdUK+qQ1X1aLf8KvAUcDpwOXB7t9ntwBUTqlGSNKANnVNPsgicAzwMbK6qQ92qF4HNq7xnR5I9Sfa89NJLo9QqSVrHwKGe5GTgbuC6qnqlf11VFVArva+qdlfVUlUtLSwsjFSsJGltA4V6kpPoBfodVXVP13w4yZZu/RbgyGRKlCQNapC7XwLcAjxVVV/oW/U14Kpu+Srgq+MvT5K0EYPM/fJ+4OPA40n2dm2fAXYBf5Pkk8DzwEcnUqEkaWDrhnpVfQfIKqsvGm85kqRR+I1SSWqIU+9KbGxKXWmeOVKXpIYY6pLUEENdkhpiqEtSQwx1SWqIoS5JDTHUJakh3qeuqRj3feDeVy6tzJG6JDXEUJekhhjqktQQQ12SGmKoS1JDDHVJaoihLkkNMdQlqSGGuiQ1xFCXpIYY6pLUEENdkhpiqEtSQwx1SWqIoS5JDTHUJakhhrokNcRQl6SGrBvqSW5NciTJvr62G5IcTLK3e1w62TIlSYMYZKR+G3DxCu1frKrt3eMb4y1LkjSMdUO9qh4EXp5CLZKkEZ04wnuvSfKbwB7g01X1o5U2SrID2AGwdevWEXanaVnc+fVZl3BcGbS/D+y6bMKVqAXDXii9CTgL2A4cAm5cbcOq2l1VS1W1tLCwMOTuJEmDGCrUq+pwVb1RVT8GbgbOG29ZkqRhDBXqSbb0vfwIsG+1bSVJ07PuOfUkdwIXAqcleQH4Y+DCJNuBAg4An5pciZKkQa0b6lV15QrNt0ygFknSiPxGqSQ1ZJRbGiWtwVtDNQuO1CWpIYa6JDXEUJekhhjqktQQQ12SGmKoS1JDDHVJaoihLkkNMdQlqSGGuiQ1xFCXpIYY6pLUEENdkhpiqEtSQ5x6VzpGDDqV74Fdl024Es0zR+qS1BBDXZIaYqhLUkMMdUlqiKEuSQ0x1CWpId7S2ABvdZN0lCN1SWqIoS5JDTHUJakhhrokNWTdUE9ya5IjSfb1tZ2a5P4k+7vnTZMtU5I0iEFG6rcBFy9r2wk8UFXvAh7oXkuSZmzdUK+qB4GXlzVfDtzeLd8OXDHesiRJwxj2PvXNVXWoW34R2Lzahkl2ADsAtm7dOuTuJA3K7y0c30a+UFpVBdQa63dX1VJVLS0sLIy6O0nSGoYN9cNJtgB0z0fGV5IkaVjDhvrXgKu65auAr46nHEnSKAa5pfFO4F+As5O8kOSTwC7gl5LsBz7YvZYkzdi6F0qr6spVVl005lokSSPyG6WS1BBDXZIaYqhLUkMMdUlqiKEuSQ0x1CWpIYa6JDXEUJekhhjqktSQYafe1RQMOoWqJB3lSF2SGmKoS1JDDHVJaoihLkkNMdQlqSGGuiQ1xFCXpIYY6pLUEENdkhpiqEtSQwx1SWqIoS5JDTHUJakhztI4gEFnSzyw67IJVyJJa3OkLkkNMdQlqSGGuiQ1xFCXpIaMdKE0yQHgVeAN4PWqWhpHUZKk4Yzj7pdfrKofjuHnSJJG5OkXSWrIqCP1Au5LUsBfVtXu5Rsk2QHsANi6devQO/JecUla36gj9fOr6lzgEuDqJBcs36CqdlfVUlUtLSwsjLg7SdJaRgr1qjrYPR8B7gXOG0dRkqThDB3qSd6a5JSjy8CHgH3jKkyStHGjnFPfDNyb5OjP+UpV/cNYqpIkDWXoUK+q54CfH2MtkqQReUujJDXkuJ16d9BbJFva9yz/m3Xs2sjvjbcUz54jdUlqiKEuSQ0x1CWpIYa6JDXEUJekhhjqktQQQ12SGnLc3qc+Cd4HruPd8TZF9jzew+9IXZIaYqhLUkMMdUlqiKEuSQ0x1CWpIYa6JDXEWxql49SxMP30oLcBHm+3Uq7FkbokNcRQl6SGGOqS1BBDXZIaYqhLUkMMdUlqiKEuSQ0x1CWpIYa6JDXEUJekhhjqktSQkUI9ycVJnk7yTJKd4ypKkjScoUM9yQnAXwCXANuAK5NsG1dhkqSNG2Wkfh7wTFU9V1X/DdwFXD6esiRJw0hVDffG5DeAi6vqd7rXHwd+oaquWbbdDmBH9/Js4Onhy92Q04AfTmlfg5rHmmA+65rHmmA+65rHmmA+65rHmmD9ut5RVQuD/KCJz6deVbuB3ZPez3JJ9lTV0rT3u5Z5rAnms655rAnms655rAnms655rAnGW9cop18OAmf0vf65rk2SNCOjhPq/Au9KcmaStwAfA742nrIkScMY+vRLVb2e5BrgH4ETgFur6omxVTa6qZ/yGcA81gTzWdc81gTzWdc81gT
zWdc81gRjrGvoC6WSpPnjN0olqSGGuiQ15JgP9SR/nWRv9ziQZG/Xvpjkv/rWfanvPe9N8ng3vcGfJckE6rohycG+/V/at+4Pu30/neTDfe0TnXYhyeeT/HuSx5Lcm+TtXftM+2qFOmcy/USSM5J8K8mTSZ5Icm3XvuFjOea6DnTHYG+SPV3bqUnuT7K/e97Utac7Ts90x/ncCdV0dl9/7E3ySpLrZtFXSW5NciTJvr62DfdPkqu67fcnuWoCNU3n81dVzTyAG4E/6pYXgX2rbPdd4H1AgL8HLplALTcAf7BC+zbge8BPA2cCz9K70HxCt/xO4C3dNtvGXNOHgBO75c8Cn52Hvlq2v4n3wxr73gKc2y2fAny/O14bOpYTqOsAcNqyts8BO7vlnX3H8tLuOKU7bg9Pod9OAF4E3jGLvgIuAM7t/x3eaP8ApwLPdc+buuVNY65pKp+/Y36kflT3L9hHgTvX2W4L8Laqeqh6vfZl4IrJV/h/LgfuqqrXquoHwDP0plyY+LQLVXVfVb3evXyI3ncLVjWjvprZ9BNVdaiqHu2WXwWeAk5f4y2rHctpuBy4vVu+nZ8cl8uBL1fPQ8Dbu+M4SRcBz1bV82tsM7G+qqoHgZdX2N9G+ufDwP1V9XJV/Qi4H7h4nDVN6/PXTKgDHwAOV9X+vrYzk/xbkm8n+UDXdjrwQt82L7D2B3cU13R/at169M+/bl//scL+V2uflN+m9y//UbPuq6Om3Q8rSrIInAM83DVt5FiOWwH3JXkkvWk3ADZX1aFu+UVg85Rr6vcx3jyYmmVfHbXR/mnm83dMhHqSbybZt8KjfwR3JW/+xToEbK2qc4DfB76S5G1TrOsm4Cxge1fLjePc95A1Hd3meuB14I6uaeJ9dSxJcjJwN3BdVb3CjI5ln/Or6lx6M6JeneSC/pXdKG4m9yan98XDXwX+tmuadV/9P7Psn5VM+vM38blfxqGqPrjW+iQnAr8GvLfvPa8Br3XLjyR5Fng3vakM+v/sGXp6g/Xq6qvvZuDvupdrTa8w8rQLA/TVbwG/DFzU/bJPpa82YKbTTyQ5iV6g31FV9wBU1eG+9YMey7GpqoPd85Ek99I7bXE4yZaqOtT9mX5kmjX1uQR49Ggfzbqv+my0fw4CFy5r/6dxFzWVz984LlTM+kHv3Ne3l7Ut0F2IoXfR7SBwaq188eHSCdS0pW/59+idTwR4D2++YPQcvQtNJ3bLZ/KTC4TvmUA/PQkszFNfLatl4v2wxr5D77zln45yLMdc01uBU/qW/7k7jp/nzRcCP9ctX8abLwR+d8J9dhfwiVn3FcsuNm60f+hdIP0BvYukm7rlU8dc01Q+fxP/oEzjAdwG/O6ytl8HngD2Ao8Cv9K3bgnYR+8K/J/TfbN2zDX9FfA48Bi9OXH6f9mv7/b9NH1Xs+ldmf9+t+76CdT0DL3zhnu7x5fmoa9WqHOi/bDGfs+n92f6Y319dOkwx3KMNb2TXhh+rztG13ftPws8AOwHvtkXAqH3P695tqt5aYL99VbgP4GfGeX3fgx13EnvFMb/0Dvv/Mlh+ofeee5nuscnJlDTVD5/ThMgSQ05Ji6USpIGY6hLUkMMdUlqiKEuSQ0x1CWpIYa6JDXEUJekhvwvWo99SVm+ce0AAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "diff_fvc = (np.mean(FVC_pred,axis=1) - sc.inverse_transform(testY))\n",
    "diff = (np.mean(predictions,axis=1) - testY)\n",
    "plt.hist(diff_fvc, bins=30)\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "MSE :  134499.59763007556\n",
      "RMSE :  366.7418678445039\n"
     ]
    }
   ],
   "source": [
    "MSE = np.mean(diff_fvc*diff_fvc)\n",
    "RMSE = np.sqrt(MSE)\n",
    "print(\"MSE : \",MSE)\n",
    "print(\"RMSE : \", RMSE)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "MSE :  0.1933643950017387\n",
      "RMSE :  0.4397321855422215\n"
     ]
    }
   ],
   "source": [
    "MSE = np.mean(diff*diff)\n",
    "RMSE = np.sqrt(MSE)\n",
    "print(\"MSE : \",MSE)\n",
    "print(\"RMSE : \", RMSE)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "-10.215427811103961"
      ]
     },
     "execution_count": 22,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "compute_score(testY,np.mean(FVC_pred,axis=1),np.std(FVC_pred,axis=1))"
   ]
  },
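  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Results so far (test set, 20 stochastic passes each):\n",
    "\n",
    "| Dropout | RMSE (FVC) | compute_score |\n",
    "| --- | --- | --- |\n",
    "| previous run | 320.8 | -11.89 |\n",
    "| 0.5 | 341.7 | -10.92 |\n",
    "| 0.6 | 366.7 | -10.22 |\n",
    "\n",
    "Raising the dropout rate widens the sampled predictions, which hurts RMSE slightly but improves the likelihood-based score."
   ]
  },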
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "**Dropout 0.7**"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[<tf.Tensor 'dense_6_input_2:0' shape=(None, 6) dtype=float32>, <tf.Tensor 'input_2_2:0' shape=(None, 240, 240, 4) dtype=float32>]\n"
     ]
    }
   ],
   "source": [
    "tf.compat.v1.disable_eager_execution()\n",
    "\n",
    "dropout = 0.7\n",
    "num_iter = 20\n",
    " \n",
    "input_data=[testAttrX, test_dataset]\n",
    "input_data[1] = np.asarray(input_data[1]).reshape(test_dataset.shape[0],240,240,4)\n",
    "num_samples = input_data[0].shape[0]\n",
    "\n",
    "predict_with_dropout = create_dropout_predict_function(model, dropout)\n",
    "\n",
    "predictions = np.zeros((num_samples, num_iter))\n",
    "for i in range(num_iter):\n",
    "    predictions[:,i] = predict_with_dropout([input_data,1])[0].reshape(-1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXAAAAD4CAYAAAD1jb0+AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAO8UlEQVR4nO3df4xlZX3H8fenC4IVI7tlstkC6aASDTFhMdMtRNNSFIvQFExMI2nstiVZm0gKDWm7+g+atMmSKLRNGuJakE1DUQpYCFgpXUmMSbN2VrfLwkJYcVXIwg6Vn/2DuvjtH/cMDsPM3jsz987w7Lxfyc2c85zn3vOdJ89+9txzz7mTqkKS1J5fWukCJEmLY4BLUqMMcElqlAEuSY0ywCWpUcct585OOeWUGh8fX85dSlLzdu/e/WxVjc1u7xvgSU4Evg2c0PW/o6quTXIL8FvAC13XP6qqPUd7rfHxcSYnJxdYuiStbkl+NFf7IEfgrwAXVNXLSY4HvpPk37ptf1FVdwyrSEnS4PoGePXu9Hm5Wz2+e3j3jyStsIE+xEyyJske4DDwQFXt6jb9TZK9SW5IcsKoipQkvdFAAV5Vr1bVRuA0YFOS9wGfAd4L/DqwDviruZ6bZEuSySSTU1NTw6lakrSwywir6nngQeCiqjpUPa8AXwE2zfOc7VU1UVUTY2Nv+BBVkrRIfQM8yViSk7vltwIXAo8m2dC1BbgM2De6MiVJsw1yFcoGYEeSNfQC//aqujfJt5KMAQH2AH86ujIlSbMNchXKXuCcOdovGElFkqSBeCu9JDVqWW+l18KMb71vqK93cNslQ309SSvLI3BJapQBLkmNMsAlqVEGuCQ1ygCXpEZ5FcoqMuyrWkbBK2WkwXkELkmNMsAlqVEGuCQ1ygCXpEYZ4JLUKANckhplgEtSowxwSWqUAS5JjTLAJalRBrgkNcoAl6RGGeCS1CgDXJIa1TfAk5yY5LtJ/jvJw0k+37WfkWRXkgNJvpbkLaMvV5I0bZAj8FeAC6rqbGAjcFGSc4HrgBuq6t3Ac8AVI6tSkvQGfQO8el7uVo/vHgVcANzRte8ALhtFgZKkuQ10DjzJmiR7gMPAA8APgOer6kjX5Ung1HmeuyXJZJLJqampIZQsSYIBA7yqXq2qjcBpwCbgvYPuoKq2V9VEVU2MjY0trkpJ0hss6CqUqnoeeBA4Dzg5yfTf1DwNeGq4pUmSjmaQq1DGkpzcLb8VuBDYTy/IP9512wzcPaIaJUlzGOSv0m8AdiRZQy/wb6+qe5M8Anw1yV8D3wduGmGdkqRZ+gZ4Ve0Fzpmj/Ql658MlSSvAOzElqVEGuCQ1ygCXpEYZ4JLUKANckhplgEtSowa5DlwDGt9630qX0Lxhj+HBbZcM9fWkNxOPwCWpUQa4JDXKAJekRhngktQoA1ySGmWAS1KjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEYZ4JLUKANckhplgEtSo/oGeJLTkzyY5JEkDye5qmv/XJKnkuzpHhePvlxJ0rRBvg/8CHBNVX0vyduB3Uke6LbdUFVfGF15kqT59A3wqjoEHOqWX0qyHzh11IVJko5uQefAk4wD5wC7uqYrk+xNcnOStcMuTpI0v4EDPMlJwJ3A1VX1InAj8C5gI70j9C/O87wtSSaTTE5NTS29YkkSMGCAJzmeXnjfWlV3AVTVM1X1alX9HPgysGmu51bV9qqaqKqJsbGxYdUtSaveIFehBLgJ2F9V189o3zCj28eAfcMvT5I0n0GuQvkA8EngoSR7urbPApcn2QgUcBD41AjqkyTNY5CrUL4DZI5N3xh+OZKkQXknpiQ1ygCXpEYZ4JLUKANckhplgEtSowxwSWqUAS5JjRrkRp5j0vjW+1a6BDWohXlzcNslK12ClolH4JLUKANckhplgEtSowxwSWqUAS5JjTLAJalRBrgkNcoAl6RGGeCS1CgDXJIaZYBLUqNW7XehaHVo4btLpMXyCFySGmWAS1Kj+gZ4ktOTPJjkkSQPJ7mqa1+X5IEkj3c/146+XEnStEGOwI8A11TVWcC5wKeTnAVsBXZW1ZnAzm5dkrRM+gZ4VR2qqu91yy8B+4FTgUuBHV23HcBlI6pRkjSHBZ0DTzIOnAPsAtZX1aFu09PA+nmesyXJZJLJqamppdQqSZph4ABPchJwJ3B1Vb04c1tVFVBzPa+qtlfVRFVNjI2NLalYSdIvDBTgSY6nF963VtVdXfMzSTZ02zcAh0dToiRpLoNchRLgJmB/VV0/Y9M9wOZueTNw9/DLkyTNZ5A7MT8AfBJ4KMmeru2zwDbg9iRXAD8Cfn8kFUqS5tQ3wKvqO0Dm2fyh4ZYjSRqUd2JKUqMMcElqlAEuSY0ywCWpUQa4JDXKAJekRhngktQoA1ySGmWAS1KjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEYZ4JLUKANckhplgEtSowxwSWqUAS5JjTLAJalRBrgkNapvgCe5OcnhJPtmtH0uyVNJ9nSPi0dbpiRptkGOwG8BLpqj/Yaq2tg9vjHcsiRJ/fQN8Kr6NvDTZahFkrQASzkHfmWSvd0plrXzdUqyJclkksmpqakl7E6SNNNiA/xG4F3ARuAQ8MX5OlbV9qqaqKqJsbGxRe5OkjTbogK8qp6pqler6ufAl4FNwy1LktTPogI8yYYZqx8D9s3XV5I0Gsf165DkNuB84JQkTwLXAucn2QgUcBD41OhKlCTNpW+AV9XlczTfNIJaJEkL4J2YktQoA1ySGmWAS1KjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEYZ4JLUKANckhplgEtSowxwSWqUAS5JjTLAJalRBrgkNcoAl6RGGeCS1CgDXJIaZYBLUqMMcElqlAEuSY0ywCWpUX0DPMnNSQ4n2TejbV2SB5I83v1cO9oyJUmzDXIEfgtw0ay2rcDOqjoT2NmtS5KWUd8Ar6pvAz+d1XwpsKNb3gFcNtyyJEn9LPYc+PqqOtQtPw2sn69jki1JJpNMTk1NLXJ3kqTZlvwhZlUVUEfZvr2qJqpqYmxsbKm7kyR1FhvgzyTZAND9PDy8kiRJg1hsgN8DbO6WNwN3D6ccSdKgBrmM8DbgP4H3JHkyyRXANuDCJI8DH+7WJUnL6Lh+Harq8nk2fWjItUiSFqBvgEta3ca33jf01zy47ZKhv+Zq5K30ktQoA1ySGmWAS1KjDHBJapQBLkmN8ioU6RgziqtG9ObkEbgkNcoAl6RGGeCS1CgDXJIaZYBLUqMMcElqlAEuSY0ywCWpUQa4JDXKAJekRhngktSoZr4Lxe93kKTX8whckhplgEtSo5Z0CiXJQeAl4FXgSFVNDKMoSVJ/wzgH/ttV9ewQXkeStACeQpGkRi01wAv49yS7k2yZq0OSLUkmk0xOTU0tcXeSpGlLDfAPVtX7gY8Cn07ym7M7VNX2qpqoqomxsbEl7k6SNG1JAV5VT3U/DwNfBzYNoyhJUn+LDvAkb0vy9ull4CPAvmEVJkk6uqVchbIe+HqS6df556r65lC
qkiT1tegAr6ongLOHWIskaQGa+S4USceON/t3Gx3cdslKlzAQrwOXpEYZ4JLUKANckhplgEtSowxwSWqUAS5JjfIyQkmaZRSXOY7i0kSPwCWpUQa4JDXKAJekRhngktQoA1ySGmWAS1KjDHBJapQBLkmNMsAlqVEGuCQ1ygCXpEYZ4JLUKANckhplgEtSo5YU4EkuSvJYkgNJtg6rKElSf4sO8CRrgH8APgqcBVye5KxhFSZJOrqlHIFvAg5U1RNV9X/AV4FLh1OWJKmfpfxFnlOBn8xYfxL4jdmdkmwBtnSrLyd5bAn7XKhTgGeXcX8tcoyOzvE5Osenv1OAZ3Pdkl7j1+ZqHPmfVKuq7cD2Ue9nLkkmq2piJfbdCsfo6Byfo3N8+hvlGC3lFMpTwOkz1k/r2iRJy2ApAf5fwJlJzkjyFuATwD3DKUuS1M+iT6FU1ZEkVwL3A2uAm6vq4aFVNhwrcuqmMY7R0Tk+R+f49DeyMUpVjeq1JUkj5J2YktQoA1ySGtV0gCc5PcmDSR5J8nCSq7r2dUkeSPJ493Nt154kf9/d+r83yftX9jdYHknWJPl+knu79TOS7OrG4Wvdh9AkOaFbP9BtH1/RwpdBkpOT3JHk0ST7k5zn/Hm9JH/e/fval+S2JCeu5jmU5OYkh5Psm9G24DmTZHPX//EkmxdTS9MBDhwBrqmqs4BzgU93t/NvBXZW1ZnAzm4derf9n9k9tgA3Ln/JK+IqYP+M9euAG6rq3cBzwBVd+xXAc137DV2/Y93fAd+sqvcCZ9MbJ+dPJ8mpwJ8BE1X1PnoXLHyC1T2HbgEumtW2oDmTZB1wLb2bHzcB106H/oJU1THzAO4GLgQeAzZ0bRuAx7rlLwGXz+j/Wr9j9UHv+vydwAXAvUDo3Tl3XLf9POD+bvl+4Lxu+biuX1b6dxjh2LwD+OHs39H587qxmL7jel03J+4Ffme1zyFgHNi32DkDXA58aUb76/oN+mj9CPw13Vu1c4BdwPqqOtRtehpY3y3Pdfv/qctV4wr5W+AvgZ93678CPF9VR7r1mWPw2vh021/o+h+rzgCmgK90p5j+McnbcP68pqqeAr4A/Bg4RG9O7MY5NNtC58xQ5tIxEeBJTgLuBK6uqhdnbqvef2+r8lrJJL8LHK6q3Stdy5vUccD7gRur6hzgf/nFW19gdc8fgO5t/aX0/rP7VeBtvPH0gWZYzjnTfIAnOZ5eeN9aVXd1zc8k2dBt3wAc7tpX2+3/HwB+L8lBet8WeQG9c74nJ5m+iWvmGLw2Pt32dwD/s5wFL7MngSerale3fge9QHf+/MKHgR9W1VRV/Qy4i968cg693kLnzFDmUtMBniTATcD+qrp+xqZ7gOlPdTfTOzc+3f6H3SfD5wIvzHjbc8ypqs9U1WlVNU7vg6dvVdUfAA8CH++6zR6f6XH7eNf/mD36rKqngZ8keU/X9CHgEZw/M/0YODfJL3f/3qbHyDn0egudM/cDH0mytnuX85GubWFW+sOAJX6Q8EF6b1X2Anu6x8X0zrntBB4H/gNY1/UPvT9C8QPgIXqfrK/477FMY3U+cG+3/E7gu8AB4F+AE7r2E7v1A932d6503cswLhuByW4O/Suw1vnzhjH6PPAosA/4J+CE1TyHgNvofR7wM3rv4q5YzJwB/qQbpwPAHy+mFm+ll6RGNX0KRZJWMwNckhplgEtSowxwSWqUAS5JjTLAJalRBrgkNer/AToN7nFoiaUkAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],